2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0

typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;

// Used for field_poc[]: 0 = TopFieldOrderCnt, 1 = BottomFieldOrderCnt.
// TOP_FIELD was missing from this source although it is used below
// (gst_vaapi_frame_store_add); restore it alongside BOTTOM_FIELD.
#define TOP_FIELD 0
#define BOTTOM_FIELD 1
/* ------------------------------------------------------------------------- */
/* --- H.264 Parser Info                                                 --- */
/* ------------------------------------------------------------------------- */

/*
 * Extended decoder unit flags:
 *
 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
 *
 * NOTE(review): the `enum {` opener and closing `};` are not visible in
 * this extract; these enumerators extend the base decoder unit flags.
 */
    /* This flag does not strictly follow the definitions (7.4.1.2.3)
       for detecting the start of an access unit as we are only
       interested in knowing if the current slice is the first one or
       the last one in the current access unit */
    GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
    GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),

    /* Mask covering both access-unit boundary flags */
    GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
        GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
        GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
/* Unchecked cast of a generic pointer to a parser info pointer */
#define GST_VAAPI_PARSER_INFO_H264(obj) \
    ((GstVaapiParserInfoH264 *)(obj))

/* Parsed NAL payload carried alongside each decoder unit.
 * NOTE(review): members referenced by the code below (the `nalu` field
 * and the `data` union holding sps/pps/sei) are not visible in this
 * extract — confirm against the upstream definition. */
struct _GstVaapiParserInfoH264 {
    GstVaapiMiniObject parent_instance;
    GstH264SliceHdr slice_hdr;
    guint flags; // Same as decoder unit flags (persistent)
    guint view_id; // View ID of slice
    guint voc; // View order index (VOIdx) of slice
/* Mini-object finalizer: releases the payload owned by the parser info.
 * SPS/subset-SPS payloads are cleared, SEI message arrays are unreffed.
 * NOTE(review): the `break;` statements and surrounding braces appear to
 * have been lost in this extract. */
gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
    switch (pi->nalu.type) {
    case GST_H264_NAL_SPS:
    case GST_H264_NAL_SUBSET_SPS:
        gst_h264_sps_clear(&pi->data.sps);
    case GST_H264_NAL_SEI:
        g_array_unref(pi->data.sei);
/* Returns the singleton mini-object class (size + finalizer) used to
 * allocate GstVaapiParserInfoH264 instances. */
static inline const GstVaapiMiniObjectClass *
gst_vaapi_parser_info_h264_class(void)
    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
        .size = sizeof(GstVaapiParserInfoH264),
        .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
    return &GstVaapiParserInfoH264Class;
/* Allocates a new parser info object with an initial reference */
static inline GstVaapiParserInfoH264 *
gst_vaapi_parser_info_h264_new(void)
    return (GstVaapiParserInfoH264 *)
        gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());

/* Reference-counting helpers: thin wrappers over GstVaapiMiniObject */
#define gst_vaapi_parser_info_h264_ref(pi) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))

#define gst_vaapi_parser_info_h264_unref(pi) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))

/* Replaces *old_pi_ptr with new_pi, adjusting reference counts */
#define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
        (GstVaapiMiniObject *)(new_pi))
/* ------------------------------------------------------------------------- */
/* --- H.264 Pictures                                                    --- */
/* ------------------------------------------------------------------------- */

/*
 * Extended picture flags:
 *
 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
 *   may be used for inter-view prediction
 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
 *   i.e. a picture that is decoded with only inter-view prediction,
 *   and not inter prediction
 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
 *   access unit
 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
 *   access unit
 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
 *   "used for short-term reference"
 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
 *   "used for long-term reference"
 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
 *   reference picture (short-term reference or long-term reference)
 *
 * NOTE(review): the `enum {` opener and closing `};` are not visible in
 * this extract. Long-term = REFERENCE | REFERENCE2, so the two reference
 * kinds are distinguishable under the same mask.
 */
    GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
    GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
    GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
    GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
    GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
    GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),

    GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE),
    GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
    GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
/* TRUE if the picture is an IDR picture */
#define GST_VAAPI_PICTURE_IS_IDR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))

/* TRUE only when exactly the short-term bit pattern is set (masked
   compare, so a long-term picture does not match) */
#define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)

/* TRUE only when the long-term bit pattern (REFERENCE|REFERENCE2) is set */
#define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)

/* TRUE if the picture may be used for inter-view prediction (MVC) */
#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))

/* TRUE if the picture is an MVC anchor picture */
#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))

/* Unchecked cast to a GstVaapiPictureH264 pointer */
#define GST_VAAPI_PICTURE_H264(picture) \
    ((GstVaapiPictureH264 *)(picture))
/* An H.264 picture: a frame or a single field, wrapping the codec-neutral
 * GstVaapiPicture base object.
 * NOTE(review): members referenced elsewhere in this file (`structure`,
 * `field_poc[2]`) are not visible in this extract — confirm upstream. */
struct _GstVaapiPictureH264 {
    GstVaapiPicture base;
    GstH264SliceHdr *last_slice_hdr;
    gint32 frame_num; // Original frame_num from slice_header()
    gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
    gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
    gint32 pic_num; // Temporary for ref pic marking: PicNum
    gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
    GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
    guint output_flag : 1; // picture is intended to be output
    guint output_needed : 1; // picture is still pending output (DPB bookkeeping)

/* Generates the GstVaapiPictureH264Class boilerplate (destroy/create) */
GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
/* Destructor hook: forwards destruction to the base picture object */
gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
    gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
/* Constructor hook: creates the base picture, then initializes the H.264
 * specific state. Both field POCs start at G_MAXINT32 ("unset"), so the
 * code can later detect which field values have been filled in. */
gst_vaapi_picture_h264_create(
    GstVaapiPictureH264 *picture,
    const GstVaapiCodecObjectConstructorArgs *args
    if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
    picture->field_poc[0] = G_MAXINT32;
    picture->field_poc[1] = G_MAXINT32;
    picture->output_needed = FALSE;
/* Allocates a new H.264 picture bound to @decoder, with room for a
 * VAPictureParameterBufferH264 picture parameter buffer */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
    return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
        &GstVaapiPictureH264Class,
        GST_VAAPI_CODEC_BASE(decoder),
        NULL, sizeof(VAPictureParameterBufferH264),
/* Replaces the reference flags on @picture (and, when @other_field is
 * requested and present, on its complementary field) with
 * @reference_flags. Clearing first ensures stale short/long-term bits
 * do not survive. */
gst_vaapi_picture_h264_set_reference(
    GstVaapiPictureH264 *picture,
    guint reference_flags,
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);

    /* Propagate to the other field of the same frame store, if any */
    if (!other_field || !(picture = picture->other_field))
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
    GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
/* Creates the second field picture derived from @picture; returns NULL
 * on failure (or when @picture is NULL) */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
    g_return_val_if_fail(picture, NULL);

    return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
/* ------------------------------------------------------------------------- */
/* --- Frame Buffers (DPB)                                               --- */
/* ------------------------------------------------------------------------- */

/* A DPB entry: holds one frame, or up to two complementary fields.
 * NOTE(review): members referenced below (`view_id`, `structure`,
 * `num_buffers`, `output_needed`) are not visible in this extract. */
struct _GstVaapiFrameStore {
    GstVaapiMiniObject parent_instance;
    GstVaapiPictureH264 *buffers[2];
/* Mini-object finalizer: drops the references held on buffered pictures */
gst_vaapi_frame_store_finalize(gpointer object)
    GstVaapiFrameStore * const fs = object;

    for (i = 0; i < fs->num_buffers; i++)
        gst_vaapi_picture_replace(&fs->buffers[i], NULL);
/* Creates a new frame store seeded with @picture as its first buffer,
 * inheriting the picture's view id, structure and output_needed state */
static GstVaapiFrameStore *
gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
    GstVaapiFrameStore *fs;

    /* Local class: plain designated size/finalize for mini-object alloc */
    static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
        sizeof(GstVaapiFrameStore),
        gst_vaapi_frame_store_finalize

    fs = (GstVaapiFrameStore *)
        gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);

    fs->view_id = picture->base.view_id;
    fs->structure = picture->structure;
    fs->buffers[0] = gst_vaapi_picture_ref(picture);
    fs->buffers[1] = NULL;
    fs->output_needed = picture->output_needed;
/* Adds the second field @picture to a frame store already holding the
 * first field, completing it into a frame. Cross-fills the two field POC
 * values between the fields: each field contributes the POC of its own
 * parity and receives the other parity from its sibling. Field POCs must
 * still be "unset" (G_MAXINT32) before being filled. */
gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);

    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;

    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
        TOP_FIELD : BOTTOM_FIELD;
    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a single-frame frame store into two field pictures: the stored
 * picture becomes the top field and a newly created second field is
 * appended, copying frame_num, both field POCs and the output flag. */
gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
    GstVaapiPictureH264 * const first_field = fs->buffers[0];
    GstVaapiPictureH264 *second_field;

    g_return_val_if_fail(fs->num_buffers == 1, FALSE);

    first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);

    second_field = gst_vaapi_picture_h264_new_field(first_field);

    /* The store keeps the only long-lived reference to the new field */
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
    gst_vaapi_picture_unref(second_field);

    second_field->frame_num = first_field->frame_num;
    second_field->field_poc[0] = first_field->field_poc[0];
    second_field->field_poc[1] = first_field->field_poc[1];
    second_field->output_flag = first_field->output_flag;
    if (second_field->output_flag) {
        second_field->output_needed = TRUE;
/* TRUE when the store holds a complete frame (not a lone field) */
static inline gboolean
gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
    return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;

/* TRUE when any buffered picture is still marked as a reference */
static inline gboolean
gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
    for (i = 0; i < fs->num_buffers; i++) {
        if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))

/* TRUE when any buffered picture may be used for inter-view prediction */
gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
    for (i = 0; i < fs->num_buffers; i++) {
        if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
/* Reference-counting helpers for frame stores (mini-object wrappers) */
#define gst_vaapi_frame_store_ref(fs) \
    gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))

#define gst_vaapi_frame_store_unref(fs) \
    gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))

/* Replaces *old_fs_p with new_fs, adjusting reference counts */
#define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
    gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
        (GstVaapiMiniObject *)(new_fs))
/* ------------------------------------------------------------------------- */
/* --- H.264 Decoder                                                     --- */
/* ------------------------------------------------------------------------- */

/* Unchecked cast to the H.264 decoder type */
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
    ((GstVaapiDecoderH264 *)(decoder))

/*
 * Bitstream-state tracking: which headers have been seen so far.
 * NOTE(review): the `enum {` opener and closing `};` are not visible in
 * this extract.
 */
    GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
    GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
    GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,

    /* Both parameter sets received: picture headers are usable */
    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
        GST_H264_VIDEO_STATE_GOT_SPS |
        GST_H264_VIDEO_STATE_GOT_PPS),
    /* Headers plus at least one slice: a decodable picture */
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
        GST_H264_VIDEO_STATE_GOT_SLICE)
/* Private decoder state.
 * NOTE(review): several members used below (dpb_count, dpb_size,
 * dpb_size_max, max_views) are not visible in this extract. */
struct _GstVaapiDecoderH264Private {
    GstH264NalParser *parser;                    // NAL unit parser (codecparsers)
    GstVaapiPictureH264 *current_picture;        // picture being decoded
    GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];  // SPS table, indexed by id
    GstVaapiParserInfoH264 *active_sps;
    GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];  // PPS table, indexed by id
    GstVaapiParserInfoH264 *active_pps;
    GstVaapiParserInfoH264 *prev_pi;             // previously parsed NAL info
    GstVaapiParserInfoH264 *prev_slice_pi;       // previously parsed slice info
    GstVaapiFrameStore **prev_frames;            // last frame store per view (VOIdx)
    guint prev_frames_alloc;                     // allocated size of prev_frames
    GstVaapiFrameStore **dpb;                    // decoded picture buffer
    GstVaapiProfile profile;
    GstVaapiEntrypoint entrypoint;
    GstVaapiChromaType chroma_type;
    GPtrArray *inter_views;                      // inter-view reference pictures (MVC)
    GstVaapiPictureH264 *short_ref[32];          // short-term reference pictures
    guint short_ref_count;
    GstVaapiPictureH264 *long_ref[32];           // long-term reference pictures
    guint long_ref_count;
    GstVaapiPictureH264 *RefPicList0[32];        // reference list 0 (P/B slices)
    guint RefPicList0_count;
    GstVaapiPictureH264 *RefPicList1[32];        // reference list 1 (B slices)
    guint RefPicList1_count;
    guint nal_length_size;                       // NAL length prefix size (avcC mode)
    gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
    gint32 poc_msb; // PicOrderCntMsb
    gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
    gint32 prev_poc_msb; // prevPicOrderCntMsb
    gint32 prev_poc_lsb; // prevPicOrderCntLsb
    gint32 frame_num_offset; // FrameNumOffset
    gint32 frame_num; // frame_num (from slice_header())
    gint32 prev_frame_num; // prevFrameNum
    gboolean prev_pic_has_mmco5; // prevMmco5Pic
    gboolean prev_pic_structure; // previous picture structure
    guint has_context : 1;             // VA context was created
    guint progressive_sequence : 1;    // SPS frame_mbs_only semantics
/*
 * GstVaapiDecoderH264:
 *
 * A decoder based on H264. Embeds its private state directly (no
 * separate allocation).
 */
struct _GstVaapiDecoderH264 {
    GstVaapiDecoder parent_instance;
    GstVaapiDecoderH264Private priv;

/*
 * GstVaapiDecoderH264Class:
 *
 * A decoder class based on H264. No additional class members.
 */
struct _GstVaapiDecoderH264Class {
    GstVaapiDecoderClass parent_class;
/* Forward declarations for functions defined later in this file */
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);

is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture);

/* Frame-store convenience wrapper: checks the first buffered picture */
static inline gboolean
is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
    GstVaapiFrameStore *fs)
    return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
/* Determines if the supplied profile is one of the MVC set */
is_mvc_profile(GstH264Profile profile)
    return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
        profile == GST_H264_PROFILE_STEREO_HIGH;

/* Determines the view_id from the supplied NAL unit (0 for non-MVC NALs) */
get_view_id(GstH264NalUnit *nalu)
    return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
/* Determines the view order index (VOIdx) from the supplied view_id by
 * scanning the SPS MVC extension's view list. Logs an error when the
 * view_id is unknown. Base-view handling for non-MVC SPS is in the
 * early-out branch (return not visible in this extract). */
get_view_order_index(GstH264SPS *sps, guint16 view_id)
    GstH264SPSExtMVC *mvc;

    if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)

    mvc = &sps->extension.mvc;
    for (i = 0; i <= mvc->num_views_minus1; i++) {
        if (mvc->view[i].view_id == view_id)

    GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
/* Determines NumViews: 1 for plain AVC, num_views_minus1+1 for MVC */
get_num_views(GstH264SPS *sps)
    return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
        sps->extension.mvc.num_views_minus1 : 0);
/* Get number of reference frames to use: derives MaxDecFrameBuffering
 * per H.264 Annex A / E. Starts from the level's MaxDpbMbs limit,
 * doubles for MVC profiles, honors the VUI bitstream restriction if
 * present, applies the constraint_set3 "still picture" special case,
 * then clamps against the MVC DPB bound and num_ref_frames.
 * Returns at least 1. */
get_max_dec_frame_buffering(GstH264SPS *sps)
    guint num_views, max_dpb_frames;
    guint max_dec_frame_buffering, PicSizeMbs;
    GstVaapiLevelH264 level;
    const GstVaapiH264LevelLimits *level_limits;

    /* Table A-1 - Level limits */
    /* level_idc == 11 with constraint_set3_flag means level 1b */
    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
        level = GST_VAAPI_LEVEL_H264_L1b;
        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
    if (G_UNLIKELY(!level_limits)) {
        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
        max_dec_frame_buffering = 16;
        /* DPB size limited by MaxDpbMbs / picture size in macroblocks */
        PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
            (sps->pic_height_in_map_units_minus1 + 1) *
            (sps->frame_mbs_only_flag ? 1 : 2));
        max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;

    if (is_mvc_profile(sps->profile_idc))
        max_dec_frame_buffering <<= 1;

    /* VUI parameters may tighten (or state) the exact DPB size */
    if (sps->vui_parameters_present_flag) {
        GstH264VUIParams * const vui_params = &sps->vui_parameters;
        if (vui_params->bitstream_restriction_flag)
            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
            switch (sps->profile_idc) {
            case 44: // CAVLC 4:4:4 Intra profile
            case GST_H264_PROFILE_SCALABLE_HIGH:
            case GST_H264_PROFILE_HIGH:
            case GST_H264_PROFILE_HIGH10:
            case GST_H264_PROFILE_HIGH_422:
            case GST_H264_PROFILE_HIGH_444:
                /* Intra-only profiles need no reference buffering */
                if (sps->constraint_set3_flag)
                    max_dec_frame_buffering = 0;

    /* Clamp: MVC allows 16 * 2^ceil(log2(NumViews)) frames at most,
       and never fewer than num_ref_frames */
    num_views = get_num_views(sps);
    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
    if (max_dec_frame_buffering > max_dpb_frames)
        max_dec_frame_buffering = max_dpb_frames;
    else if (max_dec_frame_buffering < sps->num_ref_frames)
        max_dec_frame_buffering = sps->num_ref_frames;
    return MAX(1, max_dec_frame_buffering);
/* Removes entry @index from a pointer array in O(1) by moving the last
 * entry into the vacated slot (does NOT preserve element order), then
 * NULLs the tail slot and shrinks the recorded length. */
array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
    gpointer * const entries = array;
    guint num_entries = *array_length_ptr;

    g_return_if_fail(index < num_entries);

    if (index != --num_entries)
        entries[index] = entries[num_entries];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;
/* Order-agnostic variant: simply delegates to the fast remover.
 * NOTE(review): upstream selects between these two definitions with a
 * USE_STRICT_DPB_ORDERING preprocessor conditional; the #if/#else lines
 * are not visible in this extract. */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    array_remove_index_fast(array, array_length_ptr, index);

/* Order-preserving variant: shifts all subsequent entries down by one */
array_remove_index(void *array, guint *array_length_ptr, guint index)
    gpointer * const entries = array;
    const guint num_entries = *array_length_ptr - 1;

    g_return_if_fail(index <= num_entries);

    for (i = index; i < num_entries; i++)
        entries[i] = entries[i + 1];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;

/* Removes entry @index from `array` paired with its `array_count` */
#define ARRAY_REMOVE_INDEX(array, index) \
    array_remove_index(array, &array##_count, index)
/* Drops DPB entry @index. With USE_STRICT_DPB_ORDERING the remaining
 * entries are shifted to preserve order; otherwise the last entry is
 * moved into the hole. The vacated tail slot is released. */
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    guint i, num_frames = --priv->dpb_count;

    if (USE_STRICT_DPB_ORDERING) {
        for (i = index; i < num_frames; i++)
            gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
    else if (index != num_frames)
        gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
    gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
    /* NOTE(review): the `dpb_output(` signature line is not visible in
     * this extract. Outputs @picture from frame store @fs: clears its
     * output_needed flag, decrements the store's pending-output count,
     * and pushes the picture downstream once the store is drained. */
    GstVaapiDecoderH264 *decoder,
    GstVaapiFrameStore *fs,
    GstVaapiPictureH264 *picture
    picture->output_needed = FALSE;

    if (--fs->output_needed > 0)

    picture = fs->buffers[0];

    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
/* Removes DPB entry @i when it is no longer needed: nothing left to
 * output and no buffered picture still marked as reference */
dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore * const fs = priv->dpb[i];

    if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
        dpb_remove_index(decoder, i);
/* Finds the frame store holding the supplied picture; scans every
 * buffered field/frame of each DPB entry. (The return statements are
 * not visible in this extract; callers treat a negative result as
 * "not found".) */
dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        for (j = 0; j < fs->num_buffers; j++) {
            if (fs->buffers[j] == picture)
/* Finds the picture with the lowest POC that needs to be output.
 * When @picture is given, only entries from the same view are
 * considered. Ties on POC are broken by the lower VOC. Returns the DPB
 * index (or -1 when nothing is pending) and optionally the picture via
 * @found_picture_ptr. */
dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed)
        if (picture && picture->base.view_id != fs->view_id)
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed)
            if (!found_picture || found_picture->base.poc > pic->base.poc ||
                (found_picture->base.poc == pic->base.poc &&
                 found_picture->base.voc > pic->base.voc))
                found_picture = pic, found_index = i;

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
/* Finds the picture with the lowest VOC that needs to be output.
 * Only considers pictures from OTHER views that share @picture's POC,
 * i.e. other view components of the same access unit. Returns the DPB
 * index (or -1) and optionally the picture via @found_picture_ptr. */
dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed || fs->view_id == picture->base.view_id)
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed || pic->base.poc != picture->base.poc)
            if (!found_picture || found_picture->base.voc > pic->base.voc)
                found_picture = pic, found_index = i;

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
/* Outputs the other view components belonging to the same access unit
 * as @picture, in increasing VOC order, stopping at @voc. No-op for
 * single-view streams. */
dpb_output_other_views(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture, guint voc)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;

    if (priv->max_views == 1)

    /* Emit all other view components that were in the same access
       unit than the picture we have just found */
    found_picture = picture;
        found_index = dpb_find_lowest_voc(decoder, found_picture,
        if (found_index < 0 || found_picture->base.voc >= voc)
        success = dpb_output(decoder, priv->dpb[found_index], found_picture);
        dpb_evict(decoder, found_picture, found_index);
/* "Bumps" the DPB: outputs the pending picture with the lowest POC
 * (and, for MVC, the companion view components of its access unit),
 * then evicts entries that became unneeded. Returns whether a picture
 * was actually output. */
dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;

    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);

    /* Output preceding view components of the same AU first */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);

    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
    dpb_evict(decoder, found_picture, found_index);
    if (priv->max_views == 1)

    /* ... then any remaining view components of that AU */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
/* Clears the DPB. With @picture == NULL everything is dropped
 * ("flush all"); otherwise only entries from @picture's view are
 * dropped and the array is compacted. The per-view prev_frames cache is
 * reset on flush-all or at the start of an access unit. */
dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    for (i = 0; i < priv->dpb_count; i++) {
        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);

    /* Compact the array, keeping the surviving entries in order */
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
            priv->dpb[n++] = priv->dpb[i];

    /* Clear previous frame buffers only if this is a "flush-all" operation,
       or if the picture is the first one in the access unit */
    if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
            GST_VAAPI_PICTURE_FLAG_AU_START)) {
        for (i = 0; i < priv->max_views; i++)
            gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
/* Flushes the DPB: outputs every pending picture via dpb_bump(), then
 * clears the remaining entries */
dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    while (dpb_bump(decoder, picture))
    dpb_clear(decoder, picture);
/* Removes all unused inter-view-only reference components of the
 * current access unit: other-view entries with nothing to output, no
 * reference marking, and (unless this is the AU's last picture) no role
 * as an inter-view reference for subsequent frames. */
dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    const gboolean is_last_picture = /* in the access unit */
        GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);

    // Remove all unused inter-view only reference components of the current AU
    while (i < priv->dpb_count) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (fs->view_id != picture->base.view_id &&
            !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
             !is_inter_view_reference_for_next_frames(decoder, fs)))
            dpb_remove_index(decoder, i);
/* Stores @picture into the DPB, implementing the spec's C.4.5.1
 * (reference pictures) and C.4.5.2 (non-reference pictures) storage
 * rules. A second field joins its first field's frame store; otherwise
 * a new frame store is created, split into fields for interlaced
 * content, and bumped into the DPB when full. */
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore *fs;

    if (priv->max_views > 1)
        dpb_prune_mvc(decoder, picture);

    // Remove all unused pictures
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            if (fs->view_id == picture->base.view_id &&
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);

    // Check if picture is the second field and the first field is still in DPB
    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
        const gint found_index = dpb_find_picture(decoder,
            GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
        if (found_index >= 0)
            return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);

    // Create new frame store, and split fields if necessary
    fs = gst_vaapi_frame_store_new(picture);

    /* Track the most recent frame store for this view (VOIdx) */
    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
    gst_vaapi_frame_store_unref(fs);

    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
        if (!gst_vaapi_frame_store_split_fields(fs))

    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        while (priv->dpb_count == priv->dpb_size) {
            if (!dpb_bump(decoder, picture))

    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
        /* Inter-view-only references must be kept until the AU ends */
        const gboolean StoreInterViewOnlyRefFlag =
            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_AU_END) &&
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
        while (priv->dpb_count == priv->dpb_size) {
            if (!StoreInterViewOnlyRefFlag) {
                /* Nothing with lower POC pending: output directly */
                if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
                    return dpb_output(decoder, NULL, picture);
            if (!dpb_bump(decoder, picture))

    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
/* Resizes the DPB to hold @dpb_size frame stores, growing the backing
 * array (zero-initializing the new tail) when it exceeds the largest
 * size allocated so far. The effective size only grows. */
dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    if (dpb_size < priv->dpb_count)

    if (dpb_size > priv->dpb_size_max) {
        priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
        memset(&priv->dpb[priv->dpb_size_max], 0,
            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
        priv->dpb_size_max = dpb_size;

    if (priv->dpb_size < dpb_size)
        priv->dpb_size = dpb_size;
    else if (dpb_size < priv->dpb_count)

    GST_DEBUG("DPB size %u", priv->dpb_size);
/* GDestroyNotify for the inter_views array: clears the INTER_VIEW flag
 * and drops the array's reference on the picture */
unref_inter_view(GstVaapiPictureH264 *picture)
    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
    gst_vaapi_picture_unref(picture);
1008 /* Resets MVC resources */
1010 mvc_reset(GstVaapiDecoderH264 *decoder)
1012 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1015 // Resize array of inter-view references
1016 if (!priv->inter_views) {
1017 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1018 (GDestroyNotify)unref_inter_view);
1019 if (!priv->inter_views)
1023 // Resize array of previous frame buffers
1024 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1025 gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1027 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1028 sizeof(*priv->prev_frames));
1029 if (!priv->prev_frames) {
1030 priv->prev_frames_alloc = 0;
1033 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1034 priv->prev_frames[i] = NULL;
1035 priv->prev_frames_alloc = priv->max_views;
/* Maps a GstH264ParserResult onto the corresponding decoder status
 * (OK -> SUCCESS, NO_NAL_END -> NO_DATA, ERROR -> BITSTREAM_PARSER,
 * anything else -> UNKNOWN). */
static GstVaapiDecoderStatus
get_status(GstH264ParserResult result)
    GstVaapiDecoderStatus status;

    case GST_H264_PARSER_OK:
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
    case GST_H264_PARSER_NO_NAL_END:
        status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
    case GST_H264_PARSER_ERROR:
        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
        status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Tears down per-stream state: drops the current picture and cached
 * parser infos, clears the whole DPB, releases the inter-view array and
 * frees the NAL parser. Safe to call repeatedly. */
gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);

    dpb_clear(decoder, NULL);

    if (priv->inter_views) {
        g_ptr_array_unref(priv->inter_views);
        priv->inter_views = NULL;

    gst_h264_nal_parser_free(priv->parser);
    priv->parser = NULL;
/* (Re)opens the decoder: closes any previous state, then allocates a
 * fresh NAL parser */
gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_decoder_h264_close(decoder);

    priv->parser = gst_h264_nal_parser_new();
/* Final destruction: closes the stream state, frees the prev_frames
 * array and releases every cached SPS/PPS parser info */
gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    gst_vaapi_decoder_h264_close(decoder);

    g_free(priv->prev_frames);
    priv->prev_frames = NULL;
    priv->prev_frames_alloc = 0;

    for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);

    for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
        gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
    gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
/* Instance construction: sets defaults — unknown profile, VLD
 * entrypoint, 4:2:0 chroma, progressive frame structure */
gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
    priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
    priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
    priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
    priv->progressive_sequence = TRUE;
1138 /* Activates the supplied PPS */
/* Makes priv->pps[pps->id] the active PPS. Returns the parsed PPS data,
 * or NULL if no parser info was ever recorded for that id. */
1140 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1142     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1143     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1145     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1146     return pi ? &pi->data.pps : NULL;
1149 /* Returns the active PPS */
/* NULL when no PPS has been activated yet. */
1150 static inline GstH264PPS *
1151 get_pps(GstVaapiDecoderH264 *decoder)
1153     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1155     return pi ? &pi->data.pps : NULL;
1158 /* Activate the supplied SPS */
/* Makes priv->sps[sps->id] the active SPS. Returns the parsed SPS data,
 * or NULL if no parser info was ever recorded for that id. */
1160 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1162     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1163     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1165     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1166     return pi ? &pi->data.sps : NULL;
1169 /* Returns the active SPS */
/* NULL when no SPS has been activated yet. */
1170 static inline GstH264SPS *
1171 get_sps(GstVaapiDecoderH264 *decoder)
1173     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1175     return pi ? &pi->data.sps : NULL;
/* Appends @profile to @profiles and, for MAIN, also appends the
 * compatible HIGH profile; updates *n_profiles_ptr accordingly.
 * NOTE(review): the enclosing switch statement's scaffolding lines are
 * missing from this extraction -- only the MAIN case is visible. */
1179 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1180     GstVaapiProfile profile)
1182     guint n_profiles = *n_profiles_ptr;
1184     profiles[n_profiles++] = profile;
1186     case GST_VAAPI_PROFILE_H264_MAIN:
1187         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1192     *n_profiles_ptr = n_profiles;
1195 /* Fills in compatible profiles for MVC decoding */
/* For drivers known to support slice-level decoding (matched by vendor
 * string prefix, currently only "Intel i965 driver") and a DPB of at
 * most 16 frames, also advertise the HIGH profile as compatible. */
1197 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1198     guint *n_profiles_ptr, guint dpb_size)
1200     const gchar * const vendor_string =
1201         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1203     gboolean add_high_profile = FALSE;
1208     const struct map *m;
1210     // Drivers that support slice level decoding
1211     if (vendor_string && dpb_size <= 16) {
1212         static const struct map drv_names[] = {
1213             { "Intel i965 driver", 17 },
1216         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1217             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1218                 add_high_profile = TRUE;
1222     if (add_high_profile)
1223         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps sps->profile_idc to a GstVaapiProfile and builds a list of
 * compatible fallback profiles (per A.2.2 constraint flags and the MVC
 * annex), then returns the first one the VA display can actually
 * decode, or GST_VAAPI_PROFILE_UNKNOWN. Short-circuits when the
 * preferred profile is already the current one. */
1226 static GstVaapiProfile
1227 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1229     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1230     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1231     GstVaapiProfile profile, profiles[4];
1232     guint i, n_profiles = 0;
1234     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1236         return GST_VAAPI_PROFILE_UNKNOWN;
1238     fill_profiles(profiles, &n_profiles, profile);
1240     case GST_VAAPI_PROFILE_H264_BASELINE:
1241         if (sps->constraint_set1_flag) {  // A.2.2 (main profile)
1242             fill_profiles(profiles, &n_profiles,
1243                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1244             fill_profiles(profiles, &n_profiles,
1245                 GST_VAAPI_PROFILE_H264_MAIN);
1248     case GST_VAAPI_PROFILE_H264_EXTENDED:
1249         if (sps->constraint_set1_flag) {  // A.2.2 (main profile)
1250             fill_profiles(profiles, &n_profiles,
1251                 GST_VAAPI_PROFILE_H264_MAIN);
1254     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1255         if (priv->max_views == 2) {
1256             fill_profiles(profiles, &n_profiles,
1257                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1259         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1261     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1262         if (sps->frame_mbs_only_flag) {
1263             fill_profiles(profiles, &n_profiles,
1264                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1266         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1272     /* If the preferred profile (profiles[0]) matches one that we already
1273        found, then just return it now instead of searching for it again */
1274     if (profiles[0] == priv->profile)
1275         return priv->profile;
1277     for (i = 0; i < n_profiles; i++) {
1278         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1281     return GST_VAAPI_PROFILE_UNKNOWN;
/* Ensures the VA context matches the active SPS: recomputes DPB size,
 * profile, chroma type and macroblock dimensions, and resets the
 * context (plus DPB and MVC state) whenever any of them changed.
 * Returns a GstVaapiDecoderStatus; SUCCESS without work when nothing
 * changed and a context already exists. */
1284 static GstVaapiDecoderStatus
1285 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1287     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1288     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1289     GstVaapiContextInfo info;
1290     GstVaapiProfile profile;
1291     GstVaapiChromaType chroma_type;
1292     gboolean reset_context = FALSE;
1293     guint mb_width, mb_height, dpb_size;
1295     dpb_size = get_max_dec_frame_buffering(sps);
1296     if (priv->dpb_size < dpb_size) {
1297         GST_DEBUG("DPB size increased");
1298         reset_context = TRUE;
1301     profile = get_profile(decoder, sps, dpb_size);
1303         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1304         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1307     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1308         GST_DEBUG("profile changed");
1309         reset_context = TRUE;
1310         priv->profile = profile;
1313     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1315         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1316         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1319     if (priv->chroma_type != chroma_type) {
1320         GST_DEBUG("chroma format changed");
1321         reset_context = TRUE;
1322         priv->chroma_type = chroma_type;
/* Map units are doubled for interlaced (field-coded) sequences. */
1325     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1326     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1327         !sps->frame_mbs_only_flag;
1328     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1329         GST_DEBUG("size changed");
1330         reset_context = TRUE;
1331         priv->mb_width  = mb_width;
1332         priv->mb_height = mb_height;
1335     priv->progressive_sequence = sps->frame_mbs_only_flag;
1336     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1338     gst_vaapi_decoder_set_pixel_aspect_ratio(
1340         sps->vui_parameters.par_n,
1341         sps->vui_parameters.par_d
1344     if (!reset_context && priv->has_context)
1345         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1347     /* XXX: fix surface size when cropping is implemented */
1348     info.profile    = priv->profile;
1349     info.entrypoint = priv->entrypoint;
1350     info.chroma_type = priv->chroma_type;
1351     info.width      = sps->width;
1352     info.height     = sps->height;
1353     info.ref_frames = dpb_size;
1355     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1356         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1357     priv->has_context = TRUE;
1360     if (!dpb_reset(decoder, dpb_size))
1361         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1363     /* Reset MVC data */
1364     if (!mvc_reset(decoder))
1365         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1366     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Converts all six 4x4 scaling lists from the PPS from zig-zag scan to
 * raster order into the VA IQ-matrix buffer. @sps is unused here. */
1370 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1371     const GstH264SPS *sps)
1375     /* There are always 6 4x4 scaling lists */
1376     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1377     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1379     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1380         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1381             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Converts the 8x8 scaling lists (2 for 4:2:0/4:2:2, 6 for 4:4:4) from
 * zig-zag to raster order. No-op when 8x8 transform mode is disabled. */
1385 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1386     const GstH264SPS *sps)
1390     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1391     if (!pps->transform_8x8_mode_flag)
1394     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1395     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1397     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1398     for (i = 0; i < n; i++) {
1399         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1400             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates the picture's VA IQ-matrix buffer and fills it with the
 * raster-order 4x4 and 8x8 scaling lists from the active PPS/SPS.
 * Rejects 4:4:4 streams since ScalingList8x8[] cannot hold 6 lists. */
1404 static GstVaapiDecoderStatus
1405 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1407     GstVaapiPicture * const base_picture = &picture->base;
1408     GstH264PPS * const pps = get_pps(decoder);
1409     GstH264SPS * const sps = get_sps(decoder);
1410     VAIQMatrixBufferH264 *iq_matrix;
1412     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1413     if (!base_picture->iq_matrix) {
1414         GST_ERROR("failed to allocate IQ matrix");
1415         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1417     iq_matrix = base_picture->iq_matrix->param;
1419     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1420        is not large enough to hold lists for 4:4:4 */
1421     if (sps->chroma_format_idc == 3)
1422         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1424     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1425     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1427     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1430 static inline gboolean
1431 is_valid_state(guint state, guint ref_state)
1433 return (state & ref_state) == ref_state;
/* Finalizes the current picture: runs reference picture marking, adds
 * it to the DPB and submits it for VA decoding, then drops the local
 * reference. Returns DROP_FRAME when no valid picture was accumulated.
 * NOTE(review): this function uses error/drop labels whose lines are
 * missing from this extraction -- the visible statements are the
 * success path and label bodies. */
1436 static GstVaapiDecoderStatus
1437 decode_current_picture(GstVaapiDecoderH264 *decoder)
1439     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1440     GstVaapiPictureH264 * const picture = priv->current_picture;
1442     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1444     priv->decoder_state = 0;
1447         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1449     if (!exec_ref_pic_marking(decoder, picture))
1451     if (!dpb_add(decoder, picture))
1453     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1455     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1456     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1459     /* XXX: fix for cases where first field failed to be decoded */
1460     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1461     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1464     priv->decoder_state = 0;
1465     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into the unit's parser info, resets the
 * parser state and the view count, and records GOT_SPS on success. */
1468 static GstVaapiDecoderStatus
1469 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1471     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1472     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1473     GstH264SPS * const sps = &pi->data.sps;
1474     GstH264ParserResult result;
1476     GST_DEBUG("parse SPS");
1478     priv->parser_state = 0;
1480     /* Variables that don't have inferred values per the H.264
1481        standard but that should get a default value anyway */
1482     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1484     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1485     if (result != GST_H264_PARSER_OK)
1486         return get_status(result);
1488     /* Reset defaults */
1489     priv->max_views = 1;
1491     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1492     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS (MVC) NAL unit; records GOT_SPS on success.
 * Unlike parse_sps(), does not reset parser_state or max_views. */
1495 static GstVaapiDecoderStatus
1496 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1498     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1499     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1500     GstH264SPS * const sps = &pi->data.sps;
1501     GstH264ParserResult result;
1503     GST_DEBUG("parse subset SPS");
1505     /* Variables that don't have inferred values per the H.264
1506        standard but that should get a default value anyway */
1507     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1509     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1511     if (result != GST_H264_PARSER_OK)
1512         return get_status(result);
1514     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1515     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit. Parser state is narrowed to GOT_SPS first
 * (a new PPS invalidates any previously seen PPS/slice), then GOT_PPS
 * is set on success. */
1518 static GstVaapiDecoderStatus
1519 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1521     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1522     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1523     GstH264PPS * const pps = &pi->data.pps;
1524     GstH264ParserResult result;
1526     GST_DEBUG("parse PPS");
1528     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1530     /* Variables that don't have inferred values per the H.264
1531        standard but that should get a default value anyway */
1532     pps->slice_group_map_type = 0;
1533     pps->slice_group_change_rate_minus1 = 0;
1535     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1536     if (result != GST_H264_PARSER_OK)
1537         return get_status(result);
1539     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1540     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses SEI messages into pi->data.sei (a GArray owned by the parser
 * info). A parse failure is logged and propagated via get_status(). */
1543 static GstVaapiDecoderStatus
1544 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1546     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1547     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1548     GArray ** const sei_ptr = &pi->data.sei;
1549     GstH264ParserResult result;
1551     GST_DEBUG("parse SEI");
1553     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1554     if (result != GST_H264_PARSER_OK) {
1555         GST_WARNING("failed to parse SEI messages");
1556         return get_status(result);
1558     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header. For MVC, the NAL extension info is copied
 * from the immediately preceding Prefix NAL unit, or inferred per
 * H.7.4.1.1 when no prefix was seen. Also updates the maximum view
 * count and derives the unit's view id / view order index. */
1561 static GstVaapiDecoderStatus
1562 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1564     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1565     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1566     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1567     GstH264NalUnit * const nalu = &pi->nalu;
1569     GstH264ParserResult result;
1572     GST_DEBUG("parse slice");
/* Keep only the SPS/PPS bits: a new slice invalidates GOT_SLICE. */
1574     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1575                            GST_H264_VIDEO_STATE_GOT_PPS);
1577     /* Propagate Prefix NAL unit info, if necessary */
1578     switch (nalu->type) {
1579     case GST_H264_NAL_SLICE:
1580     case GST_H264_NAL_SLICE_IDR: {
1581         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1582         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1583             /* MVC sequences shall have a Prefix NAL unit immediately
1584                preceding this NAL unit */
1585             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1586             pi->nalu.extension = prev_pi->nalu.extension;
1589             /* In the very unlikely case there is no Prefix NAL unit
1590                immediately preceding this NAL unit, try to infer some
1591                defaults (H.7.4.1.1) */
1592             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1593             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1594             nalu->idr_pic_flag = !mvc->non_idr_flag;
1595             mvc->priority_id = 0;
1597             mvc->temporal_id = 0;
1598             mvc->anchor_pic_flag = 0;
1599             mvc->inter_view_flag = 1;
1605     /* Variables that don't have inferred values per the H.264
1606        standard but that should get a default value anyway */
1607     slice_hdr->cabac_init_idc = 0;
1608     slice_hdr->direct_spatial_mv_pred_flag = 0;
1610     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1611         slice_hdr, TRUE, TRUE);
1612     if (result != GST_H264_PARSER_OK)
1613         return get_status(result);
1615     sps = slice_hdr->pps->sequence;
1617     /* Update MVC data */
1618     num_views = get_num_views(sps);
1619     if (priv->max_views < num_views) {
1620         priv->max_views = num_views;
1621         GST_DEBUG("maximum number of views changed to %u", num_views);
1623     pi->view_id = get_view_id(&pi->nalu);
1624     pi->voc = get_view_order_index(sps, pi->view_id);
1626     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1627     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed SPS parser info in the per-id SPS table. */
1630 static GstVaapiDecoderStatus
1631 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1633     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1634     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1635     GstH264SPS * const sps = &pi->data.sps;
1637     GST_DEBUG("decode SPS");
1639     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1640     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed subset SPS in the same per-id table as plain SPS;
 * subset SPS ids share that namespace here. */
1643 static GstVaapiDecoderStatus
1644 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1646     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1647     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1648     GstH264SPS * const sps = &pi->data.sps;
1650     GST_DEBUG("decode subset SPS");
1652     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1653     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed PPS parser info in the per-id PPS table. */
1656 static GstVaapiDecoderStatus
1657 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1659     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1661     GstH264PPS * const pps = &pi->data.pps;
1663     GST_DEBUG("decode PPS");
1665     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1666     return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* End-of-sequence handling: finishes the current picture and flushes
 * the whole DPB. A non-SUCCESS status from decode_current_picture() is
 * propagated (the early-return line is missing from this extraction). */
1669 static GstVaapiDecoderStatus
1670 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1672     GstVaapiDecoderStatus status;
1674     GST_DEBUG("decode sequence-end");
1676     status = decode_current_picture(decoder);
1677     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1680     dpb_flush(decoder, NULL);
1681     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1684 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Derives poc_msb/poc_lsb from the slice's pic_order_cnt_lsb and the
 * previous picture's values (with IDR and mmco5 resets), then fills
 * priv->field_poc[] according to the picture structure. */
1687     GstVaapiDecoderH264 *decoder,
1688     GstVaapiPictureH264 *picture,
1689     GstH264SliceHdr     *slice_hdr
1692     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1693     GstH264SPS * const sps = get_sps(decoder);
1694     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1697     GST_DEBUG("decode picture order count type 0");
1699     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1700         priv->prev_poc_msb = 0;
1701         priv->prev_poc_lsb = 0;
1703     else if (priv->prev_pic_has_mmco5) {
1704         priv->prev_poc_msb = 0;
1705         priv->prev_poc_lsb =
1706             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1707              0 : priv->field_poc[TOP_FIELD]);
1710         priv->prev_poc_msb = priv->poc_msb;
1711         priv->prev_poc_lsb = priv->poc_lsb;
/* (8-3): detect lsb wrap-around in either direction. */
1715     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1716     if (priv->poc_lsb < priv->prev_poc_lsb &&
1717         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1718         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1719     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1720              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1721         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1723         priv->poc_msb = priv->prev_poc_msb;
1725     temp_poc = priv->poc_msb + priv->poc_lsb;
1726     switch (picture->structure) {
1727     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1729         priv->field_poc[TOP_FIELD] = temp_poc;
1730         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1731             slice_hdr->delta_pic_order_cnt_bottom;
1733     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1735         priv->field_poc[TOP_FIELD] = temp_poc;
1737     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1739         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1744 /* 8.2.1.2 - Decoding process for picture order count type 1 */
/* Derives frame_num_offset and an expected POC from the SPS reference
 * POC cycle, then fills priv->field_poc[] per picture structure. */
1747     GstVaapiDecoderH264 *decoder,
1748     GstVaapiPictureH264 *picture,
1749     GstH264SliceHdr     *slice_hdr
1752     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1753     GstH264SPS * const sps = get_sps(decoder);
1754     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1755     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1758     GST_DEBUG("decode picture order count type 1");
1760     if (priv->prev_pic_has_mmco5)
1761         prev_frame_num_offset = 0;
1763         prev_frame_num_offset = priv->frame_num_offset;
/* (8-6 / 8-7): frame_num wrap detection. */
1766     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1767         priv->frame_num_offset = 0;
1768     else if (priv->prev_frame_num > priv->frame_num)
1769         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1771         priv->frame_num_offset = prev_frame_num_offset;
1774     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1775         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1778     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1779         abs_frame_num = abs_frame_num - 1;
1781     if (abs_frame_num > 0) {
1782         gint32 expected_delta_per_poc_cycle;
1783         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1785         expected_delta_per_poc_cycle = 0;
1786         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1787             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1790         poc_cycle_cnt = (abs_frame_num - 1) /
1791             sps->num_ref_frames_in_pic_order_cnt_cycle;
1792         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1793             sps->num_ref_frames_in_pic_order_cnt_cycle;
1796         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1797         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1798             expected_poc += sps->offset_for_ref_frame[i];
1802     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1803         expected_poc += sps->offset_for_non_ref_pic;
1806     switch (picture->structure) {
1807     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1808         priv->field_poc[TOP_FIELD] = expected_poc +
1809             slice_hdr->delta_pic_order_cnt[0];
1810         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1811             sps->offset_for_top_to_bottom_field +
1812             slice_hdr->delta_pic_order_cnt[1];
1814     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1815         priv->field_poc[TOP_FIELD] = expected_poc +
1816             slice_hdr->delta_pic_order_cnt[0];
1818     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1819         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1820             sps->offset_for_top_to_bottom_field +
1821             slice_hdr->delta_pic_order_cnt[0];
1826 /* 8.2.1.3 - Decoding process for picture order count type 2 */
/* POC is derived directly from frame_num (2*fn for references,
 * 2*fn - 1 for non-references, 0 for IDR); both field POCs get the
 * same value as applicable. */
1829     GstVaapiDecoderH264 *decoder,
1830     GstVaapiPictureH264 *picture,
1831     GstH264SliceHdr     *slice_hdr
1834     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1835     GstH264SPS * const sps = get_sps(decoder);
1836     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1837     gint32 prev_frame_num_offset, temp_poc;
1839     GST_DEBUG("decode picture order count type 2");
1841     if (priv->prev_pic_has_mmco5)
1842         prev_frame_num_offset = 0;
1844         prev_frame_num_offset = priv->frame_num_offset;
1847     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1848         priv->frame_num_offset = 0;
1849     else if (priv->prev_frame_num > priv->frame_num)
1850         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1852         priv->frame_num_offset = prev_frame_num_offset;
/* NOTE(review): the IDR branch's "temp_poc = 0;" line (orig. 1856)
 * is missing from this extraction. */
1855     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1857     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1858         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1860         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1863     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1864         priv->field_poc[TOP_FIELD] = temp_poc;
1865     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1866         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1869 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches on sps->pic_order_cnt_type, copies the applicable
 * priv->field_poc[] entries into the picture, and sets base.poc to
 * the minimum of both field POCs. */
1872     GstVaapiDecoderH264 *decoder,
1873     GstVaapiPictureH264 *picture,
1874     GstH264SliceHdr     *slice_hdr
1877     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1878     GstH264SPS * const sps = get_sps(decoder);
1880     switch (sps->pic_order_cnt_type) {
1882         init_picture_poc_0(decoder, picture, slice_hdr);
1885         init_picture_poc_1(decoder, picture, slice_hdr);
1888         init_picture_poc_2(decoder, picture, slice_hdr);
1892     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1893         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1894     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1895         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1896     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
/* qsort comparator: pic_num, decreasing. */
1900 compare_picture_pic_num_dec(const void *a, const void *b)
1902     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1903     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1905     return picB->pic_num - picA->pic_num;
/* qsort comparator: long_term_pic_num, increasing. */
1909 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1911     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1912     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1914     return picA->long_term_pic_num - picB->long_term_pic_num;
/* qsort comparator: picture order count, decreasing. */
1918 compare_picture_poc_dec(const void *a, const void *b)
1920     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1921     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1923     return picB->base.poc - picA->base.poc;
/* qsort comparator: picture order count, increasing. */
1927 compare_picture_poc_inc(const void *a, const void *b)
1929     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1930     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1932     return picA->base.poc - picB->base.poc;
/* qsort comparator: frame_num_wrap, decreasing. */
1936 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1938     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1939     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1941     return picB->frame_num_wrap - picA->frame_num_wrap;
/* qsort comparator: long_term_frame_idx, increasing. */
1945 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1947     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1948     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1950     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1953 /* 8.2.4.1 - Decoding process for picture numbers */
/* Computes frame_num_wrap/pic_num for short-term references and
 * long_term_pic_num for long-term references, restricted to pictures
 * in the same view as @picture. */
1955 init_picture_refs_pic_num(
1956     GstVaapiDecoderH264 *decoder,
1957     GstVaapiPictureH264 *picture,
1958     GstH264SliceHdr     *slice_hdr
1961     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1962     GstH264SPS * const sps = get_sps(decoder);
1963     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1966     GST_DEBUG("decode picture numbers");
1968     for (i = 0; i < priv->short_ref_count; i++) {
1969         GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Skip inter-view pictures; only same-view references get numbers. */
1972         if (pic->base.view_id != picture->base.view_id)
/* (8-27): wrap frame_num relative to the current picture. */
1976         if (pic->frame_num > priv->frame_num)
1977             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1979             pic->frame_num_wrap = pic->frame_num;
1981         // (8-28, 8-30, 8-31)
1982         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1983             pic->pic_num = pic->frame_num_wrap;
1985             if (pic->structure == picture->structure)
1986                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1988                 pic->pic_num = 2 * pic->frame_num_wrap;
1992     for (i = 0; i < priv->long_ref_count; i++) {
1993         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1996         if (pic->base.view_id != picture->base.view_id)
1999         // (8-29, 8-32, 8-33)
2000         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2001             pic->long_term_pic_num = pic->long_term_frame_idx;
2003             if (pic->structure == picture->structure)
2004                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2006                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts a reference picture list in place using one of the
 * compare_picture_* qsort helpers above. */
2011 #define SORT_REF_LIST(list, n, compare_func) \
2012     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 helper: appends fields from @ref_list to @RefPicList,
 * alternating parity -- first a field matching @picture_structure, then
 * one of the opposite parity -- until the source list is exhausted.
 * NOTE(review): the declarations of i/j/n and loop scaffolding lines
 * are partly missing from this extraction. */
2015 init_picture_refs_fields_1(
2016     guint picture_structure,
2017     GstVaapiPictureH264 *RefPicList[32],
2018     guint *RefPicList_count,
2019     GstVaapiPictureH264 *ref_list[32],
2020     guint ref_list_count
2027     n = *RefPicList_count;
2030         for (; i < ref_list_count; i++) {
2031             if (ref_list[i]->structure == picture_structure) {
2032                 RefPicList[n++] = ref_list[i++];
2036         for (; j < ref_list_count; j++) {
2037             if (ref_list[j]->structure != picture_structure) {
2038                 RefPicList[n++] = ref_list[j++];
2042     } while (i < ref_list_count || j < ref_list_count);
2043     *RefPicList_count = n;
/* Builds a field reference picture list per 8.2.4.2.5: short-term
 * fields first, then long-term fields, each interleaved by parity. */
2047 init_picture_refs_fields(
2048     GstVaapiPictureH264 *picture,
2049     GstVaapiPictureH264 *RefPicList[32],
2050     guint *RefPicList_count,
2051     GstVaapiPictureH264 *short_ref[32],
2052     guint short_ref_count,
2053     GstVaapiPictureH264 *long_ref[32],
2054     guint long_ref_count
2059     /* 8.2.4.2.5 - reference picture lists in fields */
2060     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2061         short_ref, short_ref_count);
2062     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2063         long_ref, long_ref_count);
2064     *RefPicList_count = n;
2067 /* Finds the inter-view reference picture with the supplied view id */
/* Linear scan of priv->inter_views; logs a warning and returns NULL
 * (return line missing from this extraction) when not found. */
2068 static GstVaapiPictureH264 *
2069 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2071     GPtrArray * const inter_views = decoder->priv.inter_views;
2074     for (i = 0; i < inter_views->len; i++) {
2075         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2076         if (picture->base.view_id == view_id)
2080     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2085 /* Checks whether the view id exists in the supplied list of view ids */
2085 /* Checks whether the view id exists in the supplied list of view ids */
2087 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2091 for (i = 0; i < num_view_ids; i++) {
2092 if (view_ids[i] == view_id)
/* Checks @view_id against the anchor or non-anchor L0/L1 reference
 * lists of the SPS MVC @view, selected by the (is_anchor) flag whose
 * parameter/branch lines are partly missing from this extraction. */
2099 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2103         return (find_view_id(view_id, view->anchor_ref_l0,
2104                     view->num_anchor_refs_l0) ||
2105                 find_view_id(view_id, view->anchor_ref_l1,
2106                     view->num_anchor_refs_l1));
2108     return (find_view_id(view_id, view->non_anchor_ref_l0,
2109                 view->num_non_anchor_refs_l0) ||
2110             find_view_id(view_id, view->non_anchor_ref_l1,
2111                 view->num_non_anchor_refs_l1));
2114 /* Checks whether the inter-view reference picture with the supplied
2115    view id is used for decoding the current view component picture */
/* FALSE for non-MVC pictures or when the active SPS carries no MVC
 * extension; otherwise looks @view_id up in the current picture's
 * view dependency lists. */
2117 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2118     guint16 view_id, GstVaapiPictureH264 *picture)
2120     const GstH264SPS * const sps = get_sps(decoder);
2123     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2124         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2127     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2128     return find_view_id_in_view(view_id,
2129         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2132 /* Checks whether the supplied inter-view reference picture is used
2133    for decoding the next view component pictures */
/* Scans the dependency lists of all views with a higher view order
 * index than @picture's for its view id. */
2135 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2136     GstVaapiPictureH264 *picture)
2138     const GstH264SPS * const sps = get_sps(decoder);
2142     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2143         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2146     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2147     num_views = sps->extension.mvc.num_views_minus1 + 1;
2148     for (i = picture->base.voc + 1; i < num_views; i++) {
2149         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2150         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2156 /* H.8.2.1 - Initialization process for inter-view prediction references */
/* Appends up to (num_refs - current count) inter-view reference
 * pictures, resolved by view id, to @ref_list; missing pictures are
 * skipped (find_inter_view_reference may return NULL). */
2158 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2159     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2160     const guint16 *view_ids, guint num_view_ids)
2164     n = *ref_list_count_ptr;
2165     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2166         GstVaapiPictureH264 * const pic =
2167             find_inter_view_reference(decoder, view_ids[j]);
2169             ref_list[n++] = pic;
2171     *ref_list_count_ptr = n;
/* Appends inter-view references to RefPicList0 or RefPicList1
 * (selected by @list), using the anchor or non-anchor dependency list
 * of the picture's view from the active subset SPS. */
2175 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2176     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2178     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2179     const GstH264SPS * const sps = get_sps(decoder);
2180     const GstH264SPSExtMVCView *view;
2182     GST_DEBUG("initialize reference picture list for inter-view prediction");
2184     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2186     view = &sps->extension.mvc.view[picture->base.voc];
2188 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2189         init_picture_refs_mvc_1(decoder,                                \
2190             priv->RefPicList##ref_list,                                 \
2191             &priv->RefPicList##ref_list##_count,                        \
2192             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2193             view->view_list##_l##ref_list,                              \
2194             view->num_##view_list##s_l##ref_list);                      \
2198         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2199             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2201             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2204         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2205             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2207             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2210 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds RefPicList0 for P/SP slices: for frames (8.2.4.2.1),
 * short-term refs sorted by decreasing pic_num followed by long-term
 * refs by increasing long_term_pic_num; for fields (8.2.4.2.2), the
 * sorted frame lists are interleaved by parity. MVC inter-view
 * references are appended last. */
2214 init_picture_refs_p_slice(
2215     GstVaapiDecoderH264 *decoder,
2216     GstVaapiPictureH264 *picture,
2217     GstH264SliceHdr     *slice_hdr
2220     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2221     GstVaapiPictureH264 **ref_list;
2224     GST_DEBUG("decode reference picture list for P and SP slices");
2226     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2227         /* 8.2.4.2.1 - P and SP slices in frames */
2228         if (priv->short_ref_count > 0) {
2229             ref_list = priv->RefPicList0;
2230             for (i = 0; i < priv->short_ref_count; i++)
2231                 ref_list[i] = priv->short_ref[i];
2232             SORT_REF_LIST(ref_list, i, pic_num_dec);
2233             priv->RefPicList0_count += i;
2236         if (priv->long_ref_count > 0) {
2237             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2238             for (i = 0; i < priv->long_ref_count; i++)
2239                 ref_list[i] = priv->long_ref[i];
2240             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2241             priv->RefPicList0_count += i;
2245         /* 8.2.4.2.2 - P and SP slices in fields */
2246         GstVaapiPictureH264 *short_ref[32];
2247         guint short_ref_count = 0;
2248         GstVaapiPictureH264 *long_ref[32];
2249         guint long_ref_count = 0;
2251         if (priv->short_ref_count > 0) {
2252             for (i = 0; i < priv->short_ref_count; i++)
2253                 short_ref[i] = priv->short_ref[i];
2254             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2255             short_ref_count = i;
2258         if (priv->long_ref_count > 0) {
2259             for (i = 0; i < priv->long_ref_count; i++)
2260                 long_ref[i] = priv->long_ref[i];
2261             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2265         init_picture_refs_fields(
2267             priv->RefPicList0, &priv->RefPicList0_count,
2268             short_ref, short_ref_count,
2269             long_ref, long_ref_count
2273     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2275         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* 8.2.4.2.3 / 8.2.4.2.4 - Initialization process for the reference
 * picture lists RefPicList0 and RefPicList1 for B slices.
 *
 * List 0: short-term refs with POC < current (decreasing POC), then
 * POC >= current (increasing POC), then long-term refs.  List 1 is
 * the mirror ordering.  If both lists come out identical (and have
 * more than one entry), the first two entries of RefPicList1 are
 * swapped, as required by 8.2.4.2.3/8.2.4.2.4.  MVC inter-view
 * references are appended to both lists afterwards (H.8.2.1).
 * NOTE(review): this listing has lines elided; comments describe only
 * the visible code. */
2280 init_picture_refs_b_slice(
2281 GstVaapiDecoderH264 *decoder,
2282 GstVaapiPictureH264 *picture,
2283 GstH264SliceHdr *slice_hdr
2286 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2287 GstVaapiPictureH264 **ref_list;
2290 GST_DEBUG("decode reference picture list for B slices");
2292 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2293 /* 8.2.4.2.3 - B slices in frames */
2296 if (priv->short_ref_count > 0) {
2297 // 1. Short-term references
/* List 0, part 1: refs preceding the current POC, decreasing POC */
2298 ref_list = priv->RefPicList0;
2299 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2300 if (priv->short_ref[i]->base.poc < picture->base.poc)
2301 ref_list[n++] = priv->short_ref[i];
2303 SORT_REF_LIST(ref_list, n, poc_dec);
2304 priv->RefPicList0_count += n;
/* List 0, part 2: refs at/after the current POC, increasing POC */
2306 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2307 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2308 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2309 ref_list[n++] = priv->short_ref[i];
2311 SORT_REF_LIST(ref_list, n, poc_inc);
2312 priv->RefPicList0_count += n;
2315 if (priv->long_ref_count > 0) {
2316 // 2. Long-term references
2317 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2318 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2319 ref_list[n++] = priv->long_ref[i];
2320 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2321 priv->RefPicList0_count += n;
2325 if (priv->short_ref_count > 0) {
2326 // 1. Short-term references
/* List 1, part 1: refs after the current POC, increasing POC */
2327 ref_list = priv->RefPicList1;
2328 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2329 if (priv->short_ref[i]->base.poc > picture->base.poc)
2330 ref_list[n++] = priv->short_ref[i];
2332 SORT_REF_LIST(ref_list, n, poc_inc);
2333 priv->RefPicList1_count += n;
/* List 1, part 2: refs at/before the current POC, decreasing POC */
2335 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2336 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2337 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2338 ref_list[n++] = priv->short_ref[i];
2340 SORT_REF_LIST(ref_list, n, poc_dec);
2341 priv->RefPicList1_count += n;
2344 if (priv->long_ref_count > 0) {
2345 // 2. Long-term references
2346 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2347 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2348 ref_list[n++] = priv->long_ref[i];
2349 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2350 priv->RefPicList1_count += n;
2354 /* 8.2.4.2.4 - B slices in fields */
/* Candidate frame lists, interleaved per field parity below */
2355 GstVaapiPictureH264 *short_ref0[32];
2356 guint short_ref0_count = 0;
2357 GstVaapiPictureH264 *short_ref1[32];
2358 guint short_ref1_count = 0;
2359 GstVaapiPictureH264 *long_ref[32];
2360 guint long_ref_count = 0;
2362 /* refFrameList0ShortTerm */
2363 if (priv->short_ref_count > 0) {
2364 ref_list = short_ref0;
2365 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2366 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2367 ref_list[n++] = priv->short_ref[i];
2369 SORT_REF_LIST(ref_list, n, poc_dec);
2370 short_ref0_count += n;
2372 ref_list = &short_ref0[short_ref0_count];
2373 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2374 if (priv->short_ref[i]->base.poc > picture->base.poc)
2375 ref_list[n++] = priv->short_ref[i];
2377 SORT_REF_LIST(ref_list, n, poc_inc);
2378 short_ref0_count += n;
2381 /* refFrameList1ShortTerm */
2382 if (priv->short_ref_count > 0) {
2383 ref_list = short_ref1;
2384 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2385 if (priv->short_ref[i]->base.poc > picture->base.poc)
2386 ref_list[n++] = priv->short_ref[i];
2388 SORT_REF_LIST(ref_list, n, poc_inc);
2389 short_ref1_count += n;
2391 ref_list = &short_ref1[short_ref1_count];
2392 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2393 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2394 ref_list[n++] = priv->short_ref[i];
2396 SORT_REF_LIST(ref_list, n, poc_dec);
2397 short_ref1_count += n;
2400 /* refFrameListLongTerm */
2401 if (priv->long_ref_count > 0) {
2402 for (i = 0; i < priv->long_ref_count; i++)
2403 long_ref[i] = priv->long_ref[i];
2404 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
/* 8.2.4.2.5 - build the two field reference lists from the
 * candidate frame lists */
2408 init_picture_refs_fields(
2410 priv->RefPicList0, &priv->RefPicList0_count,
2411 short_ref0, short_ref0_count,
2412 long_ref, long_ref_count
2415 init_picture_refs_fields(
2417 priv->RefPicList1, &priv->RefPicList1_count,
2418 short_ref1, short_ref1_count,
2419 long_ref, long_ref_count
2423 /* Check whether RefPicList1 is identical to RefPicList0, then
2424 swap if necessary */
2425 if (priv->RefPicList1_count > 1 &&
2426 priv->RefPicList1_count == priv->RefPicList0_count &&
2427 memcmp(priv->RefPicList0, priv->RefPicList1,
2428 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2429 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2430 priv->RefPicList1[0] = priv->RefPicList1[1];
2431 priv->RefPicList1[1] = tmp;
/* H.8.2.1 - append inter-view references to both lists for MVC */
2434 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2436 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2439 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2443 #undef SORT_REF_LIST
/* Looks up the short-term reference picture with the supplied PicNum
 * in priv->short_ref[] and logs an error if none matches.
 * NOTE(review): the return statements are elided in this listing —
 * callers test the result with `>= 0` and index priv->short_ref[]
 * with it, so it presumably returns the array index or a negative
 * value on failure; confirm against the full source. */
2446 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2448 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2451 for (i = 0; i < priv->short_ref_count; i++) {
2452 if (priv->short_ref[i]->pic_num == pic_num)
2455 GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Looks up the long-term reference picture with the supplied
 * LongTermPicNum in priv->long_ref[] and logs an error if none
 * matches.  NOTE(review): return statements elided in this listing —
 * callers test `>= 0` and index priv->long_ref[] with the result, so
 * it presumably returns the array index or a negative value. */
2461 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2463 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2466 for (i = 0; i < priv->long_ref_count; i++) {
2467 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2470 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* 8.2.4.3.1 / 8.2.4.3.2 / H.8.2.2.3 - Modification process for one
 * reference picture list (list 0 or list 1).
 *
 * Walks the ref_pic_list_modification syntax elements from the slice
 * header and, for each one, shifts the remaining entries up, inserts
 * the requested short-term, long-term or (MVC) inter-view reference
 * at ref_list_idx, and compacts the tail so duplicates of the
 * inserted picture are removed.  Finally truncates/extends the list
 * to num_ref_idx_lX_active_minus1 + 1 entries.
 * NOTE(review): lines are elided in this listing (loop braces,
 * some assignments such as `n = ref_list_idx`); comments describe
 * only the visible code. */
2476 exec_picture_refs_modification_1(
2477 GstVaapiDecoderH264 *decoder,
2478 GstVaapiPictureH264 *picture,
2479 GstH264SliceHdr *slice_hdr,
2483 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2484 GstH264SPS * const sps = get_sps(decoder);
2485 GstH264RefPicListModification *ref_pic_list_modification;
2486 guint num_ref_pic_list_modifications;
2487 GstVaapiPictureH264 **ref_list;
2488 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2489 const guint16 *view_ids = NULL;
2490 guint i, j, n, num_refs, num_view_ids = 0;
2492 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2494 GST_DEBUG("modification process of reference picture list %u", list);
/* Select the list-0 or list-1 state: modification commands, output
 * list, and active reference count.  For MVC, also pick the anchor /
 * non-anchor inter-view id table from the SPS MVC extension. */
2497 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2498 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2499 ref_list = priv->RefPicList0;
2500 ref_list_count_ptr = &priv->RefPicList0_count;
2501 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2503 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2504 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2505 const GstH264SPSExtMVCView * const view =
2506 &sps->extension.mvc.view[picture->base.voc];
2507 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2508 view_ids = view->anchor_ref_l0;
2509 num_view_ids = view->num_anchor_refs_l0;
2512 view_ids = view->non_anchor_ref_l0;
2513 num_view_ids = view->num_non_anchor_refs_l0;
2518 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2519 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2520 ref_list = priv->RefPicList1;
2521 ref_list_count_ptr = &priv->RefPicList1_count;
2522 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2524 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2525 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2526 const GstH264SPSExtMVCView * const view =
2527 &sps->extension.mvc.view[picture->base.voc];
2528 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2529 view_ids = view->anchor_ref_l1;
2530 num_view_ids = view->num_anchor_refs_l1;
2533 view_ids = view->non_anchor_ref_l1;
2534 num_view_ids = view->num_non_anchor_refs_l1;
2538 ref_list_count = *ref_list_count_ptr;
/* 8.2.4.1 - derive MaxPicNum / CurrPicNum.  Field pictures use
 * 2*MaxFrameNum and 2*frame_num+1; frame pictures use MaxFrameNum
 * and frame_num directly. */
2540 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2541 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2542 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2545 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2546 CurrPicNum = slice_hdr->frame_num; // frame_num
2549 picNumPred = CurrPicNum;
2550 picViewIdxPred = -1;
2552 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2553 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
/* idc == 3 terminates the modification loop (end-of-list marker) */
2554 if (l->modification_of_pic_nums_idc == 3)
2557 /* 8.2.4.3.1 - Short-term reference pictures */
2558 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2559 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2560 gint32 picNum, picNumNoWrap;
/* idc 0 subtracts, idc 1 adds abs_diff_pic_num, with wraparound
 * modulo MaxPicNum (picNumNoWrap derivation) */
2563 if (l->modification_of_pic_nums_idc == 0) {
2564 picNumNoWrap = picNumPred - abs_diff_pic_num;
2565 if (picNumNoWrap < 0)
2566 picNumNoWrap += MaxPicNum;
2571 picNumNoWrap = picNumPred + abs_diff_pic_num;
2572 if (picNumNoWrap >= MaxPicNum)
2573 picNumNoWrap -= MaxPicNum;
2575 picNumPred = picNumNoWrap;
2578 picNum = picNumNoWrap;
2579 if (picNum > CurrPicNum)
2580 picNum -= MaxPicNum;
/* Shift entries up to open a slot, insert the matched picture
 * (NULL if it was not found in the DPB) */
2583 for (j = num_refs; j > ref_list_idx; j--)
2584 ref_list[j] = ref_list[j - 1];
2585 found_ref_idx = find_short_term_reference(decoder, picNum);
2586 ref_list[ref_list_idx++] =
2587 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
/* Compact: drop any later duplicate of the inserted picture
 * (same PicNum and same view) */
2589 for (j = ref_list_idx; j <= num_refs; j++) {
2594 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2595 ref_list[j]->pic_num : MaxPicNum;
2596 if (PicNumF != picNum ||
2597 ref_list[j]->base.view_id != picture->base.view_id)
2598 ref_list[n++] = ref_list[j];
2602 /* 8.2.4.3.2 - Long-term reference pictures */
2603 else if (l->modification_of_pic_nums_idc == 2) {
2605 for (j = num_refs; j > ref_list_idx; j--)
2606 ref_list[j] = ref_list[j - 1];
2608 find_long_term_reference(decoder, l->value.long_term_pic_num);
2609 ref_list[ref_list_idx++] =
2610 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2612 for (j = ref_list_idx; j <= num_refs; j++) {
2613 gint32 LongTermPicNumF;
2617 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2618 ref_list[j]->long_term_pic_num : INT_MAX;
2619 if (LongTermPicNumF != l->value.long_term_pic_num ||
2620 ref_list[j]->base.view_id != picture->base.view_id)
2621 ref_list[n++] = ref_list[j];
2625 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2626 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2627 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2628 (l->modification_of_pic_nums_idc == 4 ||
2629 l->modification_of_pic_nums_idc == 5)) {
2630 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2631 gint32 picViewIdx, targetViewId;
/* idc 4 subtracts, idc 5 adds, with wraparound over the number
 * of inter-view reference ids in the SPS MVC table */
2634 if (l->modification_of_pic_nums_idc == 4) {
2635 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2637 picViewIdx += num_view_ids;
2642 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2643 if (picViewIdx >= num_view_ids)
2644 picViewIdx -= num_view_ids;
2646 picViewIdxPred = picViewIdx;
2649 targetViewId = view_ids[picViewIdx];
2652 for (j = num_refs; j > ref_list_idx; j--)
2653 ref_list[j] = ref_list[j - 1];
2654 ref_list[ref_list_idx++] =
2655 find_inter_view_reference(decoder, targetViewId);
/* Compact: keep entries that are not the inserted inter-view
 * picture (different view or different POC) */
2657 for (j = ref_list_idx; j <= num_refs; j++) {
2660 if (ref_list[j]->base.view_id != targetViewId ||
2661 ref_list[j]->base.poc != picture->base.poc)
2662 ref_list[n++] = ref_list[j];
/* Sanity-check: every active entry must be populated */
2668 for (i = 0; i < num_refs; i++)
2670 GST_ERROR("list %u entry %u is empty", list, i);
2672 *ref_list_count_ptr = num_refs;
2675 /* 8.2.4.3 - Modification process for reference picture lists */
/* Applies the slice-header list-modification commands: list 0 for any
 * slice type that carries references (i.e. not I/SI), list 1 only for
 * B slices, and only when the corresponding
 * ref_pic_list_modification_flag_lX is set. */
2677 exec_picture_refs_modification(
2678 GstVaapiDecoderH264 *decoder,
2679 GstVaapiPictureH264 *picture,
2680 GstH264SliceHdr *slice_hdr
2683 GST_DEBUG("execute ref_pic_list_modification()");
/* RefPicList0 modification (P, SP and B slices) */
2686 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2687 slice_hdr->ref_pic_list_modification_flag_l0)
2688 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
/* RefPicList1 modification (B slices only) */
2691 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2692 slice_hdr->ref_pic_list_modification_flag_l1)
2693 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] and priv->long_ref[] from the DPB for
 * the current picture's view.
 *
 * Frame pictures collect only complete frames (both fields present);
 * field pictures collect individual field buffers.  Each collected
 * picture also gets its `structure` and `other_field` links refreshed
 * from the frame store.  Stale tail entries from a previous pass are
 * cleared to NULL before the counts are updated.
 * NOTE(review): lines are elided in this listing (e.g. the
 * `long_ref_count = 0;` initializer and `continue` statements that
 * presumably follow the early checks); comments describe only the
 * visible code. */
2697 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2698 GstVaapiPictureH264 *picture)
2700 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2701 guint i, j, short_ref_count, long_ref_count;
2703 short_ref_count = 0;
2705 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
/* Frame decoding: only frame stores holding a complete frame in
 * the same view contribute, and they contribute as frames. */
2706 for (i = 0; i < priv->dpb_count; i++) {
2707 GstVaapiFrameStore * const fs = priv->dpb[i];
2708 GstVaapiPictureH264 *pic;
2709 if (!gst_vaapi_frame_store_has_frame(fs))
2711 pic = fs->buffers[0];
2712 if (pic->base.view_id != picture->base.view_id)
2714 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2715 priv->short_ref[short_ref_count++] = pic;
2716 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2717 priv->long_ref[long_ref_count++] = pic;
2718 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2719 pic->other_field = fs->buffers[1];
/* Field decoding: every field buffer of every frame store in the
 * same view is considered individually; `j ^ 1` links each field
 * to its complementary field. */
2723 for (i = 0; i < priv->dpb_count; i++) {
2724 GstVaapiFrameStore * const fs = priv->dpb[i];
2725 for (j = 0; j < fs->num_buffers; j++) {
2726 GstVaapiPictureH264 * const pic = fs->buffers[j];
2727 if (pic->base.view_id != picture->base.view_id)
2729 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2730 priv->short_ref[short_ref_count++] = pic;
2731 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2732 priv->long_ref[long_ref_count++] = pic;
2733 pic->structure = pic->base.structure;
2734 pic->other_field = fs->buffers[j ^ 1];
/* Clear entries left over from a previous (larger) list */
2739 for (i = short_ref_count; i < priv->short_ref_count; i++)
2740 priv->short_ref[i] = NULL;
2741 priv->short_ref_count = short_ref_count;
2743 for (i = long_ref_count; i < priv->long_ref_count; i++)
2744 priv->long_ref[i] = NULL;
2745 priv->long_ref_count = long_ref_count;
/* Entry point for reference picture list construction (8.2.4): builds
 * the short/long reference arrays and PicNum values, then initializes
 * RefPicList0/1 per slice type, applies the slice-header modification
 * commands, and finally pads the active lists with NULL up to
 * num_ref_idx_lX_active_minus1 + 1 entries.
 * NOTE(review): the function's name line is elided in this listing —
 * presumably `init_picture_refs`; confirm against the full source. */
2750 GstVaapiDecoderH264 *decoder,
2751 GstVaapiPictureH264 *picture,
2752 GstH264SliceHdr *slice_hdr
2755 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2758 init_picture_ref_lists(decoder, picture);
2759 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2761 priv->RefPicList0_count = 0;
2762 priv->RefPicList1_count = 0;
/* slice_hdr->type % 5 folds the "same type in all slices" variants
 * (types 5..9) onto the base slice types 0..4 */
2764 switch (slice_hdr->type % 5) {
2765 case GST_H264_P_SLICE:
2766 case GST_H264_SP_SLICE:
2767 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2769 case GST_H264_B_SLICE:
2770 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2776 exec_picture_refs_modification(decoder, picture, slice_hdr);
/* Pad the lists to the active reference count with NULL entries;
 * B slices pad list 1 then (fallthrough-style in the original)
 * list 0 alongside P/SP slices */
2778 switch (slice_hdr->type % 5) {
2779 case GST_H264_B_SLICE:
2780 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2781 for (i = priv->RefPicList1_count; i < num_refs; i++)
2782 priv->RefPicList1[i] = NULL;
2783 priv->RefPicList1_count = num_refs;
2786 case GST_H264_P_SLICE:
2787 case GST_H264_SP_SLICE:
2788 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2789 for (i = priv->RefPicList0_count; i < num_refs; i++)
2790 priv->RefPicList0[i] = NULL;
2791 priv->RefPicList0_count = num_refs;
/* Initializes the current picture's state from the parsed slice
 * header and NAL unit: frame_num bookkeeping, timestamps, MVC flags,
 * IDR handling (DPB flush), picture structure (frame / top field /
 * bottom field), reference flags, and POC computation.
 * NOTE(review): the function's name line is elided in this listing —
 * presumably `init_picture`; confirm against the full source. */
2800 GstVaapiDecoderH264 *decoder,
2801 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2803 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2804 GstVaapiPicture * const base_picture = &picture->base;
2805 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2807 priv->prev_frame_num = priv->frame_num;
2808 priv->frame_num = slice_hdr->frame_num;
2809 picture->frame_num = priv->frame_num;
2810 picture->frame_num_wrap = priv->frame_num;
2811 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2812 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2813 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2814 base_picture->view_id = pi->view_id;
2815 base_picture->voc = pi->voc;
2817 /* Initialize extensions */
2818 switch (pi->nalu.extension_type) {
2819 case GST_H264_NAL_EXTENSION_MVC: {
2820 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
/* Mark the picture as MVC, and record whether it can be used for
 * inter-view prediction and whether it is an anchor picture */
2822 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2823 if (mvc->inter_view_flag)
2824 GST_VAAPI_PICTURE_FLAG_SET(picture,
2825 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2826 if (mvc->anchor_pic_flag)
2827 GST_VAAPI_PICTURE_FLAG_SET(picture,
2828 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2833 /* Reset decoder state for IDR pictures */
2834 if (pi->nalu.idr_pic_flag) {
2836 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2837 dpb_flush(decoder, picture);
2840 /* Initialize picture structure */
2841 if (!slice_hdr->field_pic_flag)
2842 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2844 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2845 if (!slice_hdr->bottom_field_flag)
2846 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2848 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2850 picture->structure = base_picture->structure;
2852 /* Initialize reference flags */
2853 if (pi->nalu.ref_idc) {
2854 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2855 &slice_hdr->dec_ref_pic_marking;
/* IDR pictures may be marked long-term directly via
 * long_term_reference_flag (7.4.3.3); everything else starts as a
 * short-term reference */
2857 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2858 dec_ref_pic_marking->long_term_reference_flag)
2859 GST_VAAPI_PICTURE_FLAG_SET(picture,
2860 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2862 GST_VAAPI_PICTURE_FLAG_SET(picture,
2863 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
/* 8.2.1 - picture order count derivation */
2866 init_picture_poc(decoder, picture, slice_hdr);
2870 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the DPB reaches max_num_ref_frames, unmark the short-term
 * reference with the smallest FrameNumWrap (oldest).  Runs only on
 * the first field of a complementary pair; for field decoding the
 * limit is doubled and both fields of the evicted picture are removed
 * from short_ref[]. */
2872 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2874 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2875 GstH264SPS * const sps = get_sps(decoder);
2876 GstVaapiPictureH264 *ref_picture;
2877 guint i, m, max_num_ref_frames;
2879 GST_DEBUG("reference picture marking process (sliding window)");
2881 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
/* num_ref_frames == 0 is treated as 1 to keep at least one slot */
2884 max_num_ref_frames = sps->num_ref_frames;
2885 if (max_num_ref_frames == 0)
2886 max_num_ref_frames = 1;
2887 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2888 max_num_ref_frames <<= 1;
2890 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2892 if (priv->short_ref_count < 1)
/* Find the short-term reference with the smallest FrameNumWrap */
2895 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2896 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2897 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2901 ref_picture = priv->short_ref[m];
2902 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2903 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2905 /* Both fields need to be marked as "unused for reference", so
2906 remove the other field from the short_ref[] list as well */
2907 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2908 for (i = 0; i < priv->short_ref_count; i++) {
2909 if (priv->short_ref[i] == ref_picture->other_field) {
2910 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* 8.2.5.4.1 - Derives picNumX for an MMCO command: FrameNumWrap for
 * frame pictures, 2*FrameNumWrap+1 for field pictures, minus
 * (difference_of_pic_nums_minus1 + 1).
 * NOTE(review): the `return pic_num;` line is elided in this listing. */
2918 static inline gint32
2919 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2923 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2924 pic_num = picture->frame_num_wrap;
2926 pic_num = 2 * picture->frame_num_wrap + 1;
2927 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2931 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: locate the short-term reference matching picNumX, clear its
 * reference flags (the `other_field` argument is TRUE only for frame
 * pictures, so a field picture unmarks just the targeted field) and
 * drop it from short_ref[]. */
2933 exec_ref_pic_marking_adaptive_mmco_1(
2934 GstVaapiDecoderH264 *decoder,
2935 GstVaapiPictureH264 *picture,
2936 GstH264RefPicMarking *ref_pic_marking
2939 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2942 picNumX = get_picNumX(picture, ref_pic_marking);
2943 i = find_short_term_reference(decoder, picNumX);
/* (early return on lookup failure is elided in this listing) */
2947 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2948 GST_VAAPI_PICTURE_IS_FRAME(picture));
2949 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2952 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: same as MMCO 1 but the target is located by LongTermPicNum
 * in long_ref[]. */
2954 exec_ref_pic_marking_adaptive_mmco_2(
2955 GstVaapiDecoderH264 *decoder,
2956 GstVaapiPictureH264 *picture,
2957 GstH264RefPicMarking *ref_pic_marking
2960 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2963 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
/* (early return on lookup failure is elided in this listing) */
2967 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2968 GST_VAAPI_PICTURE_IS_FRAME(picture));
2969 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2972 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: if another picture already holds the requested
 * LongTermFrameIdx, unmark and remove it first; then move the
 * short-term reference identified by picNumX into long_ref[], mark it
 * long-term, and propagate the index to its complementary field if
 * that field is also a long-term reference. */
2974 exec_ref_pic_marking_adaptive_mmco_3(
2975 GstVaapiDecoderH264 *decoder,
2976 GstVaapiPictureH264 *picture,
2977 GstH264RefPicMarking *ref_pic_marking
2980 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2981 GstVaapiPictureH264 *ref_picture, *other_field;
/* Evict any existing holder of this LongTermFrameIdx */
2984 for (i = 0; i < priv->long_ref_count; i++) {
2985 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2988 if (i != priv->long_ref_count) {
2989 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2990 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2993 picNumX = get_picNumX(picture, ref_pic_marking);
2994 i = find_short_term_reference(decoder, picNumX);
/* (early return on lookup failure is elided in this listing) */
2998 ref_picture = priv->short_ref[i];
2999 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3000 priv->long_ref[priv->long_ref_count++] = ref_picture;
3002 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3003 gst_vaapi_picture_h264_set_reference(ref_picture,
3004 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3005 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3007 /* Assign LongTermFrameIdx to the other field if it was also
3008 marked as "used for long-term reference */
3009 other_field = ref_picture->other_field;
3010 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3011 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3014 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3015 * as "unused for reference" */
/* MMCO 4: long_term_frame_idx is the new maximum
 * (max_long_term_frame_idx_plus1 - 1; -1 means "no long-term refs
 * allowed").  Visible condition keeps entries with idx <= max;
 * presumably a `continue` (elided in this listing) skips them, and
 * entries above the limit are unmarked and removed below — confirm
 * against the full source. */
3017 exec_ref_pic_marking_adaptive_mmco_4(
3018 GstVaapiDecoderH264 *decoder,
3019 GstVaapiPictureH264 *picture,
3020 GstH264RefPicMarking *ref_pic_marking
3023 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3024 gint32 i, long_term_frame_idx;
3026 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3028 for (i = 0; i < priv->long_ref_count; i++) {
3029 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3031 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3032 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3037 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flush the DPB, record that this picture carried MMCO 5
 * (checked when decoding the next picture's POC/frame_num), reset
 * frame_num state to 0 per 7.4.3, and rebase the current picture's
 * field POCs so the retained field POC becomes relative to 0. */
3039 exec_ref_pic_marking_adaptive_mmco_5(
3040 GstVaapiDecoderH264 *decoder,
3041 GstVaapiPictureH264 *picture,
3042 GstH264RefPicMarking *ref_pic_marking
3045 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3047 dpb_flush(decoder, picture);
3049 priv->prev_pic_has_mmco5 = TRUE;
3051 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3052 priv->frame_num = 0;
3053 priv->frame_num_offset = 0;
3054 picture->frame_num = 0;
3056 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3057 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3058 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3059 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3060 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3061 picture->base.poc = 0;
3064 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: like MMCO 3 but the target is the *current* picture.  Any
 * existing holder of the LongTermFrameIdx is evicted first; the
 * current picture is then marked long-term and the index is
 * propagated to its complementary field (reached through
 * base.parent_picture here) when that field is also long-term. */
3066 exec_ref_pic_marking_adaptive_mmco_6(
3067 GstVaapiDecoderH264 *decoder,
3068 GstVaapiPictureH264 *picture,
3069 GstH264RefPicMarking *ref_pic_marking
3072 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3073 GstVaapiPictureH264 *other_field;
/* Evict any existing holder of this LongTermFrameIdx */
3076 for (i = 0; i < priv->long_ref_count; i++) {
3077 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3080 if (i != priv->long_ref_count) {
3081 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3082 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3085 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3086 gst_vaapi_picture_h264_set_reference(picture,
3087 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3088 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3090 /* Assign LongTermFrameIdx to the other field if it was also
3091 marked as "used for long-term reference */
3092 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3093 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3094 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3097 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Dispatches each parsed MMCO command (1..6) to its handler via a
 * lookup table indexed by memory_management_control_operation; slot 0
 * is NULL since MMCO 0 means "end of commands".  Unknown/out-of-range
 * MMCO values are logged as errors. */
3099 exec_ref_pic_marking_adaptive(
3100 GstVaapiDecoderH264 *decoder,
3101 GstVaapiPictureH264 *picture,
3102 GstH264DecRefPicMarking *dec_ref_pic_marking
3107 GST_DEBUG("reference picture marking process (adaptive memory control)");
3109 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3110 GstVaapiDecoderH264 *decoder,
3111 GstVaapiPictureH264 *picture,
3112 GstH264RefPicMarking *ref_pic_marking
/* Index == MMCO number; entry 0 is the elided NULL placeholder */
3115 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3117 exec_ref_pic_marking_adaptive_mmco_1,
3118 exec_ref_pic_marking_adaptive_mmco_2,
3119 exec_ref_pic_marking_adaptive_mmco_3,
3120 exec_ref_pic_marking_adaptive_mmco_4,
3121 exec_ref_pic_marking_adaptive_mmco_5,
3122 exec_ref_pic_marking_adaptive_mmco_6,
3125 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3126 GstH264RefPicMarking * const ref_pic_marking =
3127 &dec_ref_pic_marking->ref_pic_marking[i];
3129 const guint mmco = ref_pic_marking->memory_management_control_operation;
3130 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3131 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3133 GST_ERROR("unhandled MMCO %u", mmco);
3140 /* 8.2.5 - Execute reference picture marking process */
/* Top-level marking entry point: records per-picture state for the
 * next picture (mmco5 flag, structure), registers MVC inter-view
 * pictures, then — for reference pictures that are not IDR — runs
 * either the adaptive MMCO process (when the slice header carries
 * adaptive_ref_pic_marking_mode_flag) or the sliding-window process. */
3142 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3144 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3146 priv->prev_pic_has_mmco5 = FALSE;
3147 priv->prev_pic_structure = picture->structure;
/* Keep inter-view pictures alive for MVC prediction; the array
 * holds its own reference */
3149 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3150 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3152 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3155 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3156 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3157 &picture->last_slice_hdr->dec_ref_pic_marking;
3158 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3159 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3163 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Resets a VAPictureH264 entry to the "invalid/unused" state expected
 * by VA-API for empty ReferenceFrames slots. */
3171 vaapi_init_picture(VAPictureH264 *pic)
3173 pic->picture_id = VA_INVALID_ID;
3175 pic->flags = VA_PICTURE_H264_INVALID;
3176 pic->TopFieldOrderCnt = 0;
3177 pic->BottomFieldOrderCnt = 0;
/* Translates a decoder picture into a VAPictureH264: surface id,
 * short/long-term reference flags, frame_idx, and the field order
 * counts appropriate for the requested picture structure (0 means
 * "use the picture's own structure"). */
3181 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3182 guint picture_structure)
3184 if (!picture_structure)
3185 picture_structure = picture->structure;
3187 pic->picture_id = picture->base.surface_id;
/* Long-term refs report LongTermFrameIdx in frame_idx; short-term
 * refs report frame_num */
3190 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3191 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3192 pic->frame_idx = picture->long_term_frame_idx;
3195 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3196 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3197 pic->frame_idx = picture->frame_num;
/* Only the field(s) present in the chosen structure carry a POC;
 * the absent field's order count is zeroed */
3200 switch (picture_structure) {
3201 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3202 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3203 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3205 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3206 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3207 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3208 pic->BottomFieldOrderCnt = 0;
3210 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3211 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3212 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3213 pic->TopFieldOrderCnt = 0;
/* Fills a VAPictureH264 for use inside RefPicList0/1.  Inter-view
 * (MVC) entries must not advertise short/long-term reference status
 * per H.8.4, so those flags are stripped after the generic fill. */
3219 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3220 GstVaapiPictureH264 *picture)
3222 vaapi_fill_picture(pic, picture, 0);
3224 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3225 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3226 /* The inter-view reference components and inter-view only
3227 reference components that are included in the reference
3228 picture lists are considered as not being marked as "used for
3229 short-term reference" or "used for long-term reference" */
3230 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3231 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames[] array (same-view references plus
 * usable inter-view frame stores, capped at the VA array size, the
 * rest invalidated), and the SPS/PPS-derived scalar and bit fields. */
3236 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3238 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3239 GstVaapiPicture * const base_picture = &picture->base;
3240 GstH264PPS * const pps = get_pps(decoder);
3241 GstH264SPS * const sps = get_sps(decoder);
3242 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3245 /* Fill in VAPictureParameterBufferH264 */
3246 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
/* A frame store contributes if it holds a reference in the current
 * view, or an inter-view reference usable for this picture */
3248 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3249 GstVaapiFrameStore * const fs = priv->dpb[i];
3250 if ((gst_vaapi_frame_store_has_reference(fs) &&
3251 fs->view_id == picture->base.view_id) ||
3252 (gst_vaapi_frame_store_has_inter_view(fs) &&
3253 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3254 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3255 fs->buffers[0], fs->structure);
3256 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Invalidate the unused tail of ReferenceFrames[] */
3259 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3260 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
/* Helper macros: copy a plain field, or a bitfield member, from the
 * SPS/PPS into pic_param */
3262 #define COPY_FIELD(s, f) \
3263 pic_param->f = (s)->f
3265 #define COPY_BFM(a, s, f) \
3266 pic_param->a.bits.f = (s)->f
3268 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3269 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3270 pic_param->frame_num = priv->frame_num;
3272 COPY_FIELD(sps, bit_depth_luma_minus8);
3273 COPY_FIELD(sps, bit_depth_chroma_minus8);
3274 COPY_FIELD(sps, num_ref_frames);
3275 COPY_FIELD(pps, num_slice_groups_minus1);
3276 COPY_FIELD(pps, slice_group_map_type);
3277 COPY_FIELD(pps, slice_group_change_rate_minus1);
3278 COPY_FIELD(pps, pic_init_qp_minus26);
3279 COPY_FIELD(pps, pic_init_qs_minus26);
3280 COPY_FIELD(pps, chroma_qp_index_offset);
3281 COPY_FIELD(pps, second_chroma_qp_index_offset);
3283 pic_param->seq_fields.value = 0; /* reset all bits */
3284 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3285 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3287 COPY_BFM(seq_fields, sps, chroma_format_idc);
3288 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3289 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3290 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3291 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3292 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3293 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3294 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3295 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3297 pic_param->pic_fields.value = 0; /* reset all bits */
3298 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3299 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3301 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3302 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3303 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3304 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3305 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3306 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3307 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3308 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
/* Returns whether the current slice starts a new primary coded picture,
 * following the conditions listed in H.264 spec section 7.4.1.2.4.
 * Each CHECK_* macro compares one distinguishing field between the
 * current (pi) and previous (prev_pi) slice headers.
 * NOTE(review): the early-exit return value inside CHECK_EXPR and the
 * function's final return statement are elided from this excerpt —
 * presumably "differs" means TRUE (new picture); confirm in full file. */
3312 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3314 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3316 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3317 GstH264PPS * const pps = slice_hdr->pps;
3318 GstH264SPS * const sps = pps->sequence;
3319 GstH264SliceHdr *prev_slice_hdr;
3323 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* Logs the differing field; the elided body exits early when `expr`
 * does not hold. */
3325 #define CHECK_EXPR(expr, field_name) do { \
3327 GST_DEBUG(field_name " differs in value"); \
/* Shorthand: equality check of one field across old/new slice headers. */
3332 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3333 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3335 /* view_id differs in value and VOIdx of current slice_hdr is less
3336 than the VOIdx of the prev_slice_hdr */
3337 CHECK_VALUE(pi, prev_pi, view_id);
3339 /* frame_num differs in value, regardless of inferred values to 0 */
3340 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
/* Comparing the PPS pointers is sufficient: the parser keeps one
 * GstH264PPS object per pic_parameter_set_id. */
3342 /* pic_parameter_set_id differs in value */
3343 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3345 /* field_pic_flag differs in value */
3346 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3348 /* bottom_field_flag is present in both and differs in value */
3349 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3350 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3352 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3353 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3354 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3356 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3357 value or delta_pic_order_cnt_bottom differs in value */
3358 if (sps->pic_order_cnt_type == 0) {
3359 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3360 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3361 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3364 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3365 differs in value or delta_pic_order_cnt[1] differs in value */
3366 else if (sps->pic_order_cnt_type == 1) {
3367 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3368 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3371 /* IdrPicFlag differs in value */
3372 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3374 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3375 if (pi->nalu.idr_pic_flag)
3376 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
/* Decides whether the current slice also starts a new access unit, given
 * that it already starts a new picture. With no previous slice info, or
 * within the same view, it is a new AU (the elided branch presumably
 * returns TRUE — confirm in full file). For MVC, a wrap-around to a
 * lower view order index (voc) marks a new access unit. */
3383 /* Detection of a new access unit, assuming we are already in presence
3385 static inline gboolean
3386 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3388 if (!prev_pi || prev_pi->view_id == pi->view_id)
3390 return pi->voc < prev_pi->voc;
/* Returns the previously decoded first field that the current (second)
 * field slice should pair with, or an elided NULL path otherwise. */
3393 /* Finds the first field picture corresponding to the supplied picture */
3394 static GstVaapiPictureH264 *
3395 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3397 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3398 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3399 GstVaapiFrameStore *fs;
/* A frame picture (field_pic_flag == 0) has no first field to pair with. */
3401 if (!slice_hdr->field_pic_flag)
/* Look up the most recent frame store for this view (voc); a store that
 * already holds a complete frame cannot be a pending first field. */
3404 fs = priv->prev_frames[pi->voc];
3405 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Fields pair up only when they share the same frame_num (8.2.1). */
3408 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3409 return fs->buffers[0];
/* Starts decoding of a new picture for the slice described by @unit:
 * activates PPS/SPS, (re)creates the VA context if needed, allocates a
 * new picture object (or a second-field companion), sets the cropping
 * rectangle and quantization matrices, and initializes picture state.
 * Returns a GstVaapiDecoderStatus; DROP_FRAME for MVC substreams. */
3413 static GstVaapiDecoderStatus
3414 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3416 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3417 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3418 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
/* ensure_pps()/ensure_sps() activate the parameter sets referenced by
 * this slice and return NULL on failure. */
3419 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3420 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3421 GstVaapiPictureH264 *picture, *first_field;
3422 GstVaapiDecoderStatus status;
3424 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3425 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3427 /* Only decode base stream for MVC */
3428 switch (sps->profile_idc) {
3429 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3430 case GST_H264_PROFILE_STEREO_HIGH:
3432 GST_DEBUG("drop picture from substream");
3433 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3438 status = ensure_context(decoder, sps);
3439 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Reset per-picture state; re-accumulated as slices are decoded. */
3442 priv->decoder_state = 0;
3444 first_field = find_first_field(decoder, pi);
3446 /* Re-use current picture where the first field was decoded */
3447 picture = gst_vaapi_picture_h264_new_field(first_field);
3449 GST_ERROR("failed to allocate field picture");
3450 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3454 /* Create new picture */
3455 picture = gst_vaapi_picture_h264_new(decoder);
3457 GST_ERROR("failed to allocate picture");
3458 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes ownership; drop the local reference. */
3461 gst_vaapi_picture_replace(&priv->current_picture, picture);
3462 gst_vaapi_picture_unref(picture);
3464 /* Clear inter-view references list if this is the primary coded
3465 picture of the current access unit */
3466 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3467 g_ptr_array_set_size(priv->inter_views, 0);
3469 /* Update cropping rectangle */
3470 if (sps->frame_cropping_flag) {
3471 GstVaapiRectangle crop_rect;
3472 crop_rect.x = sps->crop_rect_x;
3473 crop_rect.y = sps->crop_rect_y;
3474 crop_rect.width = sps->crop_rect_width;
3475 crop_rect.height = sps->crop_rect_height;
3476 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3479 status = ensure_quant_matrix(decoder, picture);
3480 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3481 GST_ERROR("failed to reset quantizer matrix");
3485 if (!init_picture(decoder, picture, pi))
3486 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3487 if (!fill_picture(decoder, picture))
3488 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Record the parser state that was valid when this picture started. */
3490 priv->decoder_state = pi->state;
3491 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Computes the bit offset of the first slice-data bit within the NAL
 * unit, as required by VASliceParameterBufferH264.slice_data_bit_offset:
 * NAL header bits plus the parsed slice-header size, minus the bits of
 * any emulation-prevention (0x03) bytes encountered in the header.
 * NOTE(review): assumes n_emulation_prevention_bytes counts only EPBs
 * inside the slice header — confirm against the parser's semantics. */
3495 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3499 epb_count = slice_hdr->n_emulation_prevention_bytes;
3500 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Copies the explicit prediction weight table from the parsed slice
 * header into the VA slice parameter buffer. Number of weight tables:
 * 1 for weighted P/SP slices, 2 for B slices with weighted_bipred_idc
 * == 1, 0 otherwise (all weight fields stay zeroed). */
3504 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3505 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3507 VASliceParameterBufferH264 * const slice_param = slice->param;
3508 GstH264PPS * const pps = get_pps(decoder);
3509 GstH264SPS * const sps = get_sps(decoder);
3510 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3511 guint num_weight_tables = 0;
3514 if (pps->weighted_pred_flag &&
3515 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3516 num_weight_tables = 1;
3517 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3518 num_weight_tables = 2;
3520 num_weight_tables = 0;
/* Default: no explicit weighting. */
3522 slice_param->luma_log2_weight_denom = 0;
3523 slice_param->chroma_log2_weight_denom = 0;
3524 slice_param->luma_weight_l0_flag = 0;
3525 slice_param->chroma_weight_l0_flag = 0;
3526 slice_param->luma_weight_l1_flag = 0;
3527 slice_param->chroma_weight_l1_flag = 0;
3529 if (num_weight_tables < 1)
3532 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3533 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights (P/SP/B slices). */
3535 slice_param->luma_weight_l0_flag = 1;
3536 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3537 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3538 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma weights only exist when ChromaArrayType != 0 (i.e. not
 * monochrome / separate colour planes). */
3541 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3542 if (slice_param->chroma_weight_l0_flag) {
3543 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3544 for (j = 0; j < 2; j++) {
3545 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3546 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3551 if (num_weight_tables < 2)
/* List 1 weights (B slices only). */
3554 slice_param->luma_weight_l1_flag = 1;
3555 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3556 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3557 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3560 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3561 if (slice_param->chroma_weight_l1_flag) {
3562 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3563 for (j = 0; j < 2; j++) {
3564 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3565 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Fills the VA slice parameter's RefPicList0/1 arrays from the decoder's
 * already-built reference picture lists (priv->RefPicList0/1). Unused
 * entries up to num_ref_idx_lX_active_minus1 are reset to "invalid" via
 * vaapi_init_picture(). Lists used: 2 for B slices, 0 for I slices,
 * 1 otherwise (the assignments themselves are elided here). */
3573 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3574 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3576 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3577 VASliceParameterBufferH264 * const slice_param = slice->param;
3578 guint i, num_ref_lists = 0;
3580 slice_param->num_ref_idx_l0_active_minus1 = 0;
3581 slice_param->num_ref_idx_l1_active_minus1 = 0;
3583 if (GST_H264_IS_B_SLICE(slice_hdr))
3585 else if (GST_H264_IS_I_SLICE(slice_hdr))
3590 if (num_ref_lists < 1)
3593 slice_param->num_ref_idx_l0_active_minus1 =
3594 slice_hdr->num_ref_idx_l0_active_minus1;
3596 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3597 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3598 priv->RefPicList0[i]);
/* Pad the remainder of list 0 with invalid entries. */
3599 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3600 vaapi_init_picture(&slice_param->RefPicList0[i]);
3602 if (num_ref_lists < 2)
3605 slice_param->num_ref_idx_l1_active_minus1 =
3606 slice_hdr->num_ref_idx_l1_active_minus1;
3608 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3609 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3610 priv->RefPicList1[i]);
/* Pad the remainder of list 1 with invalid entries. */
3611 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3612 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Populates the VASliceParameterBufferH264 for @slice from the parsed
 * slice header, then delegates reference-list and weight-table filling
 * to fill_RefPicList() and fill_pred_weight_table(). */
3617 fill_slice(GstVaapiDecoderH264 *decoder,
3618 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3620 VASliceParameterBufferH264 * const slice_param = slice->param;
3621 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3623 /* Fill in VASliceParameterBufferH264 */
3624 slice_param->slice_data_bit_offset =
3625 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3626 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* slice_type may be in the 5..9 "all slices of this type" range; VA-API
 * expects the canonical 0..4 value. */
3627 slice_param->slice_type = slice_hdr->type % 5;
3628 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3629 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3630 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3631 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3632 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3633 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3635 if (!fill_RefPicList(decoder, slice, slice_hdr))
3637 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* Decodes one slice NAL unit into the current picture: validates parser
 * state and active PPS/SPS, maps the input buffer to copy the slice
 * payload into a new GstVaapiSlice, initializes reference lists, fills
 * the VA slice parameters and appends the slice to the picture. */
3642 static GstVaapiDecoderStatus
3643 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3645 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3646 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3647 GstVaapiPictureH264 * const picture = priv->current_picture;
3648 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3649 GstVaapiSlice *slice;
3650 GstBuffer * const buffer =
3651 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3652 GstMapInfo map_info;
3654 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid SPS+PPS headers the slice is silently skipped
 * (SUCCESS, not an error) so decoding can resume at the next sync. */
3656 if (!is_valid_state(pi->state,
3657 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3658 GST_WARNING("failed to receive enough headers to decode slice");
3659 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3662 if (!ensure_pps(decoder, slice_hdr->pps)) {
3663 GST_ERROR("failed to activate PPS");
3664 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3667 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3668 GST_ERROR("failed to activate SPS");
3669 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3672 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3673 GST_ERROR("failed to map buffer");
3674 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3677 /* Check whether this is the first/last slice in the current access unit */
3678 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3679 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3680 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3681 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* GST_VAAPI_SLICE_NEW copies the payload, so unmapping right after is
 * safe. */
3683 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3684 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3685 gst_buffer_unmap(buffer, &map_info);
3687 GST_ERROR("failed to allocate slice");
3688 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3691 init_picture_refs(decoder, picture, slice_hdr);
3692 if (!fill_slice(decoder, slice, pi)) {
3693 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3694 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3697 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3698 picture->last_slice_hdr = slice_hdr;
3699 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3700 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans the adapter for the next 00 00 01 start code (masked 32-bit
 * search), returning its offset; the optional @scp receives the matched
 * 32-bit pattern. Returns -1 (cast from the adapter's miss value) when
 * no start code is found in [ofs, ofs+size). */
3704 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3706 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3707 0xffffff00, 0x00000100,
/* Dispatches a parsed NAL unit to its specific decode routine based on
 * the NAL type. Unknown NAL types are reported as a bitstream error. */
3712 static GstVaapiDecoderStatus
3713 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3715 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3716 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3717 GstVaapiDecoderStatus status;
/* Accumulate parser state gathered while parsing this unit. */
3719 priv->decoder_state |= pi->state;
3720 switch (pi->nalu.type) {
3721 case GST_H264_NAL_SPS:
3722 status = decode_sps(decoder, unit);
3724 case GST_H264_NAL_SUBSET_SPS:
3725 status = decode_subset_sps(decoder, unit);
3727 case GST_H264_NAL_PPS:
3728 status = decode_pps(decoder, unit);
3730 case GST_H264_NAL_SLICE_EXT:
3731 case GST_H264_NAL_SLICE_IDR:
3732 /* fall-through. IDR specifics are handled in init_picture() */
3733 case GST_H264_NAL_SLICE:
3734 status = decode_slice(decoder, unit);
3736 case GST_H264_NAL_SEQ_END:
3737 case GST_H264_NAL_STREAM_END:
3738 status = decode_sequence_end(decoder);
/* SEI messages were already handled at parse time; nothing to decode. */
3740 case GST_H264_NAL_SEI:
3741 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3744 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3745 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* Decodes out-of-band codec data in avcC (ISO/IEC 14496-15) format:
 * reads the NAL length size, then parses and decodes each embedded SPS
 * and PPS NAL unit in turn. On success, marks the stream as avcC so
 * gst_vaapi_decoder_h264_parse() switches to length-prefixed NALs. */
3751 static GstVaapiDecoderStatus
3752 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3753 const guchar *buf, guint buf_size)
3755 GstVaapiDecoderH264 * const decoder =
3756 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3757 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3758 GstVaapiDecoderStatus status;
3759 GstVaapiDecoderUnit unit;
3760 GstVaapiParserInfoH264 *pi = NULL;
3761 GstH264ParserResult result;
3762 guint i, ofs, num_sps, num_pps;
3764 unit.parsed_info = NULL;
3767 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3770 GST_ERROR("failed to decode codec-data, not in avcC format");
3771 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* avcC byte 4, low 2 bits: lengthSizeMinusOne -> 1..4 byte NAL lengths. */
3774 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* avcC byte 5, low 5 bits: number of SPS NAL units that follow. */
3776 num_sps = buf[5] & 0x1f;
3779 for (i = 0; i < num_sps; i++) {
3780 pi = gst_vaapi_parser_info_h264_new();
3782 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3783 unit.parsed_info = pi;
/* In codec-data, each NAL unit is prefixed by a 2-byte length field. */
3785 result = gst_h264_parser_identify_nalu_avc(
3787 buf, ofs, buf_size, 2,
3790 if (result != GST_H264_PARSER_OK) {
3791 status = get_status(result);
3795 status = parse_sps(decoder, &unit);
3796 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3798 ofs = pi->nalu.offset + pi->nalu.size;
3800 status = decode_sps(decoder, &unit);
3801 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3803 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3809 for (i = 0; i < num_pps; i++) {
3810 pi = gst_vaapi_parser_info_h264_new();
3812 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3813 unit.parsed_info = pi;
3815 result = gst_h264_parser_identify_nalu_avc(
3817 buf, ofs, buf_size, 2,
3820 if (result != GST_H264_PARSER_OK) {
3821 status = get_status(result);
3825 status = parse_pps(decoder, &unit);
3826 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3828 ofs = pi->nalu.offset + pi->nalu.size;
3830 status = decode_pps(decoder, &unit);
3831 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3833 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3836 priv->is_avcC = TRUE;
3837 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common exit (reached via elided "cleanup" label): drop any pending
 * parser info before returning. */
3840 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* Lazily opens the decoder on first use and processes any out-of-band
 * codec data (avcC). Idempotent: subsequent calls return SUCCESS once
 * the decoder is open. */
3844 static GstVaapiDecoderStatus
3845 ensure_decoder(GstVaapiDecoderH264 *decoder)
3847 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3848 GstVaapiDecoderStatus status;
3850 if (!priv->is_opened) {
3851 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3852 if (!priv->is_opened)
3853 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
/* Triggers gst_vaapi_decoder_h264_decode_codec_data() if codec-data
 * was provided in the caps. */
3855 status = gst_vaapi_decoder_decode_codec_data(
3856 GST_VAAPI_DECODER_CAST(decoder));
3857 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3860 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parse vfunc: extracts exactly one NAL unit from the adapter (either
 * length-prefixed avcC data or byte-stream with start codes), parses it
 * into a GstVaapiParserInfoH264 attached to @unit, and classifies the
 * unit with AU/FRAME start/end, SLICE and SKIP flags used by the base
 * decoder to group units into frames and access units. */
3863 static GstVaapiDecoderStatus
3864 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3865 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3867 GstVaapiDecoderH264 * const decoder =
3868 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3869 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3870 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3871 GstVaapiParserInfoH264 *pi;
3872 GstVaapiDecoderStatus status;
3873 GstH264ParserResult result;
3875 guint i, size, buf_size, nalu_size, flags;
3879 status = ensure_decoder(decoder);
3880 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3883 size = gst_adapter_available(adapter);
/* avcC mode: each NAL unit is prefixed by a nal_length_size-byte,
 * big-endian length field instead of a start code. */
3885 if (priv->is_avcC) {
3886 if (size < priv->nal_length_size)
3887 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3889 buf = (guchar *)&start_code;
3890 g_assert(priv->nal_length_size <= sizeof(start_code));
3891 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
/* Accumulate the big-endian NAL length byte by byte.
 * NOTE(review): nalu_size must be zero-initialized on the (elided)
 * preceding line — confirm in full file. */
3894 for (i = 0; i < priv->nal_length_size; i++)
3895 nalu_size = (nalu_size << 8) | buf[i];
3897 buf_size = priv->nal_length_size + nalu_size;
3898 if (size < buf_size)
3899 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Byte-stream mode: locate the start code of the current NAL unit,
 * then the next start code, to delimit the unit. */
3903 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3905 ofs = scan_for_start_code(adapter, 0, size, NULL);
3907 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Discard garbage bytes before the first start code. */
3910 gst_adapter_flush(adapter, ofs);
/* input_offset2 caches the resume position of the previous scan so we
 * do not rescan bytes already examined. */
3914 ofs2 = ps->input_offset2 - ofs - 4;
3918 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3919 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3921 // Assume the whole NAL unit is present if end-of-stream
3923 ps->input_offset2 = size;
3924 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3930 ps->input_offset2 = 0;
3932 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3934 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3936 unit->size = buf_size;
3938 pi = gst_vaapi_parser_info_h264_new();
3940 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit takes ownership of the parser info (unref on destroy). */
3942 gst_vaapi_decoder_unit_set_parsed_info(unit,
3943 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3946 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3947 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3949 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3950 buf, 0, buf_size, &pi->nalu);
3951 status = get_status(result);
3952 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* First pass: parse the NAL unit's headers. */
3955 switch (pi->nalu.type) {
3956 case GST_H264_NAL_SPS:
3957 status = parse_sps(decoder, unit);
3959 case GST_H264_NAL_SUBSET_SPS:
3960 status = parse_subset_sps(decoder, unit);
3962 case GST_H264_NAL_PPS:
3963 status = parse_pps(decoder, unit);
3965 case GST_H264_NAL_SEI:
3966 status = parse_sei(decoder, unit);
/* Non-MVC slice extensions are not parsed, only accepted. */
3968 case GST_H264_NAL_SLICE_EXT:
3969 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3970 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3974 case GST_H264_NAL_SLICE_IDR:
3975 case GST_H264_NAL_SLICE:
3976 status = parse_slice(decoder, unit);
3979 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3982 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Second pass: derive AU/frame boundary flags from the NAL type. */
3986 switch (pi->nalu.type) {
3987 case GST_H264_NAL_AU_DELIMITER:
3988 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3989 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3991 case GST_H264_NAL_FILLER_DATA:
3992 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3994 case GST_H264_NAL_STREAM_END:
3995 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3997 case GST_H264_NAL_SEQ_END:
3998 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3999 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4001 case GST_H264_NAL_SPS:
4002 case GST_H264_NAL_SUBSET_SPS:
4003 case GST_H264_NAL_PPS:
4004 case GST_H264_NAL_SEI:
4005 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4006 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4008 case GST_H264_NAL_SLICE_EXT:
4009 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4010 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4014 case GST_H264_NAL_SLICE_IDR:
4015 case GST_H264_NAL_SLICE:
4016 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4017 if (is_new_picture(pi, priv->prev_slice_pi)) {
4018 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4019 if (is_new_access_unit(pi, priv->prev_slice_pi))
4020 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4022 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4024 case GST_H264_NAL_SPS_EXT:
4025 case GST_H264_NAL_SLICE_AUX:
4026 /* skip SPS extension and auxiliary slice for now */
4027 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4029 case GST_H264_NAL_PREFIX_UNIT:
4030 /* skip Prefix NAL units for now */
4031 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4032 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4033 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Reserved NAL types 14..18 also delimit a new AU/frame (7.4.1.2.3). */
4036 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4037 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4038 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* An AU boundary here implies the previous slice ended its AU. */
4041 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4042 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4043 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The adapter mapping is transient: clear the data pointer so it is
 * never dereferenced after this function returns. */
4045 pi->nalu.data = NULL;
4046 pi->state = priv->parser_state;
4048 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4049 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Decode vfunc: ensures the decoder is open, then decodes one parsed
 * unit via decode_unit(). */
4052 static GstVaapiDecoderStatus
4053 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4054 GstVaapiDecoderUnit *unit)
4056 GstVaapiDecoderH264 * const decoder =
4057 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4058 GstVaapiDecoderStatus status;
4060 status = ensure_decoder(decoder);
4061 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4063 return decode_unit(decoder, unit);
/* start_frame vfunc: begins a new picture for the first slice unit of
 * a frame (see decode_picture()). */
4066 static GstVaapiDecoderStatus
4067 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4068 GstVaapiDecoderUnit *unit)
4070 GstVaapiDecoderH264 * const decoder =
4071 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4073 return decode_picture(decoder, unit);
/* end_frame vfunc: submits the accumulated slices of the current
 * picture for decoding and updates the DPB. */
4076 static GstVaapiDecoderStatus
4077 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4079 GstVaapiDecoderH264 * const decoder =
4080 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4082 return decode_current_picture(decoder);
/* flush vfunc: drains and empties the decoded picture buffer (all
 * views, hence the NULL picture argument). */
4085 static GstVaapiDecoderStatus
4086 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4088 GstVaapiDecoderH264 * const decoder =
4089 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4091 dpb_flush(decoder, NULL);
4092 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Initializes the H.264 decoder class: instance size, finalizer and all
 * GstVaapiDecoder vfunc pointers (parse/decode/start_frame/end_frame/
 * flush/decode_codec_data). */
4096 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4098 GstVaapiMiniObjectClass * const object_class =
4099 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4100 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4102 object_class->size = sizeof(GstVaapiDecoderH264);
4103 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4105 decoder_class->create = gst_vaapi_decoder_h264_create;
4106 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4107 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4108 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4109 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4110 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4111 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4113 decoder_class->decode_codec_data =
4114 gst_vaapi_decoder_h264_decode_codec_data;
/* Returns the singleton decoder class, lazily initialized exactly once
 * in a thread-safe manner via g_once_init_enter/leave. */
4117 static inline const GstVaapiDecoderClass *
4118 gst_vaapi_decoder_h264_class(void)
4120 static GstVaapiDecoderH264Class g_class;
4121 static gsize g_class_init = FALSE;
4123 if (g_once_init_enter(&g_class_init)) {
4124 gst_vaapi_decoder_h264_class_init(&g_class);
4125 g_once_init_leave(&g_class_init, TRUE);
4127 return GST_VAAPI_DECODER_CLASS(&g_class);
4131 * gst_vaapi_decoder_h264_new:
4132 * @display: a #GstVaapiDisplay
4133 * @caps: a #GstCaps holding codec information
4135 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4136 * hold extra information like codec-data and picture coded size.
4138 * Return value: the newly allocated #GstVaapiDecoder object
4141 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4143 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);