2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
45 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
52 // Used for field_poc[]
54 #define BOTTOM_FIELD 1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
84 struct _GstVaapiParserInfoH264 {
85 GstVaapiMiniObject parent_instance;
91 GstH264SliceHdr slice_hdr;
94 guint flags; // Same as decoder unit flags (persistent)
95 guint view_id; // View ID of slice
96 guint voc; // View order index (VOIdx) of slice
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_SEI:
109 g_array_unref(pi->data.sei);
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures --- */
145 /* ------------------------------------------------------------------------- */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
188 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
189 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
190 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
193 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
194 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
195 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
203 #define GST_VAAPI_PICTURE_H264(picture) \
204 ((GstVaapiPictureH264 *)(picture))
206 struct _GstVaapiPictureH264 {
207 GstVaapiPicture base;
208 GstH264SliceHdr *last_slice_hdr;
211 gint32 frame_num; // Original frame_num from slice_header()
212 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214 gint32 pic_num; // Temporary for ref pic marking: PicNum
215 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217 guint output_flag : 1;
218 guint output_needed : 1;
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
230 gst_vaapi_picture_h264_create(
231 GstVaapiPictureH264 *picture,
232 const GstVaapiCodecObjectConstructorArgs *args
235 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
238 picture->field_poc[0] = G_MAXINT32;
239 picture->field_poc[1] = G_MAXINT32;
240 picture->output_needed = FALSE;
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
247 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248 &GstVaapiPictureH264Class,
249 GST_VAAPI_CODEC_BASE(decoder),
250 NULL, sizeof(VAPictureParameterBufferH264),
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB) --- */
283 /* ------------------------------------------------------------------------- */
285 struct _GstVaapiFrameStore {
287 GstVaapiMiniObject parent_instance;
291 GstVaapiPictureH264 *buffers[2];
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
335 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
339 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340 if (picture->output_flag) {
341 picture->output_needed = TRUE;
345 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
347 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348 TOP_FIELD : BOTTOM_FIELD;
349 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
359 GstVaapiPictureH264 * const first_field = fs->buffers[0];
360 GstVaapiPictureH264 *second_field;
362 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
364 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
367 second_field = gst_vaapi_picture_h264_new_field(first_field);
370 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371 gst_vaapi_picture_unref(second_field);
373 second_field->frame_num = first_field->frame_num;
374 second_field->field_poc[0] = first_field->field_poc[0];
375 second_field->field_poc[1] = first_field->field_poc[1];
376 second_field->output_flag = first_field->output_flag;
377 if (second_field->output_flag) {
378 second_field->output_needed = TRUE;
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder --- */
426 /* ------------------------------------------------------------------------- */
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429 ((GstVaapiDecoderH264 *)(decoder))
432 GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
433 GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
434 GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
436 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437 GST_H264_VIDEO_STATE_GOT_SPS |
438 GST_H264_VIDEO_STATE_GOT_PPS),
439 GST_H264_VIDEO_STATE_VALID_PICTURE = (
440 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441 GST_H264_VIDEO_STATE_GOT_SLICE)
444 struct _GstVaapiDecoderH264Private {
445 GstH264NalParser *parser;
448 GstVaapiPictureH264 *current_picture;
449 GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
450 GstVaapiParserInfoH264 *active_sps;
451 GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
452 GstVaapiParserInfoH264 *active_pps;
453 GstVaapiParserInfoH264 *prev_pi;
454 GstVaapiParserInfoH264 *prev_slice_pi;
455 GstVaapiFrameStore **prev_frames;
456 guint prev_frames_alloc;
457 GstVaapiFrameStore **dpb;
462 GstVaapiProfile profile;
463 GstVaapiEntrypoint entrypoint;
464 GstVaapiChromaType chroma_type;
465 GPtrArray *inter_views;
466 GstVaapiPictureH264 *short_ref[32];
467 guint short_ref_count;
468 GstVaapiPictureH264 *long_ref[32];
469 guint long_ref_count;
470 GstVaapiPictureH264 *RefPicList0[32];
471 guint RefPicList0_count;
472 GstVaapiPictureH264 *RefPicList1[32];
473 guint RefPicList1_count;
474 guint nal_length_size;
477 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
478 gint32 poc_msb; // PicOrderCntMsb
479 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
480 gint32 prev_poc_msb; // prevPicOrderCntMsb
481 gint32 prev_poc_lsb; // prevPicOrderCntLsb
482 gint32 frame_num_offset; // FrameNumOffset
483 gint32 frame_num; // frame_num (from slice_header())
484 gint32 prev_frame_num; // prevFrameNum
485 gboolean prev_pic_has_mmco5; // prevMmco5Pic
486 gboolean prev_pic_structure; // previous picture structure
489 guint has_context : 1;
490 guint progressive_sequence : 1;
494 * GstVaapiDecoderH264:
496 * A decoder based on H264.
498 struct _GstVaapiDecoderH264 {
500 GstVaapiDecoder parent_instance;
501 GstVaapiDecoderH264Private priv;
505 * GstVaapiDecoderH264Class:
507 * A decoder class based on H264.
509 struct _GstVaapiDecoderH264Class {
511 GstVaapiDecoderClass parent_class;
515 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
518 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
519 GstVaapiPictureH264 *picture);
521 static inline gboolean
522 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
523 GstVaapiFrameStore *fs)
525 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
528 /* Determines if the supplied profile is one of the MVC set */
/* NOTE(review): listing has gaps (return type / braces missing from this
 * extraction). Returns TRUE only for the two MVC profiles defined by
 * Annex H: Multiview High and Stereo High. */
530 is_mvc_profile(GstH264Profile profile)
532 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
533 profile == GST_H264_PROFILE_STEREO_HIGH;
536 /* Determines the view_id from the supplied NAL unit */
538 get_view_id(GstH264NalUnit *nalu)
540 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
543 /* Determines the view order index (VOIdx) from the supplied view_id */
545 get_view_order_index(GstH264SPS *sps, guint16 view_id)
547 GstH264SPSExtMVC *mvc;
550 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
553 mvc = &sps->extension.mvc;
554 for (i = 0; i <= mvc->num_views_minus1; i++) {
555 if (mvc->view[i].view_id == view_id)
558 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
562 /* Determines NumViews */
/* One view for non-MVC streams; for an MVC subset SPS, NumViews is
 * num_views_minus1 + 1 from the SPS MVC extension. */
564 get_num_views(GstH264SPS *sps)
566 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
567 sps->extension.mvc.num_views_minus1 : 0);
570 /* Get number of reference frames to use */
/* Derives max_dec_frame_buffering (C.5.5.2 / A.3.1) for the supplied SPS:
 * from the level's MaxDpbMbs limit, doubled for MVC, optionally overridden
 * by the VUI bitstream restriction, then clamped. NOTE(review): this
 * listing is missing lines (gaps in original numbering), so some control
 * flow between the visible statements is not shown here. */
572 get_max_dec_frame_buffering(GstH264SPS *sps)
574 guint num_views, max_dpb_frames;
575 guint max_dec_frame_buffering, PicSizeMbs;
576 GstVaapiLevelH264 level;
577 const GstVaapiH264LevelLimits *level_limits;
579 /* Table A-1 - Level limits */
/* level_idc 11 with constraint_set3_flag set denotes level 1b (A.3.1). */
580 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
581 level = GST_VAAPI_LEVEL_H264_L1b;
583 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
584 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
/* Unknown level: fall back to the absolute DPB maximum of 16 frames. */
585 if (G_UNLIKELY(!level_limits)) {
586 GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
587 max_dec_frame_buffering = 16;
/* PicSizeMbs per A.3.1 (frame MBs; doubled height for field coding). */
590 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
591 (sps->pic_height_in_map_units_minus1 + 1) *
592 (sps->frame_mbs_only_flag ? 1 : 2));
593 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
/* MVC (H.10.2): the DPB budget is effectively doubled for stereo. */
595 if (is_mvc_profile(sps->profile_idc))
596 max_dec_frame_buffering <<= 1;
/* VUI may carry an explicit max_dec_frame_buffering (E.2.1). */
599 if (sps->vui_parameters_present_flag) {
600 GstH264VUIParams * const vui_params = &sps->vui_parameters;
601 if (vui_params->bitstream_restriction_flag)
602 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
/* Intra-only profiles (constraint_set3_flag) need no reference storage. */
604 switch (sps->profile_idc) {
605 case 44: // CAVLC 4:4:4 Intra profile
606 case GST_H264_PROFILE_SCALABLE_HIGH:
607 case GST_H264_PROFILE_HIGH:
608 case GST_H264_PROFILE_HIGH10:
609 case GST_H264_PROFILE_HIGH_422:
610 case GST_H264_PROFILE_HIGH_444:
611 if (sps->constraint_set3_flag)
612 max_dec_frame_buffering = 0;
/* Clamp to the per-view frame cap, but never below num_ref_frames;
 * always keep at least one frame of buffering. */
618 num_views = get_num_views(sps);
619 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
620 if (max_dec_frame_buffering > max_dpb_frames)
621 max_dec_frame_buffering = max_dpb_frames;
622 else if (max_dec_frame_buffering < sps->num_ref_frames)
623 max_dec_frame_buffering = sps->num_ref_frames;
624 return MAX(1, max_dec_frame_buffering);
/* O(1) unordered removal: the last element is moved into the freed slot,
 * the tail slot is NULLed, and the caller's length is decremented.
 * Element order is NOT preserved. */
628 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
630 gpointer * const entries = array;
631 guint num_entries = *array_length_ptr;
633 g_return_if_fail(index < num_entries);
635 if (index != --num_entries)
636 entries[index] = entries[num_entries];
637 entries[num_entries] = NULL;
638 *array_length_ptr = num_entries;
643 array_remove_index(void *array, guint *array_length_ptr, guint index)
645 array_remove_index_fast(array, array_length_ptr, index);
649 array_remove_index(void *array, guint *array_length_ptr, guint index)
651 gpointer * const entries = array;
652 const guint num_entries = *array_length_ptr - 1;
655 g_return_if_fail(index <= num_entries);
657 for (i = index; i < num_entries; i++)
658 entries[i] = entries[i + 1];
659 entries[num_entries] = NULL;
660 *array_length_ptr = num_entries;
664 #define ARRAY_REMOVE_INDEX(array, index) \
665 array_remove_index(array, &array##_count, index)
/* Removes the frame store at `index` from the DPB and drops its reference.
 * With USE_STRICT_DPB_ORDERING the remaining entries are shifted down to
 * preserve order (debug aid); otherwise the last entry is swapped into the
 * freed slot. The vacated tail slot is cleared via frame_store_replace. */
668 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
670 GstVaapiDecoderH264Private * const priv = &decoder->priv;
671 guint i, num_frames = --priv->dpb_count;
673 if (USE_STRICT_DPB_ORDERING) {
674 for (i = index; i < num_frames; i++)
675 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
677 else if (index != num_frames)
678 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
679 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
/* dpb_output(): outputs one picture from the frame store `fs`.
 * NOTE(review): the function head and several lines are missing from this
 * listing (gaps in original numbering); visible logic: mark the picture as
 * no longer needing output, and once the store's output_needed count drops
 * to zero, push buffers[0] downstream via gst_vaapi_picture_output(). */
684 GstVaapiDecoderH264 *decoder,
685 GstVaapiFrameStore *fs,
686 GstVaapiPictureH264 *picture
689 picture->output_needed = FALSE;
692 if (--fs->output_needed > 0)
694 picture = fs->buffers[0];
696 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
/* Evicts the DPB entry at position `i` once it is neither awaiting output
 * nor holding any reference picture. The `picture` argument is unused in
 * the visible body. */
700 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
702 GstVaapiDecoderH264Private * const priv = &decoder->priv;
703 GstVaapiFrameStore * const fs = priv->dpb[i];
705 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
706 dpb_remove_index(decoder, i);
709 /* Finds the frame store holding the supplied picture */
/* Linear scan over DPB entries and their (up to two) field/frame buffers.
 * NOTE(review): the return statements are not visible in this listing;
 * presumably returns the DPB index on match and -1 otherwise — confirm
 * against the full source. */
711 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
713 GstVaapiDecoderH264Private * const priv = &decoder->priv;
716 for (i = 0; i < priv->dpb_count; i++) {
717 GstVaapiFrameStore * const fs = priv->dpb[i];
718 for (j = 0; j < fs->num_buffers; j++) {
719 if (fs->buffers[j] == picture)
726 /* Finds the picture with the lowest POC that needs to be output */
/* When `picture` is non-NULL the search is restricted to its view_id.
 * Ties on POC are broken by lower VOC. `found_index` is assigned only
 * together with `found_picture`, and is returned only when a picture was
 * found, so its lack of initializer is safe. Returns -1 when nothing in
 * the DPB needs output. */
728 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
729 GstVaapiPictureH264 **found_picture_ptr)
731 GstVaapiDecoderH264Private * const priv = &decoder->priv;
732 GstVaapiPictureH264 *found_picture = NULL;
733 guint i, j, found_index;
735 for (i = 0; i < priv->dpb_count; i++) {
736 GstVaapiFrameStore * const fs = priv->dpb[i];
737 if (!fs->output_needed)
739 if (picture && picture->base.view_id != fs->view_id)
741 for (j = 0; j < fs->num_buffers; j++) {
742 GstVaapiPictureH264 * const pic = fs->buffers[j];
743 if (!pic->output_needed)
745 if (!found_picture || found_picture->base.poc > pic->base.poc ||
746 (found_picture->base.poc == pic->base.poc &&
747 found_picture->base.voc > pic->base.voc))
748 found_picture = pic, found_index = i;
752 if (found_picture_ptr)
753 *found_picture_ptr = found_picture;
754 return found_picture ? found_index : -1;
757 /* Finds the picture with the lowest VOC that needs to be output */
/* MVC companion to dpb_find_lowest_poc(): among pictures in OTHER views
 * (fs->view_id == picture->base.view_id is skipped) that share the same
 * POC as `picture`, find the one with the lowest view order index.
 * Returns the DPB index, or -1 when none qualifies. */
759 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
760 GstVaapiPictureH264 **found_picture_ptr)
762 GstVaapiDecoderH264Private * const priv = &decoder->priv;
763 GstVaapiPictureH264 *found_picture = NULL;
764 guint i, j, found_index;
766 for (i = 0; i < priv->dpb_count; i++) {
767 GstVaapiFrameStore * const fs = priv->dpb[i];
768 if (!fs->output_needed || fs->view_id == picture->base.view_id)
770 for (j = 0; j < fs->num_buffers; j++) {
771 GstVaapiPictureH264 * const pic = fs->buffers[j];
772 if (!pic->output_needed || pic->base.poc != picture->base.poc)
774 if (!found_picture || found_picture->base.voc > pic->base.voc)
775 found_picture = pic, found_index = i;
779 if (found_picture_ptr)
780 *found_picture_ptr = found_picture;
781 return found_picture ? found_index : -1;
785 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
786 GstVaapiPictureH264 *picture, guint voc)
788 GstVaapiDecoderH264Private * const priv = &decoder->priv;
789 GstVaapiPictureH264 *found_picture;
793 if (priv->max_views == 1)
796 /* Emit all other view components that were in the same access
797 unit than the picture we have just found */
798 found_picture = picture;
800 found_index = dpb_find_lowest_voc(decoder, found_picture,
802 if (found_index < 0 || found_picture->base.voc >= voc)
804 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
805 dpb_evict(decoder, found_picture, found_index);
/* "Bumping" process (C.4.5.3): outputs the lowest-POC picture awaiting
 * output and evicts its frame store if possible. For MVC (max_views > 1)
 * also flushes the other view components of the same access unit.
 * NOTE(review): several lines are missing from this listing (early-return
 * paths between the visible statements) — consult the full source before
 * relying on the exact control flow. */
813 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
815 GstVaapiDecoderH264Private * const priv = &decoder->priv;
816 GstVaapiPictureH264 *found_picture;
820 found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
/* Emit lower-VOC views of the found picture's AU first. */
824 if (picture && picture->base.poc != found_picture->base.poc)
825 dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
827 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
828 dpb_evict(decoder, found_picture, found_index);
829 if (priv->max_views == 1)
/* Then the remaining views of that AU, unbounded VOC. */
832 if (picture && picture->base.poc != found_picture->base.poc)
833 dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
/* Clears DPB entries. With `picture` non-NULL only entries of that view
 * are dropped and the array is compacted; with NULL everything goes
 * (flush-all). NOTE(review): lines are missing from this listing between
 * the two loops (gaps in original numbering). */
838 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 GstVaapiDecoderH264Private * const priv = &decoder->priv;
843 for (i = 0; i < priv->dpb_count; i++) {
844 if (picture && picture->base.view_id != priv->dpb[i]->view_id)
846 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
/* Compact surviving entries to the front of the array. */
849 for (i = 0, n = 0; i < priv->dpb_count; i++) {
851 priv->dpb[n++] = priv->dpb[i];
855 /* Clear previous frame buffers only if this is a "flush-all" operation,
856    or if the picture is the first one in the access unit */
857 if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
858 GST_VAAPI_PICTURE_FLAG_AU_START)) {
859 for (i = 0; i < priv->max_views; i++)
860 gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
/* Flushes the DPB: bump out every picture still awaiting output, then
 * clear the remaining stores (restricted to `picture`'s view if given). */
865 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
867 while (dpb_bump(decoder, picture))
869 dpb_clear(decoder, picture);
873 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
875 GstVaapiDecoderH264Private * const priv = &decoder->priv;
876 const gboolean is_last_picture = /* in the access unit */
877 GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
880 // Remove all unused inter-view only reference components of the current AU
882 while (i < priv->dpb_count) {
883 GstVaapiFrameStore * const fs = priv->dpb[i];
884 if (fs->view_id != picture->base.view_id &&
885 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
887 !is_inter_view_reference_for_next_frames(decoder, fs)))
888 dpb_remove_index(decoder, i);
/* Stores a decoded picture into the DPB, implementing C.4.5.1 (reference
 * pictures) and C.4.5.2 (non-reference pictures), with MVC extensions.
 * NOTE(review): this listing is missing lines throughout (gaps in original
 * numbering) — early returns and error paths between the visible
 * statements are not shown. */
895 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
897 GstVaapiDecoderH264Private * const priv = &decoder->priv;
898 GstVaapiFrameStore *fs;
901 if (priv->max_views > 1)
902 dpb_prune_mvc(decoder, picture);
904 // Remove all unused pictures
/* IDR pictures empty the DPB elsewhere; for non-IDR, drop same-view
 * stores that need no output and hold no reference. */
905 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
907 while (i < priv->dpb_count) {
908 GstVaapiFrameStore * const fs = priv->dpb[i];
909 if (fs->view_id == picture->base.view_id &&
910 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
911 dpb_remove_index(decoder, i);
917 // Check if picture is the second field and the first field is still in DPB
918 if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
919 !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
920 const gint found_index = dpb_find_picture(decoder,
921 GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
/* Pair the second field into the first field's existing store. */
922 if (found_index >= 0)
923 return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
926 // Create new frame store, and split fields if necessary
927 fs = gst_vaapi_frame_store_new(picture);
930 gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
931 gst_vaapi_frame_store_unref(fs);
933 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
934 if (!gst_vaapi_frame_store_split_fields(fs))
938 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
/* Make room by bumping until a slot is free. */
939 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
940 while (priv->dpb_count == priv->dpb_size) {
941 if (!dpb_bump(decoder, picture))
946 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
/* Inter-view-only references must be kept until the end of their AU
 * even though they are not output and not "reference" pictures. */
948 const gboolean StoreInterViewOnlyRefFlag =
949 !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
950 GST_VAAPI_PICTURE_FLAG_AU_END) &&
951 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
952 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
953 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
955 while (priv->dpb_count == priv->dpb_size) {
/* If nothing in the DPB outranks this picture, output it directly
 * without storing it. */
956 if (!StoreInterViewOnlyRefFlag) {
957 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
958 return dpb_output(decoder, NULL, picture);
960 if (!dpb_bump(decoder, picture))
965 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
966 if (picture->output_flag) {
967 picture->output_needed = TRUE;
974 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
976 GstVaapiDecoderH264Private * const priv = &decoder->priv;
978 if (dpb_size < priv->dpb_count)
981 if (dpb_size > priv->dpb_size_max) {
982 priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
985 memset(&priv->dpb[priv->dpb_size_max], 0,
986 (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
987 priv->dpb_size_max = dpb_size;
990 if (priv->dpb_size < dpb_size)
991 priv->dpb_size = dpb_size;
992 else if (dpb_size < priv->dpb_count)
995 GST_DEBUG("DPB size %u", priv->dpb_size);
1000 unref_inter_view(GstVaapiPictureH264 *picture)
1004 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1005 gst_vaapi_picture_unref(picture);
1008 /* Resets MVC resources */
1010 mvc_reset(GstVaapiDecoderH264 *decoder)
1012 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1015 // Resize array of inter-view references
1016 if (!priv->inter_views) {
1017 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1018 (GDestroyNotify)unref_inter_view);
1019 if (!priv->inter_views)
1023 // Resize array of previous frame buffers
1024 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1025 gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1027 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1028 sizeof(*priv->prev_frames));
1029 if (!priv->prev_frames) {
1030 priv->prev_frames_alloc = 0;
1033 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1034 priv->prev_frames[i] = NULL;
1035 priv->prev_frames_alloc = priv->max_views;
/* Maps a GstH264ParserResult from the codecparsers library onto the
 * decoder's own status codes; unknown results become ERROR_UNKNOWN. */
1039 static GstVaapiDecoderStatus
1040 get_status(GstH264ParserResult result)
1042 GstVaapiDecoderStatus status;
1045 case GST_H264_PARSER_OK:
1046 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1048 case GST_H264_PARSER_NO_NAL_END:
1049 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1051 case GST_H264_PARSER_ERROR:
1052 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1055 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1062 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1064 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1066 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1067 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1068 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1070 dpb_clear(decoder, NULL);
1072 if (priv->inter_views) {
1073 g_ptr_array_unref(priv->inter_views);
1074 priv->inter_views = NULL;
1078 gst_h264_nal_parser_free(priv->parser);
1079 priv->parser = NULL;
1084 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1086 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1088 gst_vaapi_decoder_h264_close(decoder);
1090 priv->parser = gst_h264_nal_parser_new();
1097 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1099 GstVaapiDecoderH264 * const decoder =
1100 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1101 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1104 gst_vaapi_decoder_h264_close(decoder);
1110 g_free(priv->prev_frames);
1111 priv->prev_frames = NULL;
1112 priv->prev_frames_alloc = 0;
1114 for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1115 gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1116 gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1118 for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1119 gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1120 gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1124 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1126 GstVaapiDecoderH264 * const decoder =
1127 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1128 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1130 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1131 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1132 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1133 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1134 priv->progressive_sequence = TRUE;
1138 /* Activates the supplied PPS */
/* Looks up the parser info cached by decode_pps() for pps->id and makes
 * it the active PPS. Returns the parsed PPS, or NULL if that id was
 * never decoded. */
1140 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1142 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1143 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1145 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1146 return pi ? &pi->data.pps : NULL;
1149 /* Returns the active PPS */
/* Returns NULL when no PPS has been activated yet. */
1150 static inline GstH264PPS *
1151 get_pps(GstVaapiDecoderH264 *decoder)
1153 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1155 return pi ? &pi->data.pps : NULL;
1158 /* Activate the supplied SPS */
/* Mirrors ensure_pps(): promotes the cached parser info for sps->id to
 * the active SPS. Returns the parsed SPS, or NULL if that id was never
 * decoded. */
1160 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1162 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1163 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1165 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1166 return pi ? &pi->data.sps : NULL;
1169 /* Returns the active SPS */
/* Returns NULL when no SPS has been activated yet. */
1170 static inline GstH264SPS *
1171 get_sps(GstVaapiDecoderH264 *decoder)
1173 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1175 return pi ? &pi->data.sps : NULL;
/* Appends @profile to @profiles[] and then appends known compatible
 * fallbacks (e.g. a High-profile decoder can handle Main streams, so
 * Main additionally pushes High). Caller guarantees the array is large
 * enough; @n_profiles_ptr is updated in place.
 * NOTE(review): the enclosing switch statement was partially lost in
 * extraction — only the MAIN case is visible here. */
1179 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1180 GstVaapiProfile profile)
1182 guint n_profiles = *n_profiles_ptr;
1184 profiles[n_profiles++] = profile;
1186 case GST_VAAPI_PROFILE_H264_MAIN:
1187 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1192 *n_profiles_ptr = n_profiles;
1195 /* Fills in compatible profiles for MVC decoding */
/* For MVC streams, decides whether plain H.264 High profile is an
 * acceptable fallback: only drivers known to support slice-level
 * decoding (matched by display vendor string prefix) and only when the
 * DPB is small enough (<= 16). */
1197 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1198 guint *n_profiles_ptr, guint dpb_size)
1200 const gchar * const vendor_string =
1201 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1203 gboolean add_high_profile = FALSE;
1208 const struct map *m;
1210 // Drivers that support slice level decoding
1211 if (vendor_string && dpb_size <= 16) {
/* str_len (17) is presumably strlen("Intel i965 driver") — the prefix
 * length used for the case-insensitive match below. TODO confirm. */
1212 static const struct map drv_names[] = {
1213 { "Intel i965 driver", 17 },
1216 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1217 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1218 add_high_profile = TRUE;
1222 if (add_high_profile)
1223 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps the SPS profile_idc to a VA profile, builds a list of compatible
 * candidate profiles (per the A.2.x compatibility rules and MVC
 * extensions), and returns the first one the display actually supports
 * for the current entrypoint. Returns GST_VAAPI_PROFILE_UNKNOWN when
 * nothing matches. */
1226 static GstVaapiProfile
1227 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1229 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1230 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1231 GstVaapiProfile profile, profiles[4];
1232 guint i, n_profiles = 0;
1234 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1236 return GST_VAAPI_PROFILE_UNKNOWN;
1238 fill_profiles(profiles, &n_profiles, profile);
1240 case GST_VAAPI_PROFILE_H264_BASELINE:
1241 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1242 fill_profiles(profiles, &n_profiles,
1243 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1244 fill_profiles(profiles, &n_profiles,
1245 GST_VAAPI_PROFILE_H264_MAIN);
1248 case GST_VAAPI_PROFILE_H264_EXTENDED:
1249 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1250 fill_profiles(profiles, &n_profiles,
1251 GST_VAAPI_PROFILE_H264_MAIN);
1254 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
/* With exactly two views, Stereo High is an equivalent candidate. */
1255 if (priv->max_views == 2) {
1256 fill_profiles(profiles, &n_profiles,
1257 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1259 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1261 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1262 if (sps->frame_mbs_only_flag) {
1263 fill_profiles(profiles, &n_profiles,
1264 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1266 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1272 /* If the preferred profile (profiles[0]) matches one that we already
1273 found, then just return it now instead of searching for it again */
1274 if (profiles[0] == priv->profile)
1275 return priv->profile;
1277 for (i = 0; i < n_profiles; i++) {
1278 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1281 return GST_VAAPI_PROFILE_UNKNOWN;
/* Ensures a VA context exists that matches the supplied SPS: computes the
 * required DPB size, detects any change in profile, chroma format or
 * coded size (each forces a context reset), updates interlace/PAR info
 * on the base decoder, then (re)creates the context and resets the DPB
 * and MVC state. */
1284 static GstVaapiDecoderStatus
1285 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1287 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1288 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1289 GstVaapiContextInfo info;
1290 GstVaapiProfile profile;
1291 GstVaapiChromaType chroma_type;
1292 gboolean reset_context = FALSE;
1293 guint mb_width, mb_height, dpb_size;
/* A larger DPB requirement always forces a context reset. */
1295 dpb_size = get_max_dec_frame_buffering(sps);
1296 if (priv->dpb_size < dpb_size) {
1297 GST_DEBUG("DPB size increased");
1298 reset_context = TRUE;
1301 profile = get_profile(decoder, sps, dpb_size);
1303 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1304 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* For MVC (max_views > 1) a profile change alone does not reset. */
1307 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1308 GST_DEBUG("profile changed");
1309 reset_context = TRUE;
1310 priv->profile = profile;
1313 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1315 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1316 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1319 if (priv->chroma_type != chroma_type) {
1320 GST_DEBUG("chroma format changed");
1321 reset_context = TRUE;
1322 priv->chroma_type = chroma_type;
/* Map units count double in field-coded (non frame_mbs_only) streams. */
1325 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1326 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1327 !sps->frame_mbs_only_flag;
1328 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1329 GST_DEBUG("size changed");
1330 reset_context = TRUE;
1331 priv->mb_width = mb_width;
1332 priv->mb_height = mb_height;
1335 priv->progressive_sequence = sps->frame_mbs_only_flag;
1337 /* XXX: we only output complete frames for now */
1338 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1341 gst_vaapi_decoder_set_pixel_aspect_ratio(
1343 sps->vui_parameters.par_n,
1344 sps->vui_parameters.par_d
/* Nothing changed and a context already exists: fast path out. */
1347 if (!reset_context && priv->has_context)
1348 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1350 /* XXX: fix surface size when cropping is implemented */
1351 info.profile = priv->profile;
1352 info.entrypoint = priv->entrypoint;
1353 info.chroma_type = priv->chroma_type;
1354 info.width = sps->width;
1355 info.height = sps->height;
1356 info.ref_frames = dpb_size;
1358 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1359 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1360 priv->has_context = TRUE;
1363 if (!dpb_reset(decoder, dpb_size))
1364 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1366 /* Reset MVC data */
1367 if (!mvc_reset(decoder))
1368 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1369 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Converts the six 4x4 scaling lists from the bitstream's zigzag order
 * to the raster order expected by VAIQMatrixBufferH264. */
1373 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1374 const GstH264SPS *sps)
1378 /* There are always 6 4x4 scaling lists */
1379 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1380 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1382 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1383 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1384 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Converts the 8x8 scaling lists from zigzag to raster order. Does
 * nothing when transform_8x8_mode is disabled; fills 2 lists for 4:2:0
 * and 4:2:2 streams, 6 for 4:4:4. */
1388 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1389 const GstH264SPS *sps)
1393 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1394 if (!pps->transform_8x8_mode_flag)
1397 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1398 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1400 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1401 for (i = 0; i < n; i++) {
1402 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1403 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates the picture's VA IQ-matrix buffer and populates it from the
 * active PPS/SPS scaling lists. Rejects 4:4:4 streams because the VA
 * ScalingList8x8 storage cannot hold all six 8x8 lists. */
1407 static GstVaapiDecoderStatus
1408 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1410 GstVaapiPicture * const base_picture = &picture->base;
1411 GstH264PPS * const pps = get_pps(decoder);
1412 GstH264SPS * const sps = get_sps(decoder);
1413 VAIQMatrixBufferH264 *iq_matrix;
1415 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1416 if (!base_picture->iq_matrix) {
1417 GST_ERROR("failed to allocate IQ matrix");
1418 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1420 iq_matrix = base_picture->iq_matrix->param;
1422 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1423 is not large enough to hold lists for 4:4:4 */
1424 if (sps->chroma_format_idc == 3)
1425 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1427 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1428 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1430 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns TRUE when every bit of @ref_state is set in @state. */
1433 static inline gboolean
1434 is_valid_state(guint state, guint ref_state)
1436 return (state & ref_state) == ref_state;
/* Finalizes the picture currently being decoded: verifies the decoder
 * reached a valid-picture state, applies reference picture marking, adds
 * the picture to the DPB, and submits it for hardware decode. On any
 * failure the current picture is dropped.
 * NOTE(review): error-path labels/braces were lost in extraction. */
1439 static GstVaapiDecoderStatus
1440 decode_current_picture(GstVaapiDecoderH264 *decoder)
1442 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1443 GstVaapiPictureH264 * const picture = priv->current_picture;
1445 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1447 priv->decoder_state = 0;
1450 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1452 if (!exec_ref_pic_marking(decoder, picture))
1454 if (!dpb_add(decoder, picture))
1456 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1458 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1459 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1462 /* XXX: fix for cases where first field failed to be decoded */
1463 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1464 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1467 priv->decoder_state = 0;
1468 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into the unit's parser info and records the
 * got-SPS parser state. Resets parser_state first: a new SPS invalidates
 * everything previously parsed. */
1471 static GstVaapiDecoderStatus
1472 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1474 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1475 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1476 GstH264SPS * const sps = &pi->data.sps;
1477 GstH264ParserResult result;
1479 GST_DEBUG("parse SPS");
1481 priv->parser_state = 0;
1483 /* Variables that don't have inferred values per the H.264
1484 standard but that should get a default value anyway */
1485 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1487 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1488 if (result != GST_H264_PARSER_OK)
1489 return get_status(result);
1491 /* Reset defaults */
1492 priv->max_views = 1;
1494 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1495 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS NAL unit (MVC extension). Unlike parse_sps() this
 * does not clear parser_state, since a subset SPS supplements rather
 * than replaces the base stream state. */
1498 static GstVaapiDecoderStatus
1499 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1501 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1502 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1503 GstH264SPS * const sps = &pi->data.sps;
1504 GstH264ParserResult result;
1506 GST_DEBUG("parse subset SPS");
1508 /* Variables that don't have inferred values per the H.264
1509 standard but that should get a default value anyway */
1510 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1512 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1514 if (result != GST_H264_PARSER_OK)
1515 return get_status(result);
1517 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1518 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit and records the got-PPS parser state. The
 * `&= GOT_SPS` keeps only the got-SPS bit: a new PPS invalidates any
 * previously parsed slice state. */
1521 static GstVaapiDecoderStatus
1522 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1524 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1525 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1526 GstH264PPS * const pps = &pi->data.pps;
1527 GstH264ParserResult result;
1529 GST_DEBUG("parse PPS");
1531 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1533 /* Variables that don't have inferred values per the H.264
1534 standard but that should get a default value anyway */
1535 pps->slice_group_map_type = 0;
1536 pps->slice_group_change_rate_minus1 = 0;
1538 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1539 if (result != GST_H264_PARSER_OK)
1540 return get_status(result);
1542 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1543 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses SEI messages into the unit's parser info (a GArray owned by the
 * parser info). Failures are logged and propagated but do not touch the
 * decoder's parser state. */
1546 static GstVaapiDecoderStatus
1547 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1549 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1550 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1551 GArray ** const sei_ptr = &pi->data.sei;
1552 GstH264ParserResult result;
1554 GST_DEBUG("parse SEI");
1556 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1557 if (result != GST_H264_PARSER_OK) {
1558 GST_WARNING("failed to parse SEI messages");
1559 return get_status(result);
1561 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC streams, propagates the NAL
 * extension from a preceding Prefix NAL unit (or synthesizes defaults
 * per H.7.4.1.1 if none preceded), then updates the view bookkeeping
 * (view_id, view order index, maximum view count). */
1564 static GstVaapiDecoderStatus
1565 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1567 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1568 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1569 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1570 GstH264NalUnit * const nalu = &pi->nalu;
1572 GstH264ParserResult result;
1575 GST_DEBUG("parse slice");
/* Keep only the SPS/PPS bits: a new slice invalidates later state. */
1577 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1578 GST_H264_VIDEO_STATE_GOT_PPS);
1580 /* Propagate Prefix NAL unit info, if necessary */
1581 switch (nalu->type) {
1582 case GST_H264_NAL_SLICE:
1583 case GST_H264_NAL_SLICE_IDR: {
1584 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1585 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1586 /* MVC sequences shall have a Prefix NAL unit immediately
1587 preceding this NAL unit */
1588 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1589 pi->nalu.extension = prev_pi->nalu.extension;
1592 /* In the very unlikely case there is no Prefix NAL unit
1593 immediately preceding this NAL unit, try to infer some
1594 defaults (H.7.4.1.1) */
1595 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1596 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1597 nalu->idr_pic_flag = !mvc->non_idr_flag;
1598 mvc->priority_id = 0;
1600 mvc->temporal_id = 0;
1601 mvc->anchor_pic_flag = 0;
1602 mvc->inter_view_flag = 1;
1608 /* Variables that don't have inferred values per the H.264
1609 standard but that should get a default value anyway */
1610 slice_hdr->cabac_init_idc = 0;
1611 slice_hdr->direct_spatial_mv_pred_flag = 0;
1613 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1614 slice_hdr, TRUE, TRUE);
1615 if (result != GST_H264_PARSER_OK)
1616 return get_status(result);
1618 sps = slice_hdr->pps->sequence;
1620 /* Update MVC data */
1621 num_views = get_num_views(sps);
1622 if (priv->max_views < num_views) {
1623 priv->max_views = num_views;
1624 GST_DEBUG("maximum number of views changed to %u", num_views);
1626 pi->view_id = get_view_id(&pi->nalu);
1627 pi->voc = get_view_order_index(sps, pi->view_id);
1629 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1630 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches the parsed SPS parser info under its id for later activation
 * by ensure_sps(). */
1633 static GstVaapiDecoderStatus
1634 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1636 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1637 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1638 GstH264SPS * const sps = &pi->data.sps;
1640 GST_DEBUG("decode SPS");
1642 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1643 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches a parsed subset SPS (MVC) in the same sps[] table as regular
 * SPS entries, keyed by id. */
1646 static GstVaapiDecoderStatus
1647 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1649 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1650 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1651 GstH264SPS * const sps = &pi->data.sps;
1653 GST_DEBUG("decode subset SPS");
1655 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1656 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Caches the parsed PPS parser info under its id for later activation
 * by ensure_pps(). */
1659 static GstVaapiDecoderStatus
1660 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1662 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1663 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1664 GstH264PPS * const pps = &pi->data.pps;
1666 GST_DEBUG("decode PPS");
1668 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1669 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles end-of-sequence: finalizes the picture in flight, then flushes
 * all remaining pictures out of the DPB. */
1672 static GstVaapiDecoderStatus
1673 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1675 GstVaapiDecoderStatus status;
1677 GST_DEBUG("decode sequence-end");
1679 status = decode_current_picture(decoder);
1680 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1683 dpb_flush(decoder, NULL);
1684 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1687 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Derives PicOrderCntMsb from pic_order_cnt_lsb with wrap-around
 * detection against the previous picture's values, then fills the
 * decoder's field_poc[] according to the picture structure. */
1690 GstVaapiDecoderH264 *decoder,
1691 GstVaapiPictureH264 *picture,
1692 GstH264SliceHdr *slice_hdr
1695 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1696 GstH264SPS * const sps = get_sps(decoder);
1697 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1700 GST_DEBUG("decode picture order count type 0");
/* IDR resets POC tracking; an mmco5 on the previous picture behaves as
 * if POC had been reset there (8.2.1.1). */
1702 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1703 priv->prev_poc_msb = 0;
1704 priv->prev_poc_lsb = 0;
1706 else if (priv->prev_pic_has_mmco5) {
1707 priv->prev_poc_msb = 0;
1708 priv->prev_poc_lsb =
1709 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1710 0 : priv->field_poc[TOP_FIELD]);
1713 priv->prev_poc_msb = priv->poc_msb;
1714 priv->prev_poc_lsb = priv->poc_lsb;
/* (8-3): detect lsb wrap in either direction to adjust the msb. */
1718 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1719 if (priv->poc_lsb < priv->prev_poc_lsb &&
1720 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1721 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1722 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1723 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1724 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1726 priv->poc_msb = priv->prev_poc_msb;
1728 temp_poc = priv->poc_msb + priv->poc_lsb;
1729 switch (picture->structure) {
1730 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1732 priv->field_poc[TOP_FIELD] = temp_poc;
1733 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1734 slice_hdr->delta_pic_order_cnt_bottom;
1736 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1738 priv->field_poc[TOP_FIELD] = temp_poc;
1740 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1742 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1747 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1750 GstVaapiDecoderH264 *decoder,
1751 GstVaapiPictureH264 *picture,
1752 GstH264SliceHdr *slice_hdr
1755 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1756 GstH264SPS * const sps = get_sps(decoder);
1757 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1758 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1761 GST_DEBUG("decode picture order count type 1");
1763 if (priv->prev_pic_has_mmco5)
1764 prev_frame_num_offset = 0;
1766 prev_frame_num_offset = priv->frame_num_offset;
1769 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1770 priv->frame_num_offset = 0;
1771 else if (priv->prev_frame_num > priv->frame_num)
1772 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1774 priv->frame_num_offset = prev_frame_num_offset;
1777 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1778 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1781 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1782 abs_frame_num = abs_frame_num - 1;
1784 if (abs_frame_num > 0) {
1785 gint32 expected_delta_per_poc_cycle;
1786 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1788 expected_delta_per_poc_cycle = 0;
1789 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1790 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1793 poc_cycle_cnt = (abs_frame_num - 1) /
1794 sps->num_ref_frames_in_pic_order_cnt_cycle;
1795 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1796 sps->num_ref_frames_in_pic_order_cnt_cycle;
1799 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1800 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1801 expected_poc += sps->offset_for_ref_frame[i];
1805 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1806 expected_poc += sps->offset_for_non_ref_pic;
1809 switch (picture->structure) {
1810 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1811 priv->field_poc[TOP_FIELD] = expected_poc +
1812 slice_hdr->delta_pic_order_cnt[0];
1813 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1814 sps->offset_for_top_to_bottom_field +
1815 slice_hdr->delta_pic_order_cnt[1];
1817 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1818 priv->field_poc[TOP_FIELD] = expected_poc +
1819 slice_hdr->delta_pic_order_cnt[0];
1821 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1822 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1823 sps->offset_for_top_to_bottom_field +
1824 slice_hdr->delta_pic_order_cnt[0];
1829 /* 8.2.1.3 - Decoding process for picture order count type 2 */
/* POC is derived directly from frame_num: 2*n for reference pictures,
 * 2*n-1 for non-reference, 0 for IDR. Both fields get the same POC. */
1832 GstVaapiDecoderH264 *decoder,
1833 GstVaapiPictureH264 *picture,
1834 GstH264SliceHdr *slice_hdr
1837 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1838 GstH264SPS * const sps = get_sps(decoder);
1839 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1840 gint32 prev_frame_num_offset, temp_poc;
1842 GST_DEBUG("decode picture order count type 2");
1844 if (priv->prev_pic_has_mmco5)
1845 prev_frame_num_offset = 0;
1847 prev_frame_num_offset = priv->frame_num_offset;
/* (8-11): accumulate MaxFrameNum on each frame_num wrap. */
1850 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1851 priv->frame_num_offset = 0;
1852 else if (priv->prev_frame_num > priv->frame_num)
1853 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1855 priv->frame_num_offset = prev_frame_num_offset;
1858 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1860 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1861 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1863 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1866 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1867 priv->field_poc[TOP_FIELD] = temp_poc;
1868 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1869 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1872 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches to the POC derivation matching sps->pic_order_cnt_type,
 * copies the computed field POCs into the picture, and sets the frame
 * POC to the smaller of the two field POCs. */
1875 GstVaapiDecoderH264 *decoder,
1876 GstVaapiPictureH264 *picture,
1877 GstH264SliceHdr *slice_hdr
1880 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1881 GstH264SPS * const sps = get_sps(decoder);
1883 switch (sps->pic_order_cnt_type) {
1885 init_picture_poc_0(decoder, picture, slice_hdr);
1888 init_picture_poc_1(decoder, picture, slice_hdr);
1891 init_picture_poc_2(decoder, picture, slice_hdr);
/* Only copy the field POC(s) this picture actually carries. */
1895 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1896 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1897 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1898 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1899 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
/* qsort comparator: sorts pictures by decreasing pic_num.
 * NOTE(review): subtraction comparators assume the operands are close
 * enough that the difference cannot overflow an int. */
1903 compare_picture_pic_num_dec(const void *a, const void *b)
1905 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1906 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1908 return picB->pic_num - picA->pic_num;
/* qsort comparator: sorts pictures by increasing long_term_pic_num. */
1912 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1914 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1917 return picA->long_term_pic_num - picB->long_term_pic_num;
/* qsort comparator: sorts pictures by decreasing POC. */
1921 compare_picture_poc_dec(const void *a, const void *b)
1923 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1924 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1926 return picB->base.poc - picA->base.poc;
/* qsort comparator: sorts pictures by increasing POC. */
1930 compare_picture_poc_inc(const void *a, const void *b)
1932 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1933 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1935 return picA->base.poc - picB->base.poc;
/* qsort comparator: sorts pictures by decreasing frame_num_wrap. */
1939 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1941 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1942 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1944 return picB->frame_num_wrap - picA->frame_num_wrap;
/* qsort comparator: sorts pictures by increasing long_term_frame_idx. */
1948 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1950 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1951 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1953 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1956 /* 8.2.4.1 - Decoding process for picture numbers */
/* Computes pic_num/frame_num_wrap for all short-term references and
 * long_term_pic_num for all long-term references, restricted to pictures
 * of the current view. Field pictures use the doubled numbering with a
 * +1 bias for same-parity references (8-28..8-33). */
1958 init_picture_refs_pic_num(
1959 GstVaapiDecoderH264 *decoder,
1960 GstVaapiPictureH264 *picture,
1961 GstH264SliceHdr *slice_hdr
1964 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1965 GstH264SPS * const sps = get_sps(decoder);
1966 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1969 GST_DEBUG("decode picture numbers");
1971 for (i = 0; i < priv->short_ref_count; i++) {
1972 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Skip references belonging to other MVC views. */
1975 if (pic->base.view_id != picture->base.view_id)
/* (8-27): unwrap frame_num relative to the current picture. */
1979 if (pic->frame_num > priv->frame_num)
1980 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1982 pic->frame_num_wrap = pic->frame_num;
1984 // (8-28, 8-30, 8-31)
1985 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1986 pic->pic_num = pic->frame_num_wrap;
1988 if (pic->structure == picture->structure)
1989 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1991 pic->pic_num = 2 * pic->frame_num_wrap;
1995 for (i = 0; i < priv->long_ref_count; i++) {
1996 GstVaapiPictureH264 * const pic = priv->long_ref[i];
1999 if (pic->base.view_id != picture->base.view_id)
2002 // (8-29, 8-32, 8-33)
2003 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2004 pic->long_term_pic_num = pic->long_term_frame_idx;
2006 if (pic->structure == picture->structure)
2007 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2009 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* qsort wrapper pairing a reference list with one of the
 * compare_picture_* comparators above. */
2014 #define SORT_REF_LIST(list, n, compare_func) \
2015 qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 helper: appends fields from @ref_list to @RefPicList,
 * alternating parity — one field matching @picture_structure, then one
 * of the opposite parity — until the source list is exhausted.
 * @RefPicList_count is updated in place. */
2018 init_picture_refs_fields_1(
2019 guint picture_structure,
2020 GstVaapiPictureH264 *RefPicList[32],
2021 guint *RefPicList_count,
2022 GstVaapiPictureH264 *ref_list[32],
2023 guint ref_list_count
2030 n = *RefPicList_count;
/* Take the next same-parity field... */
2033 for (; i < ref_list_count; i++) {
2034 if (ref_list[i]->structure == picture_structure) {
2035 RefPicList[n++] = ref_list[i++];
/* ...then the next opposite-parity field. */
2039 for (; j < ref_list_count; j++) {
2040 if (ref_list[j]->structure != picture_structure) {
2041 RefPicList[n++] = ref_list[j++];
2045 } while (i < ref_list_count || j < ref_list_count);
2046 *RefPicList_count = n;
/* Builds a field reference picture list (8.2.4.2.5): short-term fields
 * first, then long-term fields, each interleaved by parity relative to
 * the current picture's structure. */
2050 init_picture_refs_fields(
2051 GstVaapiPictureH264 *picture,
2052 GstVaapiPictureH264 *RefPicList[32],
2053 guint *RefPicList_count,
2054 GstVaapiPictureH264 *short_ref[32],
2055 guint short_ref_count,
2056 GstVaapiPictureH264 *long_ref[32],
2057 guint long_ref_count
2062 /* 8.2.4.2.5 - reference picture lists in fields */
2063 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2064 short_ref, short_ref_count);
2065 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2066 long_ref, long_ref_count);
2067 *RefPicList_count = n;
2070 /* Finds the inter-view reference picture with the supplied view id */
/* Linear scan of priv->inter_views; logs a warning and (presumably)
 * returns NULL when no picture matches — the miss-path return was lost
 * in extraction. */
2071 static GstVaapiPictureH264 *
2072 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2074 GPtrArray * const inter_views = decoder->priv.inter_views;
2077 for (i = 0; i < inter_views->len; i++) {
2078 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2079 if (picture->base.view_id == view_id)
2083 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2088 /* Checks whether the view id exists in the supplied list of view ids */
2090 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2094 for (i = 0; i < num_view_ids; i++) {
2095 if (view_ids[i] == view_id)
/* Checks whether @view_id appears among the inter-view reference lists
 * of @view: the anchor L0/L1 lists for anchor pictures, otherwise the
 * non-anchor L0/L1 lists. NOTE(review): the is_anchor branch condition
 * was lost in extraction. */
2102 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2106 return (find_view_id(view_id, view->anchor_ref_l0,
2107 view->num_anchor_refs_l0) ||
2108 find_view_id(view_id, view->anchor_ref_l1,
2109 view->num_anchor_refs_l1));
2111 return (find_view_id(view_id, view->non_anchor_ref_l0,
2112 view->num_non_anchor_refs_l0) ||
2113 find_view_id(view_id, view->non_anchor_ref_l1,
2114 view->num_non_anchor_refs_l1));
2117 /* Checks whether the inter-view reference picture with the supplied
2118 view id is used for decoding the current view component picture */
/* Non-MVC pictures (or non-MVC SPS) can never use inter-view refs. */
2120 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2121 guint16 view_id, GstVaapiPictureH264 *picture)
2123 const GstH264SPS * const sps = get_sps(decoder);
2126 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2127 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2130 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2131 return find_view_id_in_view(view_id,
2132 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2135 /* Checks whether the supplied inter-view reference picture is used
2136 for decoding the next view component pictures */
/* Scans all views with a higher view order index than @picture's; TRUE
 * if any of them lists @picture's view_id as an inter-view reference. */
2138 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2139 GstVaapiPictureH264 *picture)
2141 const GstH264SPS * const sps = get_sps(decoder);
2145 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2146 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2149 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2150 num_views = sps->extension.mvc.num_views_minus1 + 1;
2151 for (i = picture->base.voc + 1; i < num_views; i++) {
2152 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2153 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2159 /* H.8.2.1 - Initialization process for inter-view prediction references */
/* Appends inter-view reference pictures (looked up by view id) to
 * @ref_list, stopping at @num_refs entries; missing views are skipped.
 * @ref_list_count_ptr is updated in place. */
2161 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2162 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2163 const guint16 *view_ids, guint num_view_ids)
2167 n = *ref_list_count_ptr;
2168 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2169 GstVaapiPictureH264 * const pic =
2170 find_inter_view_reference(decoder, view_ids[j]);
2172 ref_list[n++] = pic;
2174 *ref_list_count_ptr = n;
/* Appends inter-view prediction references (H.8.2.1) to RefPicList0 or
 * RefPicList1, choosing the SPS MVC view's anchor or non-anchor lists
 * depending on whether the current picture is an anchor picture. */
2178 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2179 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2181 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2182 const GstH264SPS * const sps = get_sps(decoder);
2183 const GstH264SPSExtMVCView *view;
2185 GST_DEBUG("initialize reference picture list for inter-view prediction");
2187 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2189 view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pastes list index (0/1) and list kind (anchor/non-anchor) into
 * the matching priv->RefPicListX / slice header / SPS view fields. */
2191 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2192 init_picture_refs_mvc_1(decoder, \
2193 priv->RefPicList##ref_list, \
2194 &priv->RefPicList##ref_list##_count, \
2195 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2196 view->view_list##_l##ref_list, \
2197 view->num_##view_list##s_l##ref_list); \
2201 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2202 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2204 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2207 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2208 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2210 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2213 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds RefPicList0 for P/SP slices: frames use short-term refs sorted
 * by decreasing pic_num followed by long-term refs sorted by increasing
 * long_term_pic_num (8.2.4.2.1); fields sort by frame_num_wrap /
 * long_term_frame_idx and then interleave parities (8.2.4.2.2). MVC
 * inter-view references are appended last. */
2217 init_picture_refs_p_slice(
2218 GstVaapiDecoderH264 *decoder,
2219 GstVaapiPictureH264 *picture,
2220 GstH264SliceHdr *slice_hdr
2223 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2224 GstVaapiPictureH264 **ref_list;
2227 GST_DEBUG("decode reference picture list for P and SP slices");
2229 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2230 /* 8.2.4.2.1 - P and SP slices in frames */
2231 if (priv->short_ref_count > 0) {
2232 ref_list = priv->RefPicList0;
2233 for (i = 0; i < priv->short_ref_count; i++)
2234 ref_list[i] = priv->short_ref[i];
2235 SORT_REF_LIST(ref_list, i, pic_num_dec);
2236 priv->RefPicList0_count += i;
/* Long-term refs follow the short-term ones in the list. */
2239 if (priv->long_ref_count > 0) {
2240 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2241 for (i = 0; i < priv->long_ref_count; i++)
2242 ref_list[i] = priv->long_ref[i];
2243 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2244 priv->RefPicList0_count += i;
2248 /* 8.2.4.2.2 - P and SP slices in fields */
2249 GstVaapiPictureH264 *short_ref[32];
2250 guint short_ref_count = 0;
2251 GstVaapiPictureH264 *long_ref[32];
2252 guint long_ref_count = 0;
/* Sort working copies first; parity interleaving happens below. */
2254 if (priv->short_ref_count > 0) {
2255 for (i = 0; i < priv->short_ref_count; i++)
2256 short_ref[i] = priv->short_ref[i];
2257 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2258 short_ref_count = i;
2261 if (priv->long_ref_count > 0) {
2262 for (i = 0; i < priv->long_ref_count; i++)
2263 long_ref[i] = priv->long_ref[i];
2264 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2268 init_picture_refs_fields(
2270 priv->RefPicList0, &priv->RefPicList0_count,
2271 short_ref, short_ref_count,
2272 long_ref, long_ref_count
2276 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2278 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* Builds the initial (unmodified) RefPicList0 and RefPicList1 for B slices.
 * Frame coding (8.2.4.2.3): list 0 = short-term refs with POC < current
 * (descending POC), then POC >= current (ascending POC), then long-term;
 * list 1 is the mirror image (POC > current first).
 * Field coding (8.2.4.2.4): builds refFrameList0ShortTerm /
 * refFrameList1ShortTerm / refFrameListLongTerm, then interleaves fields.
 * If the two resulting lists come out identical, the first two entries of
 * RefPicList1 are swapped per the spec. MVC inter-view refs are appended
 * to both lists at the end.
 * NOTE(review): listing appears truncated — return type, closing braces
 * and some declarations (e.g. `i`, `n`) are missing from this copy. */
2283 init_picture_refs_b_slice(
2284 GstVaapiDecoderH264 *decoder,
2285 GstVaapiPictureH264 *picture,
2286 GstH264SliceHdr *slice_hdr
2289 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2290 GstVaapiPictureH264 **ref_list;
2293 GST_DEBUG("decode reference picture list for B slices");
2295 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2296 /* 8.2.4.2.3 - B slices in frames */
2299 if (priv->short_ref_count > 0) {
2300 // 1. Short-term references
/* List 0, part 1: refs that precede the current picture, newest first. */
2301 ref_list = priv->RefPicList0;
2302 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2303 if (priv->short_ref[i]->base.poc < picture->base.poc)
2304 ref_list[n++] = priv->short_ref[i];
2306 SORT_REF_LIST(ref_list, n, poc_dec);
2307 priv->RefPicList0_count += n;
/* List 0, part 2: refs that follow the current picture, oldest first. */
2309 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2310 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2311 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2312 ref_list[n++] = priv->short_ref[i];
2314 SORT_REF_LIST(ref_list, n, poc_inc);
2315 priv->RefPicList0_count += n;
2318 if (priv->long_ref_count > 0) {
2319 // 2. Long-term references
2320 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2321 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2322 ref_list[n++] = priv->long_ref[i];
2323 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2324 priv->RefPicList0_count += n;
2328 if (priv->short_ref_count > 0) {
2329 // 1. Short-term references
/* List 1 mirrors list 0: future refs first (ascending POC). */
2330 ref_list = priv->RefPicList1;
2331 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2332 if (priv->short_ref[i]->base.poc > picture->base.poc)
2333 ref_list[n++] = priv->short_ref[i];
2335 SORT_REF_LIST(ref_list, n, poc_inc);
2336 priv->RefPicList1_count += n;
2338 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2339 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2340 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2341 ref_list[n++] = priv->short_ref[i];
2343 SORT_REF_LIST(ref_list, n, poc_dec);
2344 priv->RefPicList1_count += n;
2347 if (priv->long_ref_count > 0) {
2348 // 2. Long-term references
2349 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2350 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2351 ref_list[n++] = priv->long_ref[i];
2352 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2353 priv->RefPicList1_count += n;
2357 /* 8.2.4.2.4 - B slices in fields */
2358 GstVaapiPictureH264 *short_ref0[32];
2359 guint short_ref0_count = 0;
2360 GstVaapiPictureH264 *short_ref1[32];
2361 guint short_ref1_count = 0;
2362 GstVaapiPictureH264 *long_ref[32];
2363 guint long_ref_count = 0;
2365 /* refFrameList0ShortTerm */
2366 if (priv->short_ref_count > 0) {
2367 ref_list = short_ref0;
2368 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2369 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2370 ref_list[n++] = priv->short_ref[i];
2372 SORT_REF_LIST(ref_list, n, poc_dec);
2373 short_ref0_count += n;
2375 ref_list = &short_ref0[short_ref0_count];
2376 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2377 if (priv->short_ref[i]->base.poc > picture->base.poc)
2378 ref_list[n++] = priv->short_ref[i];
2380 SORT_REF_LIST(ref_list, n, poc_inc);
2381 short_ref0_count += n;
2384 /* refFrameList1ShortTerm */
2385 if (priv->short_ref_count > 0) {
2386 ref_list = short_ref1;
2387 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2388 if (priv->short_ref[i]->base.poc > picture->base.poc)
2389 ref_list[n++] = priv->short_ref[i];
2391 SORT_REF_LIST(ref_list, n, poc_inc);
2392 short_ref1_count += n;
2394 ref_list = &short_ref1[short_ref1_count];
2395 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2396 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2397 ref_list[n++] = priv->short_ref[i];
2399 SORT_REF_LIST(ref_list, n, poc_dec);
2400 short_ref1_count += n;
2403 /* refFrameListLongTerm */
2404 if (priv->long_ref_count > 0) {
2405 for (i = 0; i < priv->long_ref_count; i++)
2406 long_ref[i] = priv->long_ref[i];
2407 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2411 init_picture_refs_fields(
2413 priv->RefPicList0, &priv->RefPicList0_count,
2414 short_ref0, short_ref0_count,
2415 long_ref, long_ref_count
2418 init_picture_refs_fields(
2420 priv->RefPicList1, &priv->RefPicList1_count,
2421 short_ref1, short_ref1_count,
2422 long_ref, long_ref_count
2426 /* Check whether RefPicList1 is identical to RefPicList0, then
2427 swap if necessary */
2428 if (priv->RefPicList1_count > 1 &&
2429 priv->RefPicList1_count == priv->RefPicList0_count &&
2430 memcmp(priv->RefPicList0, priv->RefPicList1,
2431 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2432 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2433 priv->RefPicList1[0] = priv->RefPicList1[1];
2434 priv->RefPicList1[1] = tmp;
/* H.8.2.1 - append MVC inter-view references to both lists. */
2437 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2439 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2442 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2446 #undef SORT_REF_LIST
2449 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2451 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2454 for (i = 0; i < priv->short_ref_count; i++) {
2455 if (priv->short_ref[i]->pic_num == pic_num)
2458 GST_ERROR("found no short-term reference picture with PicNum = %d",
2464 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2466 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2469 for (i = 0; i < priv->long_ref_count; i++) {
2470 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2473 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* Applies the ref_pic_list_modification() syntax of the slice header to a
 * single reference list (@list: 0 or 1). Handles three modification kinds:
 * short-term (8.2.4.3.1, idc 0/1), long-term (8.2.4.3.2, idc 2) and MVC
 * inter-view (H.8.2.2.3, idc 4/5). Each step inserts the targeted picture
 * at the current insertion point and compacts the remainder of the list.
 * On exit, the list length is forced to num_ref_idx_lX_active_minus1 + 1.
 * NOTE(review): this listing is truncated (return type, `else` branches,
 * some declarations such as `found_ref_idx`/`PicNumF`, and closing braces
 * are missing from this copy). */
2479 exec_picture_refs_modification_1(
2480 GstVaapiDecoderH264 *decoder,
2481 GstVaapiPictureH264 *picture,
2482 GstH264SliceHdr *slice_hdr,
2486 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2487 GstH264SPS * const sps = get_sps(decoder);
2488 GstH264RefPicListModification *ref_pic_list_modification;
2489 guint num_ref_pic_list_modifications;
2490 GstVaapiPictureH264 **ref_list;
2491 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2492 const guint16 *view_ids = NULL;
2493 guint i, j, n, num_refs, num_view_ids = 0;
2495 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2497 GST_DEBUG("modification process of reference picture list %u", list);
/* Select the l0 or l1 variants of the slice-header fields and the
   corresponding RefPicListX state, plus MVC view-dependency arrays. */
2500 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2501 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2502 ref_list = priv->RefPicList0;
2503 ref_list_count_ptr = &priv->RefPicList0_count;
2504 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2506 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2507 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2508 const GstH264SPSExtMVCView * const view =
2509 &sps->extension.mvc.view[picture->base.voc];
2510 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2511 view_ids = view->anchor_ref_l0;
2512 num_view_ids = view->num_anchor_refs_l0;
2515 view_ids = view->non_anchor_ref_l0;
2516 num_view_ids = view->num_non_anchor_refs_l0;
2521 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2522 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2523 ref_list = priv->RefPicList1;
2524 ref_list_count_ptr = &priv->RefPicList1_count;
2525 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2527 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2528 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2529 const GstH264SPSExtMVCView * const view =
2530 &sps->extension.mvc.view[picture->base.voc];
2531 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2532 view_ids = view->anchor_ref_l1;
2533 num_view_ids = view->num_anchor_refs_l1;
2536 view_ids = view->non_anchor_ref_l1;
2537 num_view_ids = view->num_non_anchor_refs_l1;
2541 ref_list_count = *ref_list_count_ptr;
/* 8.2.4.1 - derive MaxPicNum and CurrPicNum; fields use 2*MaxFrameNum
   and 2*frame_num+1, frames use MaxFrameNum and frame_num. */
2543 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2544 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2545 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2548 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2549 CurrPicNum = slice_hdr->frame_num; // frame_num
2552 picNumPred = CurrPicNum;
2553 picViewIdxPred = -1;
2555 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2556 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
/* idc == 3 terminates the modification loop. */
2557 if (l->modification_of_pic_nums_idc == 3)
2560 /* 8.2.4.3.1 - Short-term reference pictures */
2561 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2562 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2563 gint32 picNum, picNumNoWrap;
/* idc 0 subtracts, idc 1 adds, both modulo MaxPicNum. */
2566 if (l->modification_of_pic_nums_idc == 0) {
2567 picNumNoWrap = picNumPred - abs_diff_pic_num;
2568 if (picNumNoWrap < 0)
2569 picNumNoWrap += MaxPicNum;
2574 picNumNoWrap = picNumPred + abs_diff_pic_num;
2575 if (picNumNoWrap >= MaxPicNum)
2576 picNumNoWrap -= MaxPicNum;
2578 picNumPred = picNumNoWrap;
2581 picNum = picNumNoWrap;
2582 if (picNum > CurrPicNum)
2583 picNum -= MaxPicNum;
/* Shift the tail up, insert the found picture, then compact out the
   duplicate occurrence (spec equation 8-40). */
2586 for (j = num_refs; j > ref_list_idx; j--)
2587 ref_list[j] = ref_list[j - 1];
2588 found_ref_idx = find_short_term_reference(decoder, picNum);
2589 ref_list[ref_list_idx++] =
2590 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2592 for (j = ref_list_idx; j <= num_refs; j++) {
2597 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2598 ref_list[j]->pic_num : MaxPicNum;
2599 if (PicNumF != picNum ||
2600 ref_list[j]->base.view_id != picture->base.view_id)
2601 ref_list[n++] = ref_list[j];
2605 /* 8.2.4.3.2 - Long-term reference pictures */
2606 else if (l->modification_of_pic_nums_idc == 2) {
2608 for (j = num_refs; j > ref_list_idx; j--)
2609 ref_list[j] = ref_list[j - 1];
2611 find_long_term_reference(decoder, l->value.long_term_pic_num);
2612 ref_list[ref_list_idx++] =
2613 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2615 for (j = ref_list_idx; j <= num_refs; j++) {
2616 gint32 LongTermPicNumF;
2620 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2621 ref_list[j]->long_term_pic_num : INT_MAX;
2622 if (LongTermPicNumF != l->value.long_term_pic_num ||
2623 ref_list[j]->base.view_id != picture->base.view_id)
2624 ref_list[n++] = ref_list[j];
2628 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2629 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2630 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2631 (l->modification_of_pic_nums_idc == 4 ||
2632 l->modification_of_pic_nums_idc == 5)) {
2633 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2634 gint32 picViewIdx, targetViewId;
/* idc 4 subtracts, idc 5 adds, both modulo num_view_ids. */
2637 if (l->modification_of_pic_nums_idc == 4) {
2638 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2640 picViewIdx += num_view_ids;
2645 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2646 if (picViewIdx >= num_view_ids)
2647 picViewIdx -= num_view_ids;
2649 picViewIdxPred = picViewIdx;
2652 targetViewId = view_ids[picViewIdx];
2655 for (j = num_refs; j > ref_list_idx; j--)
2656 ref_list[j] = ref_list[j - 1];
2657 ref_list[ref_list_idx++] =
2658 find_inter_view_reference(decoder, targetViewId);
2660 for (j = ref_list_idx; j <= num_refs; j++) {
2663 if (ref_list[j]->base.view_id != targetViewId ||
2664 ref_list[j]->base.poc != picture->base.poc)
2665 ref_list[n++] = ref_list[j];
/* Diagnose holes left by failed lookups, then clamp to num_refs. */
2671 for (i = 0; i < num_refs; i++)
2673 GST_ERROR("list %u entry %u is empty", list, i);
2675 *ref_list_count_ptr = num_refs;
2678 /* 8.2.4.3 - Modification process for reference picture lists */
/* Dispatches the per-list modification: list 0 for any non-I/non-SI
 * slice that signals ref_pic_list_modification_flag_l0, list 1 only for
 * B slices that signal the l1 flag. */
2680 exec_picture_refs_modification(
2681 GstVaapiDecoderH264 *decoder,
2682 GstVaapiPictureH264 *picture,
2683 GstH264SliceHdr *slice_hdr
2686 GST_DEBUG("execute ref_pic_list_modification()");
2689 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2690 slice_hdr->ref_pic_list_modification_flag_l0)
2691 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2694 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2695 slice_hdr->ref_pic_list_modification_flag_l1)
2696 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] and priv->long_ref[] from the DPB for the
 * current picture's view: frame coding collects only complete frame
 * stores; field coding collects individual fields. Also refreshes each
 * candidate's `structure` and `other_field` links. Stale tail entries of
 * both arrays are NULLed out before the counts are updated.
 * NOTE(review): this copy appears to have lost the `long_ref_count = 0;`
 * initializer next to `short_ref_count = 0;` (and `continue;` lines after
 * the view_id checks) — confirm against the upstream file. */
2700 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2701 GstVaapiPictureH264 *picture)
2703 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2704 guint i, j, short_ref_count, long_ref_count;
2706 short_ref_count = 0;
2708 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2709 for (i = 0; i < priv->dpb_count; i++) {
2710 GstVaapiFrameStore * const fs = priv->dpb[i];
2711 GstVaapiPictureH264 *pic;
2712 if (!gst_vaapi_frame_store_has_frame(fs))
2714 pic = fs->buffers[0];
/* Only consider references belonging to the current view. */
2715 if (pic->base.view_id != picture->base.view_id)
2717 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2718 priv->short_ref[short_ref_count++] = pic;
2719 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2720 priv->long_ref[long_ref_count++] = pic;
2721 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2722 pic->other_field = fs->buffers[1];
/* Field decoding: every field in every frame store is a candidate. */
2726 for (i = 0; i < priv->dpb_count; i++) {
2727 GstVaapiFrameStore * const fs = priv->dpb[i];
2728 for (j = 0; j < fs->num_buffers; j++) {
2729 GstVaapiPictureH264 * const pic = fs->buffers[j];
2730 if (pic->base.view_id != picture->base.view_id)
2732 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2733 priv->short_ref[short_ref_count++] = pic;
2734 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2735 priv->long_ref[long_ref_count++] = pic;
2736 pic->structure = pic->base.structure;
2737 pic->other_field = fs->buffers[j ^ 1];
/* Clear entries beyond the new counts so no stale pointers linger. */
2742 for (i = short_ref_count; i < priv->short_ref_count; i++)
2743 priv->short_ref[i] = NULL;
2744 priv->short_ref_count = short_ref_count;
2746 for (i = long_ref_count; i < priv->long_ref_count; i++)
2747 priv->long_ref[i] = NULL;
2748 priv->long_ref_count = long_ref_count;
/* Top-level reference-list construction for one slice: rebuilds the
 * short/long-term arrays, derives PicNum values, builds the initial
 * RefPicList0/1 per slice type, applies ref_pic_list_modification(),
 * and finally pads each active list with NULLs up to
 * num_ref_idx_lX_active_minus1 + 1.
 * NOTE(review): the function-name and return-type lines were dropped
 * from this listing (presumably `static void init_picture_refs(` per
 * the callers' convention — confirm upstream); `num_refs` and `i`
 * declarations and the `break;` statements are also missing here. */
2753 GstVaapiDecoderH264 *decoder,
2754 GstVaapiPictureH264 *picture,
2755 GstH264SliceHdr *slice_hdr
2758 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2761 init_picture_ref_lists(decoder, picture);
2762 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2764 priv->RefPicList0_count = 0;
2765 priv->RefPicList1_count = 0;
/* type % 5 folds the "same type for all slices" variants (5..9) onto
   the base slice types (0..4). */
2767 switch (slice_hdr->type % 5) {
2768 case GST_H264_P_SLICE:
2769 case GST_H264_SP_SLICE:
2770 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2772 case GST_H264_B_SLICE:
2773 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2779 exec_picture_refs_modification(decoder, picture, slice_hdr);
/* Pad lists to their declared active sizes; B pads both, P/SP list 0. */
2781 switch (slice_hdr->type % 5) {
2782 case GST_H264_B_SLICE:
2783 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2784 for (i = priv->RefPicList1_count; i < num_refs; i++)
2785 priv->RefPicList1[i] = NULL;
2786 priv->RefPicList1_count = num_refs;
2789 case GST_H264_P_SLICE:
2790 case GST_H264_SP_SLICE:
2791 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2792 for (i = priv->RefPicList0_count; i < num_refs; i++)
2793 priv->RefPicList0[i] = NULL;
2794 priv->RefPicList0_count = num_refs;
/* Initializes the decoder/picture state for a newly started picture from
 * its parsed slice header and NAL unit info: frame_num bookkeeping, PTS,
 * view id/order, MVC flags, IDR handling (DPB flush), picture structure
 * (frame vs. top/bottom field), reference flags and finally POC.
 * NOTE(review): the function-name and return-type lines were dropped
 * from this listing (presumably `static gboolean init_picture(` —
 * confirm upstream); `else` keywords and closing braces are missing
 * throughout this copy. */
2803 GstVaapiDecoderH264 *decoder,
2804 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2806 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2807 GstVaapiPicture * const base_picture = &picture->base;
2808 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2810 priv->prev_frame_num = priv->frame_num;
2811 priv->frame_num = slice_hdr->frame_num;
2812 picture->frame_num = priv->frame_num;
/* frame_num_wrap is refined later by the PicNum derivation. */
2813 picture->frame_num_wrap = priv->frame_num;
2814 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2815 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2816 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2817 base_picture->view_id = pi->view_id;
2818 base_picture->voc = pi->voc;
2820 /* Initialize extensions */
2821 switch (pi->nalu.extension_type) {
2822 case GST_H264_NAL_EXTENSION_MVC: {
2823 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2825 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2826 if (mvc->inter_view_flag)
2827 GST_VAAPI_PICTURE_FLAG_SET(picture,
2828 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2829 if (mvc->anchor_pic_flag)
2830 GST_VAAPI_PICTURE_FLAG_SET(picture,
2831 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2836 /* Reset decoder state for IDR pictures */
2837 if (pi->nalu.idr_pic_flag) {
2839 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2840 dpb_flush(decoder, picture);
2843 /* Initialize picture structure */
2844 if (!slice_hdr->field_pic_flag)
2845 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2847 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2848 if (!slice_hdr->bottom_field_flag)
2849 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2851 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2853 picture->structure = base_picture->structure;
2855 /* Initialize reference flags */
2856 if (pi->nalu.ref_idc) {
2857 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2858 &slice_hdr->dec_ref_pic_marking;
/* IDR + long_term_reference_flag => long-term, otherwise short-term. */
2860 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2861 dec_ref_pic_marking->long_term_reference_flag)
2862 GST_VAAPI_PICTURE_FLAG_SET(picture,
2863 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2865 GST_VAAPI_PICTURE_FLAG_SET(picture,
2866 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2869 init_picture_poc(decoder, picture, slice_hdr);
2873 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the DPB holds max_num_ref_frames references, evicts the
 * short-term reference with the smallest FrameNumWrap (i.e. the oldest).
 * For field pictures the limit is doubled (per-field counting) and the
 * evicted picture's complementary field is removed as well.
 * Runs only for the first field / frame of the current picture.
 * NOTE(review): listing truncated — return type (upstream returns a
 * gboolean), `return` statements, `break` and closing braces missing. */
2875 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2877 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2878 GstH264SPS * const sps = get_sps(decoder);
2879 GstVaapiPictureH264 *ref_picture;
2880 guint i, m, max_num_ref_frames;
2882 GST_DEBUG("reference picture marking process (sliding window)");
2884 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
/* num_ref_frames == 0 is treated as 1 to keep the window non-empty. */
2887 max_num_ref_frames = sps->num_ref_frames;
2888 if (max_num_ref_frames == 0)
2889 max_num_ref_frames = 1;
2890 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2891 max_num_ref_frames <<= 1;
2893 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2895 if (priv->short_ref_count < 1)
/* Find the short-term reference with the minimum FrameNumWrap. */
2898 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2899 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2900 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2904 ref_picture = priv->short_ref[m];
2905 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2906 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2908 /* Both fields need to be marked as "unused for reference", so
2909 remove the other field from the short_ref[] list as well */
2910 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2911 for (i = 0; i < priv->short_ref_count; i++) {
2912 if (priv->short_ref[i] == ref_picture->other_field) {
2913 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2921 static inline gint32
2922 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2926 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2927 pic_num = picture->frame_num_wrap;
2929 pic_num = 2 * picture->frame_num_wrap + 1;
2930 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2934 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: looks up the short-term reference with PicNum == picNumX,
 * clears its reference flags (both fields only when the current picture
 * is a frame) and drops it from short_ref[].
 * NOTE(review): listing truncated — the declarations of `i`/`picNumX`,
 * the `if (i < 0) return;`-style guard after the lookup, and closing
 * braces are missing from this copy. */
2936 exec_ref_pic_marking_adaptive_mmco_1(
2937 GstVaapiDecoderH264 *decoder,
2938 GstVaapiPictureH264 *picture,
2939 GstH264RefPicMarking *ref_pic_marking
2942 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2945 picNumX = get_picNumX(picture, ref_pic_marking);
2946 i = find_short_term_reference(decoder, picNumX);
2950 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2951 GST_VAAPI_PICTURE_IS_FRAME(picture));
2952 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2955 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: looks up the long-term reference with the signalled
 * LongTermPicNum, clears its reference flags and drops it from
 * long_ref[]. Mirrors mmco_1 for the long-term list.
 * NOTE(review): listing truncated — declaration of `i`, the negative-
 * lookup guard and closing braces are missing from this copy. */
2957 exec_ref_pic_marking_adaptive_mmco_2(
2958 GstVaapiDecoderH264 *decoder,
2959 GstVaapiPictureH264 *picture,
2960 GstH264RefPicMarking *ref_pic_marking
2963 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2966 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2970 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2971 GST_VAAPI_PICTURE_IS_FRAME(picture));
2972 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2975 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: first evicts any existing long-term reference already using
 * the target LongTermFrameIdx, then moves the short-term reference with
 * PicNum == picNumX into long_ref[], marks it long-term, and propagates
 * the index to its complementary field when that field is also a
 * long-term reference.
 * NOTE(review): listing truncated — declarations of `i`/`picNumX`,
 * lookup-failure guards and closing braces are missing from this copy. */
2977 exec_ref_pic_marking_adaptive_mmco_3(
2978 GstVaapiDecoderH264 *decoder,
2979 GstVaapiPictureH264 *picture,
2980 GstH264RefPicMarking *ref_pic_marking
2983 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2984 GstVaapiPictureH264 *ref_picture, *other_field;
/* Drop any previous holder of this LongTermFrameIdx. */
2987 for (i = 0; i < priv->long_ref_count; i++) {
2988 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2991 if (i != priv->long_ref_count) {
2992 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2993 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2996 picNumX = get_picNumX(picture, ref_pic_marking);
2997 i = find_short_term_reference(decoder, picNumX);
/* Move the found short-term reference into the long-term list. */
3001 ref_picture = priv->short_ref[i];
3002 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3003 priv->long_ref[priv->long_ref_count++] = ref_picture;
3005 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3006 gst_vaapi_picture_h264_set_reference(ref_picture,
3007 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3008 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3010 /* Assign LongTermFrameIdx to the other field if it was also
3011 marked as "used for long-term reference */
3012 other_field = ref_picture->other_field;
3013 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3014 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3017 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3018 * as "unused for reference" */
/* MMCO 4: lowers MaxLongTermFrameIdx to max_long_term_frame_idx_plus1-1
 * and evicts every long-term reference whose index exceeds it.
 * NOTE(review): listing truncated — the loop appears to skip entries
 * that satisfy the <= test (a dropped `continue;`) before evicting the
 * rest, and an index-decrement after ARRAY_REMOVE_INDEX plus closing
 * braces are missing from this copy; confirm upstream. */
3020 exec_ref_pic_marking_adaptive_mmco_4(
3021 GstVaapiDecoderH264 *decoder,
3022 GstVaapiPictureH264 *picture,
3023 GstH264RefPicMarking *ref_pic_marking
3026 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3027 gint32 i, long_term_frame_idx;
3029 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3031 for (i = 0; i < priv->long_ref_count; i++) {
3032 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3034 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3035 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3040 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flushes the DPB, records the event for the next picture's
 * POC/frame_num derivation, resets frame_num state to 0 (7.4.3) and
 * rebases the current picture's POCs so that its own POC becomes 0. */
3042 exec_ref_pic_marking_adaptive_mmco_5(
3043 GstVaapiDecoderH264 *decoder,
3044 GstVaapiPictureH264 *picture,
3045 GstH264RefPicMarking *ref_pic_marking
3048 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3050 dpb_flush(decoder, picture);
/* Consulted by subsequent pictures when deriving POC/frame_num. */
3052 priv->prev_pic_has_mmco5 = TRUE;
3054 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3055 priv->frame_num = 0;
3056 priv->frame_num_offset = 0;
3057 picture->frame_num = 0;
3059 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3060 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3061 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3062 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3063 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3064 picture->base.poc = 0;
3067 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evicts any existing long-term reference holding the target
 * LongTermFrameIdx, then marks the *current* picture long-term with
 * that index, propagating it to the complementary field when that
 * field is itself a long-term reference.
 * NOTE(review): listing truncated — declaration of `i`, `break;` in the
 * search loop and closing braces are missing from this copy. */
3069 exec_ref_pic_marking_adaptive_mmco_6(
3070 GstVaapiDecoderH264 *decoder,
3071 GstVaapiPictureH264 *picture,
3072 GstH264RefPicMarking *ref_pic_marking
3075 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3076 GstVaapiPictureH264 *other_field;
/* Drop any previous holder of this LongTermFrameIdx. */
3079 for (i = 0; i < priv->long_ref_count; i++) {
3080 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3083 if (i != priv->long_ref_count) {
3084 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3085 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3088 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3089 gst_vaapi_picture_h264_set_reference(picture,
3090 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3091 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3093 /* Assign LongTermFrameIdx to the other field if it was also
3094 marked as "used for long-term reference */
3095 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3096 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3097 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3100 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Dispatches each signalled MMCO operation through a function table
 * indexed by memory_management_control_operation; unknown/unsupported
 * values are logged as errors.
 * NOTE(review): listing truncated — return type (upstream returns a
 * gboolean), the declaration of `i`, the NULL entry for MMCO 0 in the
 * table, `else` and the final `return` are missing from this copy. */
3102 exec_ref_pic_marking_adaptive(
3103 GstVaapiDecoderH264 *decoder,
3104 GstVaapiPictureH264 *picture,
3105 GstH264DecRefPicMarking *dec_ref_pic_marking
3110 GST_DEBUG("reference picture marking process (adaptive memory control)");
3112 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3113 GstVaapiDecoderH264 *decoder,
3114 GstVaapiPictureH264 *picture,
3115 GstH264RefPicMarking *ref_pic_marking
/* Table indexed by MMCO value; entries 1..6 implemented above. */
3118 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3120 exec_ref_pic_marking_adaptive_mmco_1,
3121 exec_ref_pic_marking_adaptive_mmco_2,
3122 exec_ref_pic_marking_adaptive_mmco_3,
3123 exec_ref_pic_marking_adaptive_mmco_4,
3124 exec_ref_pic_marking_adaptive_mmco_5,
3125 exec_ref_pic_marking_adaptive_mmco_6,
3128 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3129 GstH264RefPicMarking * const ref_pic_marking =
3130 &dec_ref_pic_marking->ref_pic_marking[i];
3132 const guint mmco = ref_pic_marking->memory_management_control_operation;
3133 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3134 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3136 GST_ERROR("unhandled MMCO %u", mmco);
3143 /* 8.2.5 - Execute reference picture marking process */
/* Entry point for reference marking after a picture is decoded: records
 * mmco5/structure state for the next picture, tracks inter-view
 * pictures, and — for reference pictures only — runs either the
 * adaptive (MMCO) or the sliding-window marking process.
 * NOTE(review): listing truncated — return type (upstream returns a
 * gboolean), early `return TRUE;` paths, `else` and closing braces are
 * missing from this copy. */
3145 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3147 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3149 priv->prev_pic_has_mmco5 = FALSE;
3150 priv->prev_pic_structure = picture->structure;
3152 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3153 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
/* Non-reference pictures take no part in the marking process. */
3155 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
/* IDR pictures already reset marking state in init_picture(). */
3158 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3159 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3160 &picture->last_slice_hdr->dec_ref_pic_marking;
3161 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3162 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3166 if (!exec_ref_pic_marking_sliding_window(decoder))
3174 vaapi_init_picture(VAPictureH264 *pic)
3176 pic->picture_id = VA_INVALID_ID;
3178 pic->flags = VA_PICTURE_H264_INVALID;
3179 pic->TopFieldOrderCnt = 0;
3180 pic->BottomFieldOrderCnt = 0;
/* Translates a decoder picture into a VAPictureH264 entry: surface id,
 * reference flags with the matching frame index (LongTermFrameIdx for
 * long-term refs, frame_num for short-term), and the top/bottom field
 * order counts appropriate to @picture_structure (0 means "use the
 * picture's own structure").
 * NOTE(review): listing truncated — return type, a `pic->flags = 0;`
 * style reset before the flag tests, `else`, `break;` statements and
 * closing braces are missing from this copy. */
3184 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3185 guint picture_structure)
3187 if (!picture_structure)
3188 picture_structure = picture->structure;
3190 pic->picture_id = picture->base.surface_id;
3193 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3194 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3195 pic->frame_idx = picture->long_term_frame_idx;
3198 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3199 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3200 pic->frame_idx = picture->frame_num;
/* POCs: frames carry both fields, single fields zero the other one. */
3203 switch (picture_structure) {
3204 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3205 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3206 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3208 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3209 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3210 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3211 pic->BottomFieldOrderCnt = 0;
3213 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3214 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3215 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3216 pic->TopFieldOrderCnt = 0;
/* Fills a VAPictureH264 entry destined for a RefPicListX[] array.
 * Same as vaapi_fill_picture(), except that for MVC inter-view
 * references the short/long-term flags are stripped, per H.8.4.
 * NOTE(review): listing truncated — return type and closing braces are
 * missing from this copy. */
3222 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3223 GstVaapiPictureH264 *picture)
3225 vaapi_fill_picture(pic, picture, 0);
3227 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3228 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3229 /* The inter-view reference components and inter-view only
3230 reference components that are included in the reference
3231 picture lists are considered as not being marked as "used for
3232 short-term reference" or "used for long-term reference" */
3233 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3234 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames[] array (same-view references plus MVC
 * inter-view references, remaining slots invalidated), and the SPS/PPS
 * derived fields and bitfields the driver needs.
 * NOTE(review): listing truncated — return type (upstream returns a
 * gboolean), declarations of `i`/`n`, a `break;` after the
 * ReferenceFrames capacity check, the final `return` and closing braces
 * are missing from this copy. */
3239 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3241 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3242 GstVaapiPicture * const base_picture = &picture->base;
3243 GstH264PPS * const pps = get_pps(decoder);
3244 GstH264SPS * const sps = get_sps(decoder);
3245 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3248 /* Fill in VAPictureParameterBufferH264 */
3249 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
/* Collect DPB entries that reference this picture's view, or that are
   inter-view references usable by it (MVC). */
3251 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3252 GstVaapiFrameStore * const fs = priv->dpb[i];
3253 if ((gst_vaapi_frame_store_has_reference(fs) &&
3254 fs->view_id == picture->base.view_id) ||
3255 (gst_vaapi_frame_store_has_inter_view(fs) &&
3256 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3257 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3258 fs->buffers[0], fs->structure);
3259 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Invalidate the unused tail of ReferenceFrames[]. */
3262 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3263 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3265 #define COPY_FIELD(s, f) \
3266 pic_param->f = (s)->f
3268 #define COPY_BFM(a, s, f) \
3269 pic_param->a.bits.f = (s)->f
3271 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3272 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3273 pic_param->frame_num = priv->frame_num;
3275 COPY_FIELD(sps, bit_depth_luma_minus8);
3276 COPY_FIELD(sps, bit_depth_chroma_minus8);
3277 COPY_FIELD(sps, num_ref_frames);
3278 COPY_FIELD(pps, num_slice_groups_minus1);
3279 COPY_FIELD(pps, slice_group_map_type);
3280 COPY_FIELD(pps, slice_group_change_rate_minus1);
3281 COPY_FIELD(pps, pic_init_qp_minus26);
3282 COPY_FIELD(pps, pic_init_qs_minus26);
3283 COPY_FIELD(pps, chroma_qp_index_offset);
3284 COPY_FIELD(pps, second_chroma_qp_index_offset);
3286 pic_param->seq_fields.value = 0; /* reset all bits */
3287 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3288 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3290 COPY_BFM(seq_fields, sps, chroma_format_idc);
3291 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3292 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3293 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3294 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3295 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3296 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3297 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3298 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3300 pic_param->pic_fields.value = 0; /* reset all bits */
3301 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3302 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3304 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3305 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3306 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3307 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3308 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3309 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3310 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3311 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3315 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Returns TRUE when the current slice (pi) starts a new primary coded
 * picture relative to the previous slice (prev_pi), per the conditions
 * enumerated in H.264 spec clause 7.4.1.2.4. */
3317 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3319 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3320 GstH264PPS * const pps = slice_hdr->pps;
3321 GstH264SPS * const sps = pps->sequence;
3322 GstH264SliceHdr *prev_slice_hdr;
3326 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* CHECK_EXPR logs and bails out when the compared expression is false,
 * i.e. when the field differs between the two slice headers. NOTE(review):
 * the macro body is partially elided in this listing — the elided lines
 * presumably return TRUE; confirm against upstream. */
3328 #define CHECK_EXPR(expr, field_name) do { \
3330 GST_DEBUG(field_name " differs in value"); \
/* CHECK_VALUE compares one named field of two slice headers via CHECK_EXPR. */
3335 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3336 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3338 /* view_id differs in value and VOIdx of current slice_hdr is less
3339 than the VOIdx of the prev_slice_hdr */
3340 CHECK_VALUE(pi, prev_pi, view_id);
3342 /* frame_num differs in value, regardless of inferred values to 0 */
3343 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3345 /* pic_parameter_set_id differs in value */
/* Compared via the PPS pointer rather than the raw id — the parser
 * resolves pic_parameter_set_id to a GstH264PPS pointer, so pointer
 * equality implies id equality here. */
3346 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3348 /* field_pic_flag differs in value */
3349 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3351 /* bottom_field_flag is present in both and differs in value */
3352 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3353 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3355 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3356 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3357 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3359 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3360 value or delta_pic_order_cnt_bottom differs in value */
3361 if (sps->pic_order_cnt_type == 0) {
3362 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3363 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3364 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3367 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3368 differs in value or delta_pic_order_cnt[1] differs in value */
3369 else if (sps->pic_order_cnt_type == 1) {
3370 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3371 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3374 /* IdrPicFlag differs in value */
3375 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3377 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3378 if (pi->nalu.idr_pic_flag)
3379 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3386 /* Detection of a new access unit, assuming we are already in presence
/* (of a new picture — continuation of this comment is elided in this
 * listing). Returns whether the current slice begins a new access unit. */
3388 static inline gboolean
3389 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
/* No previous slice, or same view: the elided statement here presumably
 * returns TRUE — confirm against upstream. */
3391 if (!prev_pi || prev_pi->view_id == pi->view_id)
/* MVC: a view order count (VOIdx) lower than the previous slice's means
 * we wrapped back to an earlier view, i.e. a new access unit starts. */
3393 return pi->voc < prev_pi->voc;
3396 /* Finds the first field picture corresponding to the supplied picture */
/* Returns the previously decoded first field that pairs with the incoming
 * field slice (same frame_num, same view), or NULL (elided return paths)
 * when the incoming picture is a frame or no pairable field exists. */
3397 static GstVaapiPictureH264 *
3398 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3400 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3401 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3402 GstVaapiFrameStore *fs;
/* Frame pictures never have a "first field" to pair with. */
3404 if (!slice_hdr->field_pic_flag)
/* Look up the most recent frame store for this view (indexed by voc);
 * a store that already holds a complete frame cannot accept a second field. */
3407 fs = priv->prev_frames[pi->voc];
3408 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Fields of the same coded frame share frame_num (7.4.3). */
3411 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3412 return fs->buffers[0];
3416 static GstVaapiDecoderStatus
/* Starts decoding a new picture for the slice carried in @unit: activates
 * PPS/SPS, (re)creates the VA context if needed, allocates either a new
 * picture or the second field of an existing one, and fills the VA picture
 * parameters. Called from the start_frame() vmethod. */
3417 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3419 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3420 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3421 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3422 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3423 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3424 GstVaapiPictureH264 *picture, *first_field;
3425 GstVaapiDecoderStatus status;
3427 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3428 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3430 /* Only decode base stream for MVC */
3431 switch (sps->profile_idc) {
3432 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3433 case GST_H264_PROFILE_STEREO_HIGH:
/* NOTE(review): the elided condition here presumably checks for a
 * non-base view before dropping — confirm against upstream. */
3435 GST_DEBUG("drop picture from substream");
3436 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3441 status = ensure_context(decoder, sps);
3442 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Reset per-picture state; it is rebuilt as slices arrive. */
3445 priv->decoder_state = 0;
3447 first_field = find_first_field(decoder, pi);
3449 /* Re-use current picture where the first field was decoded */
/* (branch taken when first_field != NULL — condition elided in listing) */
3450 picture = gst_vaapi_picture_h264_new_field(first_field);
3452 GST_ERROR("failed to allocate field picture");
3453 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3457 /* Create new picture */
3458 picture = gst_vaapi_picture_h264_new(decoder);
3460 GST_ERROR("failed to allocate picture");
3461 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* replace() takes its own ref, so drop the local one immediately. */
3464 gst_vaapi_picture_replace(&priv->current_picture, picture);
3465 gst_vaapi_picture_unref(picture);
3467 /* Clear inter-view references list if this is the primary coded
3468 picture of the current access unit */
3469 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3470 g_ptr_array_set_size(priv->inter_views, 0);
3472 /* Update cropping rectangle */
3473 if (sps->frame_cropping_flag) {
3474 GstVaapiRectangle crop_rect;
3475 crop_rect.x = sps->crop_rect_x;
3476 crop_rect.y = sps->crop_rect_y;
3477 crop_rect.width = sps->crop_rect_width;
3478 crop_rect.height = sps->crop_rect_height;
3479 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3482 status = ensure_quant_matrix(decoder, picture);
3483 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3484 GST_ERROR("failed to reset quantizer matrix");
3488 if (!init_picture(decoder, picture, pi))
3489 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3490 if (!fill_picture(decoder, picture))
3491 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3493 priv->decoder_state = pi->state;
3494 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns the bit offset, within the slice NAL unit, of the first bit of
 * slice_data(), compensating for emulation-prevention bytes that the
 * parser counted while reading the slice header. */
3498 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3502 epb_count = slice_hdr->n_emulation_prevention_bytes;
3503 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Copies the explicit prediction weight tables from the parsed slice
 * header into the VASliceParameterBufferH264. One table (L0) is needed
 * for weighted P/SP slices, two (L0+L1) for explicit weighted B slices. */
3507 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3508 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3510 VASliceParameterBufferH264 * const slice_param = slice->param;
3511 GstH264PPS * const pps = get_pps(decoder);
3512 GstH264SPS * const sps = get_sps(decoder);
3513 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3514 guint num_weight_tables = 0;
3517 if (pps->weighted_pred_flag &&
3518 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3519 num_weight_tables = 1;
3520 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3521 num_weight_tables = 2;
/* Implicit weighting (weighted_bipred_idc == 2) or no weighting:
 * the driver derives weights itself, so no tables are passed. */
3523 num_weight_tables = 0;
/* Reset all flags/denominators before the early-out below. */
3525 slice_param->luma_log2_weight_denom = 0;
3526 slice_param->chroma_log2_weight_denom = 0;
3527 slice_param->luma_weight_l0_flag = 0;
3528 slice_param->chroma_weight_l0_flag = 0;
3529 slice_param->luma_weight_l1_flag = 0;
3530 slice_param->chroma_weight_l1_flag = 0;
3532 if (num_weight_tables < 1)
3535 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3536 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights/offsets, one entry per active reference. */
3538 slice_param->luma_weight_l0_flag = 1;
3539 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3540 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3541 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma tables only exist when ChromaArrayType != 0 (i.e. not monochrome
 * and not separate colour planes). */
3544 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3545 if (slice_param->chroma_weight_l0_flag) {
3546 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3547 for (j = 0; j < 2; j++) {
3548 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3549 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3554 if (num_weight_tables < 2)
/* List 1 weights/offsets (B slices with explicit weighting only). */
3557 slice_param->luma_weight_l1_flag = 1;
3558 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3559 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3560 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3563 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3564 if (slice_param->chroma_weight_l1_flag) {
3565 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3566 for (j = 0; j < 2; j++) {
3567 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3568 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Copies the decoder's modified reference picture lists (RefPicList0/1)
 * into the VA slice parameters. P/SP slices need list 0 only, B slices
 * need both, I slices need none. Unused entries are reset to "invalid". */
3576 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3577 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3579 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3580 VASliceParameterBufferH264 * const slice_param = slice->param;
3581 guint i, num_ref_lists = 0;
3583 slice_param->num_ref_idx_l0_active_minus1 = 0;
3584 slice_param->num_ref_idx_l1_active_minus1 = 0;
/* NOTE(review): the assignments of num_ref_lists (2 for B, 0 for I,
 * 1 otherwise) are elided from this listing — confirm against upstream. */
3586 if (GST_H264_IS_B_SLICE(slice_hdr))
3588 else if (GST_H264_IS_I_SLICE(slice_hdr))
3593 if (num_ref_lists < 1)
3596 slice_param->num_ref_idx_l0_active_minus1 =
3597 slice_hdr->num_ref_idx_l0_active_minus1;
/* Copy valid list-0 entries, then invalidate the remainder up to the
 * active count so the driver sees well-defined entries. */
3599 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3600 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3601 priv->RefPicList0[i]);
3602 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3603 vaapi_init_picture(&slice_param->RefPicList0[i]);
3605 if (num_ref_lists < 2)
3608 slice_param->num_ref_idx_l1_active_minus1 =
3609 slice_hdr->num_ref_idx_l1_active_minus1;
/* Same for list 1 (B slices only). */
3611 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3612 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3613 priv->RefPicList1[i]);
3614 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3615 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Fills the complete VASliceParameterBufferH264 for one slice: scalar
 * slice-header fields, the reference picture lists, and the prediction
 * weight tables. Returns FALSE (elided paths) on failure. */
3620 fill_slice(GstVaapiDecoderH264 *decoder,
3621 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3623 VASliceParameterBufferH264 * const slice_param = slice->param;
3624 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3626 /* Fill in VASliceParameterBufferH264 */
3627 slice_param->slice_data_bit_offset =
3628 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3629 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* slice_type values 5..9 are the "all slices of this type" variants;
 * modulo 5 folds them onto the base P/B/I/SP/SI types (7.4.3). */
3630 slice_param->slice_type = slice_hdr->type % 5;
3631 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3632 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3633 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3634 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3635 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3636 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3638 if (!fill_RefPicList(decoder, slice, slice_hdr))
3640 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3645 static GstVaapiDecoderStatus
/* Decodes one slice NAL unit: validates parser state, activates PPS/SPS,
 * maps the input buffer, creates a GstVaapiSlice pointing at the slice
 * payload, initializes reference lists and appends the slice to the
 * current picture. */
3646 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3648 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3649 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3650 GstVaapiPictureH264 * const picture = priv->current_picture;
3651 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3652 GstVaapiSlice *slice;
3653 GstBuffer * const buffer =
3654 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3655 GstMapInfo map_info;
3657 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid SPS+PPS headers the slice cannot be decoded; skip it
 * silently (SUCCESS) rather than erroring out mid-stream. */
3659 if (!is_valid_state(pi->state,
3660 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3661 GST_WARNING("failed to receive enough headers to decode slice");
3662 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3665 if (!ensure_pps(decoder, slice_hdr->pps)) {
3666 GST_ERROR("failed to activate PPS");
3667 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3670 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3671 GST_ERROR("failed to activate SPS");
3672 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3675 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3676 GST_ERROR("failed to map buffer");
3677 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3680 /* Check whether this is the first/last slice in the current access unit */
3681 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3682 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3683 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3684 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* The slice object copies/references the payload, so the buffer can be
 * unmapped immediately after creation. */
3686 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3687 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3688 gst_buffer_unmap(buffer, &map_info);
3690 GST_ERROR("failed to allocate slice");
3691 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* Reference lists must be (re)built per slice before filling VA params. */
3694 init_picture_refs(decoder, picture, slice_hdr);
3695 if (!fill_slice(decoder, slice, pi)) {
3696 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3697 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3700 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
/* Remembered for reference marking at end_frame time. */
3701 picture->last_slice_hdr = slice_hdr;
3702 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3703 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans the adapter for an Annex-B start code (00 00 01) within
 * [ofs, ofs+size), returning its offset or -1 when not found. */
3707 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3709 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3710 0xffffff00, 0x00000100,
3715 static GstVaapiDecoderStatus
/* Dispatches one parsed NAL unit to the appropriate decode_*() handler
 * based on its NAL type. Unknown types are reported as bitstream errors. */
3716 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3718 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3719 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3720 GstVaapiDecoderStatus status;
/* Accumulate parser-reported state (got SPS/PPS/slice bits). */
3722 priv->decoder_state |= pi->state;
3723 switch (pi->nalu.type) {
3724 case GST_H264_NAL_SPS:
3725 status = decode_sps(decoder, unit);
3727 case GST_H264_NAL_SUBSET_SPS:
3728 status = decode_subset_sps(decoder, unit);
3730 case GST_H264_NAL_PPS:
3731 status = decode_pps(decoder, unit);
3733 case GST_H264_NAL_SLICE_EXT:
3734 case GST_H264_NAL_SLICE_IDR:
3735 /* fall-through. IDR specifics are handled in init_picture() */
3736 case GST_H264_NAL_SLICE:
3737 status = decode_slice(decoder, unit);
3739 case GST_H264_NAL_SEQ_END:
3740 case GST_H264_NAL_STREAM_END:
3741 status = decode_sequence_end(decoder);
/* SEI payloads were already parsed in the parse() stage; nothing to do. */
3743 case GST_H264_NAL_SEI:
3744 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3747 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3748 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3754 static GstVaapiDecoderStatus
/* Parses avcC-format codec_data: reads nal_length_size, then parses and
 * decodes each embedded SPS and PPS NAL unit. On success the decoder is
 * switched to AVC (length-prefixed) bitstream mode via priv->is_avcC. */
3755 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3756 const guchar *buf, guint buf_size)
3758 GstVaapiDecoderH264 * const decoder =
3759 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3760 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3761 GstVaapiDecoderStatus status;
3762 GstVaapiDecoderUnit unit;
3763 GstVaapiParserInfoH264 *pi = NULL;
3764 GstH264ParserResult result;
3765 guint i, ofs, num_sps, num_pps;
3767 unit.parsed_info = NULL;
/* (elided) minimum-size check on codec_data. */
3770 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* (elided) avcC signature check (configurationVersion == 1). */
3773 GST_ERROR("failed to decode codec-data, not in avcC format");
3774 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* lengthSizeMinusOne lives in the low 2 bits of byte 4 of avcC. */
3777 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* numOfSequenceParameterSets is the low 5 bits of byte 5. */
3779 num_sps = buf[5] & 0x1f;
3782 for (i = 0; i < num_sps; i++) {
3783 pi = gst_vaapi_parser_info_h264_new();
3785 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3786 unit.parsed_info = pi;
/* SPS/PPS entries in avcC carry 2-byte length prefixes. */
3788 result = gst_h264_parser_identify_nalu_avc(
3790 buf, ofs, buf_size, 2,
3793 if (result != GST_H264_PARSER_OK) {
3794 status = get_status(result);
3798 status = parse_sps(decoder, &unit);
3799 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3801 ofs = pi->nalu.offset + pi->nalu.size;
3803 status = decode_sps(decoder, &unit);
3804 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3806 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3812 for (i = 0; i < num_pps; i++) {
3813 pi = gst_vaapi_parser_info_h264_new();
3815 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3816 unit.parsed_info = pi;
3818 result = gst_h264_parser_identify_nalu_avc(
3820 buf, ofs, buf_size, 2,
3823 if (result != GST_H264_PARSER_OK) {
3824 status = get_status(result);
3828 status = parse_pps(decoder, &unit);
3829 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3831 ofs = pi->nalu.offset + pi->nalu.size;
3833 status = decode_pps(decoder, &unit);
3834 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3836 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3839 priv->is_avcC = TRUE;
3840 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common cleanup: drop any parser info still held (error or success). */
3843 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3847 static GstVaapiDecoderStatus
/* Lazily opens the decoder on first use and processes any codec-data
 * (avcC) attached to the caps. Idempotent once priv->is_opened is set. */
3848 ensure_decoder(GstVaapiDecoderH264 *decoder)
3850 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3851 GstVaapiDecoderStatus status;
3853 if (!priv->is_opened) {
3854 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3855 if (!priv->is_opened)
3856 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3858 status = gst_vaapi_decoder_decode_codec_data(
3859 GST_VAAPI_DECODER_CAST(decoder));
3860 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3863 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3866 static GstVaapiDecoderStatus
/* parse() vmethod: extracts exactly one NAL unit from the adapter (either
 * length-prefixed AVC or Annex-B start-code format), parses its headers,
 * and computes the decoder-unit flags (AU/frame/slice boundaries) that
 * drive start_frame/end_frame scheduling upstream. */
3867 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3868 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3870 GstVaapiDecoderH264 * const decoder =
3871 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3872 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3873 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3874 GstVaapiParserInfoH264 *pi;
3875 GstVaapiDecoderStatus status;
3876 GstH264ParserResult result;
3878 guint i, size, buf_size, nalu_size, flags;
3882 status = ensure_decoder(decoder);
3883 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3886 size = gst_adapter_available(adapter);
/* AVC mode: NAL size is a big-endian integer of nal_length_size bytes. */
3888 if (priv->is_avcC) {
3889 if (size < priv->nal_length_size)
3890 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3892 buf = (guchar *)&start_code;
3893 g_assert(priv->nal_length_size <= sizeof(start_code));
3894 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
/* Accumulate the big-endian length prefix byte by byte. */
3897 for (i = 0; i < priv->nal_length_size; i++)
3898 nalu_size = (nalu_size << 8) | buf[i];
3900 buf_size = priv->nal_length_size + nalu_size;
3901 if (size < buf_size)
3902 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Annex-B mode: locate this unit's start code, then the next one to
 * delimit the unit. (elided: minimum-size check) */
3906 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3908 ofs = scan_for_start_code(adapter, 0, size, NULL);
3910 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Discard garbage before the first start code. */
3913 gst_adapter_flush(adapter, ofs);
/* input_offset2 caches the resume position of the previous scan so we
 * do not rescan bytes already examined across parse() calls. */
3917 ofs2 = ps->input_offset2 - ofs - 4;
3921 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3922 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3924 // Assume the whole NAL unit is present if end-of-stream
/* (elided: when not at EOS, remember scan position and wait for more data) */
3926 ps->input_offset2 = size;
3927 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3933 ps->input_offset2 = 0;
3935 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3937 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3939 unit->size = buf_size;
3941 pi = gst_vaapi_parser_info_h264_new();
3943 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit owns the parser info from here on. */
3945 gst_vaapi_decoder_unit_set_parsed_info(unit,
3946 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3949 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3950 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3952 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3953 buf, 0, buf_size, &pi->nalu);
3954 status = get_status(result);
3955 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* First pass: parse headers for the NAL types we care about. */
3958 switch (pi->nalu.type) {
3959 case GST_H264_NAL_SPS:
3960 status = parse_sps(decoder, unit);
3962 case GST_H264_NAL_SUBSET_SPS:
3963 status = parse_subset_sps(decoder, unit);
3965 case GST_H264_NAL_PPS:
3966 status = parse_pps(decoder, unit);
3968 case GST_H264_NAL_SEI:
3969 status = parse_sei(decoder, unit);
/* Non-MVC slice extensions are not supported: accept but skip parsing. */
3971 case GST_H264_NAL_SLICE_EXT:
3972 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3973 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3977 case GST_H264_NAL_SLICE_IDR:
3978 case GST_H264_NAL_SLICE:
3979 status = parse_slice(decoder, unit);
3982 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3985 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Second pass: derive unit flags for AU / frame / slice boundaries. */
3989 switch (pi->nalu.type) {
3990 case GST_H264_NAL_AU_DELIMITER:
3991 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3992 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3994 case GST_H264_NAL_FILLER_DATA:
3995 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3997 case GST_H264_NAL_STREAM_END:
3998 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4000 case GST_H264_NAL_SEQ_END:
4001 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4002 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4004 case GST_H264_NAL_SPS:
4005 case GST_H264_NAL_SUBSET_SPS:
4006 case GST_H264_NAL_PPS:
4007 case GST_H264_NAL_SEI:
4008 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4009 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4011 case GST_H264_NAL_SLICE_EXT:
4012 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4013 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4017 case GST_H264_NAL_SLICE_IDR:
4018 case GST_H264_NAL_SLICE:
4019 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4020 if (is_new_picture(pi, priv->prev_slice_pi)) {
4021 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4022 if (is_new_access_unit(pi, priv->prev_slice_pi))
4023 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4025 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4027 case GST_H264_NAL_SPS_EXT:
4028 case GST_H264_NAL_SLICE_AUX:
4029 /* skip SPS extension and auxiliary slice for now */
4030 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4032 case GST_H264_NAL_PREFIX_UNIT:
4033 /* skip Prefix NAL units for now */
4034 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4035 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4036 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 are reserved/extension types that legally start a
 * new access unit (7.4.1.2.3). */
4039 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4040 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4041 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* An AU boundary on this unit closes the previous slice's access unit. */
4044 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4045 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4046 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The mapped adapter data will be invalidated; drop the dangling pointer. */
4048 pi->nalu.data = NULL;
4049 pi->state = priv->parser_state;
4051 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4052 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4055 static GstVaapiDecoderStatus
/* decode() vmethod: ensures the decoder is open, then dispatches the
 * parsed unit to decode_unit(). */
4056 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4057 GstVaapiDecoderUnit *unit)
4059 GstVaapiDecoderH264 * const decoder =
4060 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4061 GstVaapiDecoderStatus status;
4063 status = ensure_decoder(decoder);
4064 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4066 return decode_unit(decoder, unit);
4069 static GstVaapiDecoderStatus
/* start_frame() vmethod: begins a new picture from the first slice unit. */
4070 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4071 GstVaapiDecoderUnit *unit)
4073 GstVaapiDecoderH264 * const decoder =
4074 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4076 return decode_picture(decoder, unit);
4079 static GstVaapiDecoderStatus
/* end_frame() vmethod: submits the accumulated picture for decoding. */
4080 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4082 GstVaapiDecoderH264 * const decoder =
4083 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4085 return decode_current_picture(decoder);
4088 static GstVaapiDecoderStatus
/* flush() vmethod: drains and empties the DPB (all views). */
4089 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4091 GstVaapiDecoderH264 * const decoder =
4092 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4094 dpb_flush(decoder, NULL);
4095 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Class initializer: wires up the mini-object vtable (size/finalize) and
 * all GstVaapiDecoder virtual methods for the H.264 decoder. */
4099 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4101 GstVaapiMiniObjectClass * const object_class =
4102 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4103 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4105 object_class->size = sizeof(GstVaapiDecoderH264);
4106 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4108 decoder_class->create = gst_vaapi_decoder_h264_create;
4109 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4110 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4111 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4112 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4113 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4114 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4116 decoder_class->decode_codec_data =
4117 gst_vaapi_decoder_h264_decode_codec_data;
4120 static inline const GstVaapiDecoderClass *
/* Returns the singleton decoder class, initializing it exactly once in a
 * thread-safe manner via g_once_init_enter/leave. */
4121 gst_vaapi_decoder_h264_class(void)
4123 static GstVaapiDecoderH264Class g_class;
4124 static gsize g_class_init = FALSE;
4126 if (g_once_init_enter(&g_class_init)) {
4127 gst_vaapi_decoder_h264_class_init(&g_class);
4128 g_once_init_leave(&g_class_init, TRUE);
4130 return GST_VAAPI_DECODER_CLASS(&g_class);
4134 * gst_vaapi_decoder_h264_new:
4135 * @display: a #GstVaapiDisplay
4136 * @caps: a #GstCaps holding codec information
4138 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4139 * hold extra information like codec-data and picture coded size.
4141 * Return value: the newly allocated #GstVaapiDecoder object
4144 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4146 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);