2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
/* NOTE(review): this paste carries stray original line numbers at the start of
 * each line and has interior lines elided; doc-only edits keep the code bytes
 * untouched. */
/* Forward typedefs for the decoder's private structures defined below. */
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
45 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
// Indices into GstVaapiPictureH264::field_poc[]:
// 0 = TopFieldOrderCnt, 1 = BottomFieldOrderCnt.
// TOP_FIELD was missing from this span although it is used later in
// gst_vaapi_frame_store_add() — restored here.
#define TOP_FIELD       0
#define BOTTOM_FIELD    1
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info --- */
58 /* ------------------------------------------------------------------------- */
/* Enumerators extending the base decoder-unit flags with access-unit (AU)
 * boundary markers; the enclosing `enum { ... };` wrapper is elided from this
 * view. GST_VAAPI_DECODER_UNIT_FLAGS_AU masks both boundary bits. */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
/* Cast helper to the refcounted per-NAL parser info object. */
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
/* Refcounted holder for one parsed NAL unit and its decoded payload.
 * NOTE(review): members are elided in this view — the finalize function
 * below also reads a `nalu` member and a `data` union (sps/pps/sei);
 * confirm against the full file. */
84 struct _GstVaapiParserInfoH264 {
85 GstVaapiMiniObject parent_instance;
91 GstH264SliceHdr slice_hdr;
94 guint flags; // Same as decoder unit flags (persistent)
95 guint view_id; // View ID of slice
96 guint voc; // View order index (VOIdx) of slice
/* Finalizer: releases the payload owned by the parser info, dispatched on
 * the NAL unit type (SPS/subset-SPS contents, SEI message array). */
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_SEI:
109 g_array_unref(pi->data.sei);
/* Returns the (lazily defined, static) mini-object class for parser infos;
 * wires in size and the finalizer above. */
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
/* Allocates a new parser info with refcount 1. */
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
/* Refcounting convenience wrappers over the mini-object API. */
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures --- */
145 /* ------------------------------------------------------------------------- */
/* Extended picture flags and their accessor macros. Short-term reference is
 * encoded as FLAG_REFERENCE alone; long-term reference adds REFERENCE2 —
 * hence the IS_*_REFERENCE macros compare against the full reference mask.
 * The enclosing `enum { ... };` wrapper is elided from this view. */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
/* Predicate macros over the flags above. */
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
188 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
189 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
190 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
193 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
194 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
195 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
203 #define GST_VAAPI_PICTURE_H264(picture) \
204 ((GstVaapiPictureH264 *)(picture))
/* H.264 picture: base codec picture plus the H.264-specific state needed
 * for reference picture marking and POC bookkeeping. Some members (e.g. the
 * field_poc[2] array initialized in _create below) are elided in this view. */
206 struct _GstVaapiPictureH264 {
207 GstVaapiPicture base;
208 GstH264SliceHdr *last_slice_hdr;
211 gint32 frame_num; // Original frame_num from slice_header()
212 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214 gint32 pic_num; // Temporary for ref pic marking: PicNum
215 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217 guint output_flag : 1;
218 guint output_needed : 1;
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
/* Destructor: chains up to the base picture destroy. */
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
/* Constructor: creates the base picture, then marks both field POCs as
 * unset (G_MAXINT32 sentinel) and clears output_needed. */
230 gst_vaapi_picture_h264_create(
231 GstVaapiPictureH264 *picture,
232 const GstVaapiCodecObjectConstructorArgs *args
235 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
238 picture->field_poc[0] = G_MAXINT32;
239 picture->field_poc[1] = G_MAXINT32;
240 picture->output_needed = FALSE;
/* Allocates a new H.264 picture whose parameter buffer is a
 * VAPictureParameterBufferH264. */
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
247 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248 &GstVaapiPictureH264Class,
249 GST_VAAPI_CODEC_BASE(decoder),
250 NULL, sizeof(VAPictureParameterBufferH264),
/* Replaces the reference flags on @picture and, when requested (elided
 * `other_field` parameter), on its companion field too. */
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
/* Derives the second field picture sharing @picture's surface. */
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB) --- */
283 /* ------------------------------------------------------------------------- */
/* DPB frame store: holds one frame or up to two complementary fields.
 * NOTE(review): several members used below (num_buffers, view_id, structure,
 * output_needed) are elided from this view of the struct. */
285 struct _GstVaapiFrameStore {
287 GstVaapiMiniObject parent_instance;
291 GstVaapiPictureH264 *buffers[2];
/* Finalizer: drops the references held on the stored pictures. */
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
/* Creates a frame store seeded with @picture as first buffer, copying its
 * view id, structure and output_needed state. */
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
/* Adds the second (non-first) field to a single-buffer frame store, merges
 * the per-field POCs into both pictures, and promotes the store to FRAME
 * structure. The G_MAXINT32 checks assert each field POC is set only once. */
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
335 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
339 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340 if (picture->output_flag) {
341 picture->output_needed = TRUE;
345 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
347 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348 TOP_FIELD : BOTTOM_FIELD;
349 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a coded frame into top/bottom field pictures for interlaced
 * streams: the first buffer becomes the top field and a derived second
 * field inherits frame_num, POCs and output state. */
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
359 GstVaapiPictureH264 * const first_field = fs->buffers[0];
360 GstVaapiPictureH264 *second_field;
362 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
364 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
367 second_field = gst_vaapi_picture_h264_new_field(first_field);
370 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371 gst_vaapi_picture_unref(second_field);
373 second_field->frame_num = first_field->frame_num;
374 second_field->field_poc[0] = first_field->field_poc[0];
375 second_field->field_poc[1] = first_field->field_poc[1];
376 second_field->output_flag = first_field->output_flag;
377 if (second_field->output_flag) {
378 second_field->output_needed = TRUE;
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
/* Returns TRUE if any stored picture is marked as a reference. */
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
/* Returns TRUE if any stored picture is usable for inter-view prediction. */
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
/* Refcounting convenience wrappers over the mini-object API. */
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder --- */
426 /* ------------------------------------------------------------------------- */
/* Cast helper to the concrete decoder type. */
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429 ((GstVaapiDecoderH264 *)(decoder))
/* Parsing-state bits: a picture is decodable once SPS+PPS+slice were seen.
 * The enclosing `enum { ... };` wrapper is elided from this view. */
432 GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
433 GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
434 GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
436 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437 GST_H264_VIDEO_STATE_GOT_SPS |
438 GST_H264_VIDEO_STATE_GOT_PPS),
439 GST_H264_VIDEO_STATE_VALID_PICTURE = (
440 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441 GST_H264_VIDEO_STATE_GOT_SLICE)
/* Private decoder state: parser handle, parameter-set caches, DPB, MVC
 * bookkeeping, reference lists and POC/frame_num derivation variables.
 * NOTE(review): several members used elsewhere (dpb_count, dpb_size,
 * dpb_size_max, max_views) are elided from this view. */
444 struct _GstVaapiDecoderH264Private {
445 GstH264NalParser *parser;
448 GstVaapiStreamAlignH264 stream_alignment;
449 GstVaapiPictureH264 *current_picture;
450 GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
451 GstVaapiParserInfoH264 *active_sps;
452 GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
453 GstVaapiParserInfoH264 *active_pps;
454 GstVaapiParserInfoH264 *prev_pi;
455 GstVaapiParserInfoH264 *prev_slice_pi;
456 GstVaapiFrameStore **prev_frames;
457 guint prev_frames_alloc;
458 GstVaapiFrameStore **dpb;
463 GstVaapiProfile profile;
464 GstVaapiEntrypoint entrypoint;
465 GstVaapiChromaType chroma_type;
466 GPtrArray *inter_views;
467 GstVaapiPictureH264 *short_ref[32];
468 guint short_ref_count;
469 GstVaapiPictureH264 *long_ref[32];
470 guint long_ref_count;
471 GstVaapiPictureH264 *RefPicList0[32];
472 guint RefPicList0_count;
473 GstVaapiPictureH264 *RefPicList1[32];
474 guint RefPicList1_count;
475 guint nal_length_size;
478 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479 gint32 poc_msb; // PicOrderCntMsb
480 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
481 gint32 prev_poc_msb; // prevPicOrderCntMsb
482 gint32 prev_poc_lsb; // prevPicOrderCntLsb
483 gint32 frame_num_offset; // FrameNumOffset
484 gint32 frame_num; // frame_num (from slice_header())
485 gint32 prev_frame_num; // prevFrameNum
486 gboolean prev_pic_has_mmco5; // prevMmco5Pic
487 gboolean prev_pic_structure; // previous picture structure
490 guint has_context : 1;
491 guint progressive_sequence : 1;
495 * GstVaapiDecoderH264:
497 * A decoder based on H264.
499 struct _GstVaapiDecoderH264 {
501 GstVaapiDecoder parent_instance;
502 GstVaapiDecoderH264Private priv;
506 * GstVaapiDecoderH264Class:
508 * A decoder class based on H264.
510 struct _GstVaapiDecoderH264Class {
512 GstVaapiDecoderClass parent_class;
/* Forward declarations for helpers defined later in the file. */
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520 GstVaapiPictureH264 *picture);
/* Frame-store level wrapper: checks the store's first picture. */
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524 GstVaapiFrameStore *fs)
526 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
529 /* Determines if the supplied profile is one of the MVC set */
531 is_mvc_profile(GstH264Profile profile)
533 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534 profile == GST_H264_PROFILE_STEREO_HIGH;
537 /* Determines the view_id from the supplied NAL unit */
539 get_view_id(GstH264NalUnit *nalu)
541 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
544 /* Determines the view order index (VOIdx) from the supplied view_id */
/* Scans the SPS MVC extension's view list for @view_id; logs an error when
 * not found. Return statements for the found/not-found/non-MVC paths are
 * elided in this view. */
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
548 GstH264SPSExtMVC *mvc;
551 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
554 mvc = &sps->extension.mvc;
555 for (i = 0; i <= mvc->num_views_minus1; i++) {
556 if (mvc->view[i].view_id == view_id)
559 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
563 /* Determines NumViews */
565 get_num_views(GstH264SPS *sps)
567 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568 sps->extension.mvc.num_views_minus1 : 0);
571 /* Get number of reference frames to use */
/* Derives max_dec_frame_buffering per H.264 Annex A/C: level-limit based
 * default (MaxDpbMbs / PicSizeMbs), doubled for MVC, overridden by the VUI
 * bitstream restriction, forced to 0 for Intra-only profiles with
 * constraint_set3_flag, then clamped to [num_ref_frames, max_dpb_frames]
 * and to at least 1. Several `else`/`break`/`#` framing lines are elided. */
573 get_max_dec_frame_buffering(GstH264SPS *sps)
575 guint num_views, max_dpb_frames;
576 guint max_dec_frame_buffering, PicSizeMbs;
577 GstVaapiLevelH264 level;
578 const GstVaapiH264LevelLimits *level_limits;
580 /* Table A-1 - Level limits */
/* level_idc == 11 with constraint_set3_flag means Level 1b (A.3.1). */
581 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582 level = GST_VAAPI_LEVEL_H264_L1b;
584 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586 if (G_UNLIKELY(!level_limits)) {
587 GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588 max_dec_frame_buffering = 16;
591 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592 (sps->pic_height_in_map_units_minus1 + 1) *
593 (sps->frame_mbs_only_flag ? 1 : 2));
594 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
596 if (is_mvc_profile(sps->profile_idc))
597 max_dec_frame_buffering <<= 1;
600 if (sps->vui_parameters_present_flag) {
601 GstH264VUIParams * const vui_params = &sps->vui_parameters;
602 if (vui_params->bitstream_restriction_flag)
603 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
605 switch (sps->profile_idc) {
606 case 44: // CAVLC 4:4:4 Intra profile
607 case GST_H264_PROFILE_SCALABLE_HIGH:
608 case GST_H264_PROFILE_HIGH:
609 case GST_H264_PROFILE_HIGH10:
610 case GST_H264_PROFILE_HIGH_422:
611 case GST_H264_PROFILE_HIGH_444:
612 if (sps->constraint_set3_flag)
613 max_dec_frame_buffering = 0;
619 num_views = get_num_views(sps);
620 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621 if (max_dec_frame_buffering > max_dpb_frames)
622 max_dec_frame_buffering = max_dpb_frames;
623 else if (max_dec_frame_buffering < sps->num_ref_frames)
624 max_dec_frame_buffering = sps->num_ref_frames;
625 return MAX(1, max_dec_frame_buffering);
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
631 gpointer * const entries = array;
632 guint num_entries = *array_length_ptr;
634 g_return_if_fail(index < num_entries);
636 if (index != --num_entries)
637 entries[index] = entries[num_entries];
638 entries[num_entries] = NULL;
639 *array_length_ptr = num_entries;
/* Two alternative definitions of array_remove_index — presumably the two
 * branches of a preprocessor conditional keyed on USE_STRICT_DPB_ORDERING
 * (the #if/#else/#endif lines are elided in this view): the first forwards
 * to the O(1) swap-with-last variant, the second shifts entries down to
 * preserve ordering. */
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
646 array_remove_index_fast(array, array_length_ptr, index);
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
652 gpointer * const entries = array;
653 const guint num_entries = *array_length_ptr - 1;
656 g_return_if_fail(index <= num_entries);
658 for (i = index; i < num_entries; i++)
659 entries[i] = entries[i + 1];
660 entries[num_entries] = NULL;
661 *array_length_ptr = num_entries;
/* Removes array[index] and decrements the paired <array>_count variable. */
665 #define ARRAY_REMOVE_INDEX(array, index) \
666 array_remove_index(array, &array##_count, index)
/* Removes the frame store at DPB position @index: ordered shift when strict
 * DPB ordering is enabled, otherwise swap-with-last; the vacated tail slot
 * is always cleared. */
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
671 GstVaapiDecoderH264Private * const priv = &decoder->priv;
672 guint i, num_frames = --priv->dpb_count;
674 if (USE_STRICT_DPB_ORDERING) {
675 for (i = index; i < num_frames; i++)
676 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
678 else if (index != num_frames)
679 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
/* Fragment of the DPB output helper (its name line is elided — from call
 * sites below this is presumably dpb_output): clears the picture's
 * output_needed, decrements the store's pending-output count and pushes the
 * picture (or the store's frame) downstream. */
685 GstVaapiDecoderH264 *decoder,
686 GstVaapiFrameStore *fs,
687 GstVaapiPictureH264 *picture
690 picture->output_needed = FALSE;
693 if (--fs->output_needed > 0)
695 picture = fs->buffers[0];
697 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
703 GstVaapiDecoderH264Private * const priv = &decoder->priv;
704 GstVaapiFrameStore * const fs = priv->dpb[i];
706 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707 dpb_remove_index(decoder, i);
710 /* Finds the frame store holding the supplied picture */
/* Returns the DPB index of the store containing @picture (return lines for
 * the found/not-found paths are elided in this view). */
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
714 GstVaapiDecoderH264Private * const priv = &decoder->priv;
717 for (i = 0; i < priv->dpb_count; i++) {
718 GstVaapiFrameStore * const fs = priv->dpb[i];
719 for (j = 0; j < fs->num_buffers; j++) {
720 if (fs->buffers[j] == picture)
727 /* Finds the picture with the lowest POC that needs to be output */
/* Restricted to @picture's view when @picture != NULL; ties on POC are
 * broken by the lower view order index (VOC). Returns the DPB index, or -1
 * when nothing is pending output. */
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730 GstVaapiPictureH264 **found_picture_ptr)
732 GstVaapiDecoderH264Private * const priv = &decoder->priv;
733 GstVaapiPictureH264 *found_picture = NULL;
734 guint i, j, found_index;
736 for (i = 0; i < priv->dpb_count; i++) {
737 GstVaapiFrameStore * const fs = priv->dpb[i];
738 if (!fs->output_needed)
740 if (picture && picture->base.view_id != fs->view_id)
742 for (j = 0; j < fs->num_buffers; j++) {
743 GstVaapiPictureH264 * const pic = fs->buffers[j];
744 if (!pic->output_needed)
746 if (!found_picture || found_picture->base.poc > pic->base.poc ||
747 (found_picture->base.poc == pic->base.poc &&
748 found_picture->base.voc > pic->base.voc))
749 found_picture = pic, found_index = i;
753 if (found_picture_ptr)
754 *found_picture_ptr = found_picture;
755 return found_picture ? found_index : -1;
758 /* Finds the picture with the lowest VOC that needs to be output */
/* Only considers other views sharing @picture's POC (same access unit).
 * Returns the DPB index, or -1 when none. */
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761 GstVaapiPictureH264 **found_picture_ptr)
763 GstVaapiDecoderH264Private * const priv = &decoder->priv;
764 GstVaapiPictureH264 *found_picture = NULL;
765 guint i, j, found_index;
767 for (i = 0; i < priv->dpb_count; i++) {
768 GstVaapiFrameStore * const fs = priv->dpb[i];
769 if (!fs->output_needed || fs->view_id == picture->base.view_id)
771 for (j = 0; j < fs->num_buffers; j++) {
772 GstVaapiPictureH264 * const pic = fs->buffers[j];
773 if (!pic->output_needed || pic->base.poc != picture->base.poc)
775 if (!found_picture || found_picture->base.voc > pic->base.voc)
776 found_picture = pic, found_index = i;
780 if (found_picture_ptr)
781 *found_picture_ptr = found_picture;
782 return found_picture ? found_index : -1;
/* Outputs the remaining view components of @picture's access unit, in VOC
 * order, up to (excluding) @voc. No-op for single-view streams. The loop
 * wrapper around the find/output/evict sequence is elided in this view. */
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787 GstVaapiPictureH264 *picture, guint voc)
789 GstVaapiDecoderH264Private * const priv = &decoder->priv;
790 GstVaapiPictureH264 *found_picture;
794 if (priv->max_views == 1)
797 /* Emit all other view components that were in the same access
798 unit than the picture we have just found */
799 found_picture = picture;
801 found_index = dpb_find_lowest_voc(decoder, found_picture,
803 if (found_index < 0 || found_picture->base.voc >= voc)
805 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806 dpb_evict(decoder, found_picture, found_index);
/* "Bumps" the DPB: outputs the lowest-POC pending picture (plus, for MVC,
 * the sibling view components of its access unit) and evicts stores that
 * become unused. Return/early-exit lines are elided in this view. */
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
816 GstVaapiDecoderH264Private * const priv = &decoder->priv;
817 GstVaapiPictureH264 *found_picture;
821 found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
825 if (picture && picture->base.poc != found_picture->base.poc)
826 dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
828 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829 dpb_evict(decoder, found_picture, found_index);
830 if (priv->max_views == 1)
833 if (picture && picture->base.poc != found_picture->base.poc)
834 dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
/* Clears DPB entries (only those of @picture's view when @picture != NULL),
 * compacts the remaining stores, and — on flush-all or at AU start — drops
 * the cached previous per-view frame buffers. */
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
841 GstVaapiDecoderH264Private * const priv = &decoder->priv;
844 for (i = 0; i < priv->dpb_count; i++) {
845 if (picture && picture->base.view_id != priv->dpb[i]->view_id)
847 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
850 for (i = 0, n = 0; i < priv->dpb_count; i++) {
852 priv->dpb[n++] = priv->dpb[i];
856 /* Clear previous frame buffers only if this is a "flush-all" operation,
857 or if the picture is the first one in the access unit */
858 if (priv->prev_frames && (!picture ||
859 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
860 GST_VAAPI_PICTURE_FLAG_AU_START))) {
861 for (i = 0; i < priv->max_views; i++)
862 gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
867 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
869 while (dpb_bump(decoder, picture))
871 dpb_clear(decoder, picture);
/* Drops DPB entries from other views that are no longer needed: not pending
 * output, not references, and (unless this is the last picture of the AU)
 * not inter-view references for upcoming frames. Index-advance and brace
 * lines are elided in this view. */
875 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
877 GstVaapiDecoderH264Private * const priv = &decoder->priv;
878 const gboolean is_last_picture = /* in the access unit */
879 GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
882 // Remove all unused inter-view only reference components of the current AU
884 while (i < priv->dpb_count) {
885 GstVaapiFrameStore * const fs = priv->dpb[i];
886 if (fs->view_id != picture->base.view_id &&
887 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
889 !is_inter_view_reference_for_next_frames(decoder, fs)))
890 dpb_remove_index(decoder, i);
/* Stores @picture in the DPB following C.4.5.1/C.4.5.2: prune, pair second
 * fields with their first field's store, create/split a new frame store,
 * and bump until a slot is free (reference and non-reference rules differ).
 * Several early-return/brace lines are elided in this view. */
897 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
899 GstVaapiDecoderH264Private * const priv = &decoder->priv;
900 GstVaapiFrameStore *fs;
903 if (priv->max_views > 1)
904 dpb_prune_mvc(decoder, picture);
906 // Remove all unused pictures
907 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
909 while (i < priv->dpb_count) {
910 GstVaapiFrameStore * const fs = priv->dpb[i];
911 if (fs->view_id == picture->base.view_id &&
912 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
913 dpb_remove_index(decoder, i);
919 // Check if picture is the second field and the first field is still in DPB
920 if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
921 !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
922 const gint found_index = dpb_find_picture(decoder,
923 GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
924 if (found_index >= 0)
925 return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
928 // Create new frame store, and split fields if necessary
929 fs = gst_vaapi_frame_store_new(picture);
932 gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
933 gst_vaapi_frame_store_unref(fs);
935 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
936 if (!gst_vaapi_frame_store_split_fields(fs))
940 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
941 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
942 while (priv->dpb_count == priv->dpb_size) {
943 if (!dpb_bump(decoder, picture))
948 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
950 const gboolean StoreInterViewOnlyRefFlag =
951 !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
952 GST_VAAPI_PICTURE_FLAG_AU_END) &&
953 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
954 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
955 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
957 while (priv->dpb_count == priv->dpb_size) {
958 if (!StoreInterViewOnlyRefFlag) {
959 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
960 return dpb_output(decoder, NULL, picture);
962 if (!dpb_bump(decoder, picture))
967 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
968 if (picture->output_flag) {
969 picture->output_needed = TRUE;
/* (Re)sizes the DPB to @dpb_size: grows the backing array with
 * g_try_realloc_n, zeroing the new tail slots; shrinking below the current
 * fill level is handled by the elided branches. */
976 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
978 GstVaapiDecoderH264Private * const priv = &decoder->priv;
980 if (dpb_size < priv->dpb_count)
983 if (dpb_size > priv->dpb_size_max) {
984 priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
987 memset(&priv->dpb[priv->dpb_size_max], 0,
988 (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
989 priv->dpb_size_max = dpb_size;
992 if (priv->dpb_size < dpb_size)
993 priv->dpb_size = dpb_size;
994 else if (dpb_size < priv->dpb_count)
997 GST_DEBUG("DPB size %u", priv->dpb_size);
1002 unref_inter_view(GstVaapiPictureH264 *picture)
1006 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1007 gst_vaapi_picture_unref(picture);
1010 /* Resets MVC resources */
/* Lazily creates the inter-view reference array and resizes the per-view
 * previous-frame array to max_views entries, releasing any entries beyond
 * the new size and NULL-initializing new slots. Error-return and brace
 * lines are elided in this view. */
1012 mvc_reset(GstVaapiDecoderH264 *decoder)
1014 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1017 // Resize array of inter-view references
1018 if (!priv->inter_views) {
1019 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1020 (GDestroyNotify)unref_inter_view);
1021 if (!priv->inter_views)
1025 // Resize array of previous frame buffers
1026 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1027 gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1029 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1030 sizeof(*priv->prev_frames));
1031 if (!priv->prev_frames) {
1032 priv->prev_frames_alloc = 0;
1035 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1036 priv->prev_frames[i] = NULL;
1037 priv->prev_frames_alloc = priv->max_views;
1041 static GstVaapiDecoderStatus
1042 get_status(GstH264ParserResult result)
1044 GstVaapiDecoderStatus status;
1047 case GST_H264_PARSER_OK:
1048 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1050 case GST_H264_PARSER_NO_NAL_END:
1051 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1053 case GST_H264_PARSER_ERROR:
1054 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1057 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Close: releases per-stream state — current picture, cached parser infos,
 * the whole DPB, the inter-view array and the NAL parser. */
1064 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1066 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1068 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1069 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1070 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1072 dpb_clear(decoder, NULL);
1074 if (priv->inter_views) {
1075 g_ptr_array_unref(priv->inter_views);
1076 priv->inter_views = NULL;
1080 gst_h264_nal_parser_free(priv->parser);
1081 priv->parser = NULL;
/* Open: closes any previous session, then allocates a fresh NAL parser
 * (success/failure return lines are elided in this view). */
1086 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1088 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1090 gst_vaapi_decoder_h264_close(decoder);
1092 priv->parser = gst_h264_nal_parser_new();
/* Destroy: closes the decoder, frees the previous-frames array and drops
 * all cached SPS/PPS parser infos (DPB array free is elided in this view). */
1099 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1101 GstVaapiDecoderH264 * const decoder =
1102 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1103 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1106 gst_vaapi_decoder_h264_close(decoder);
1112 g_free(priv->prev_frames);
1113 priv->prev_frames = NULL;
1114 priv->prev_frames_alloc = 0;
1116 for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1117 gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1118 gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1120 for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1121 gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1122 gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
/* Create: initializes the private state to its defaults (unknown profile,
 * VLD entrypoint, 4:2:0 chroma, progressive). */
1126 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1128 GstVaapiDecoderH264 * const decoder =
1129 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1130 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1132 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1133 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1134 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1135 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1136 priv->progressive_sequence = TRUE;
1140 /* Activates the supplied PPS */
/* Looks up the cached parser info for pps->id, makes it the active PPS
 * (possibly NULL), and returns the parsed PPS or NULL. */
1142 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1144 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1145 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1147 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1148 return pi ? &pi->data.pps : NULL;
1151 /* Returns the active PPS */
1152 static inline GstH264PPS *
1153 get_pps(GstVaapiDecoderH264 *decoder)
1155 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1157 return pi ? &pi->data.pps : NULL;
1160 /* Activate the supplied SPS */
1162 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1164 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1165 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1167 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1168 return pi ? &pi->data.sps : NULL;
1171 /* Returns the active SPS */
1172 static inline GstH264SPS *
1173 get_sps(GstVaapiDecoderH264 *decoder)
1175 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1177 return pi ? &pi->data.sps : NULL;
/* Appends @profile to the candidate profiles[] array and, for profiles
 * with well-known supersets, also appends the compatible superset
 * (MAIN -> HIGH visible here; the switch header itself falls on a line
 * not visible in this extract). Updates *n_profiles_ptr on exit. */
1181 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1182 GstVaapiProfile profile)
1184 guint n_profiles = *n_profiles_ptr;
1186 profiles[n_profiles++] = profile;
1188 case GST_VAAPI_PROFILE_H264_MAIN:
/* A Main-profile stream can always be decoded with a High-profile decoder */
1189 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1194 *n_profiles_ptr = n_profiles;
1197 /* Fills in compatible profiles for MVC decoding */
1199 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1200 guint *n_profiles_ptr, guint dpb_size)
1202 const gchar * const vendor_string =
1203 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1205 gboolean add_high_profile = FALSE;
1210 const struct map *m;
1212 // Drivers that support slice level decoding
1213 if (vendor_string && dpb_size <= 16) {
1214 static const struct map drv_names[] = {
1215 { "Intel i965 driver", 17 },
1218 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1219 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1220 add_high_profile = TRUE;
1224 if (add_high_profile)
1225 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps the SPS profile_idc to a VA-API profile, builds the ordered list of
 * compatible fallback profiles (per H.264 Annex A constraint flags and the
 * MVC rules), and returns the first one the display can actually decode.
 * Returns GST_VAAPI_PROFILE_UNKNOWN when nothing is supported. */
1228 static GstVaapiProfile
1229 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1231 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1232 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1233 GstVaapiProfile profile, profiles[4];
1234 guint i, n_profiles = 0;
1236 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1238 return GST_VAAPI_PROFILE_UNKNOWN;
/* The stream's own profile is always the first (preferred) candidate */
1240 fill_profiles(profiles, &n_profiles, profile);
1242 case GST_VAAPI_PROFILE_H264_BASELINE:
1243 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1244 fill_profiles(profiles, &n_profiles,
1245 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1246 fill_profiles(profiles, &n_profiles,
1247 GST_VAAPI_PROFILE_H264_MAIN);
1250 case GST_VAAPI_PROFILE_H264_EXTENDED:
1251 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1252 fill_profiles(profiles, &n_profiles,
1253 GST_VAAPI_PROFILE_H264_MAIN);
/* MVC cases: Multiview High with exactly 2 views may be decoded by a
 * Stereo High decoder, and vice versa for frame-MBs-only streams */
1256 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1257 if (priv->max_views == 2) {
1258 fill_profiles(profiles, &n_profiles,
1259 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1261 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1263 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1264 if (sps->frame_mbs_only_flag) {
1265 fill_profiles(profiles, &n_profiles,
1266 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1268 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1274 /* If the preferred profile (profiles[0]) matches one that we already
1275 found, then just return it now instead of searching for it again */
1276 if (profiles[0] == priv->profile)
1277 return priv->profile;
/* Otherwise probe the display for the first decodable candidate */
1279 for (i = 0; i < n_profiles; i++) {
1280 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1283 return GST_VAAPI_PROFILE_UNKNOWN;
/* (Re)creates the VA decoding context when any stream property changed:
 * DPB size, profile, chroma format, or coded size. Also refreshes the
 * interlacing flag and pixel aspect ratio on the base decoder, and resets
 * the DPB and MVC state whenever a new context is established. */
1286 static GstVaapiDecoderStatus
1287 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1289 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1290 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1291 GstVaapiContextInfo info;
1292 GstVaapiProfile profile;
1293 GstVaapiChromaType chroma_type;
1294 gboolean reset_context = FALSE;
1295 guint mb_width, mb_height, dpb_size;
/* A growing DPB requirement forces a context reset */
1297 dpb_size = get_max_dec_frame_buffering(sps);
1298 if (priv->dpb_size < dpb_size) {
1299 GST_DEBUG("DPB size increased");
1300 reset_context = TRUE;
1303 profile = get_profile(decoder, sps, dpb_size);
1305 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1306 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* For multiview streams (max_views > 1) a mere profile change does not
 * force a reset; single-view profile changes do */
1309 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1310 GST_DEBUG("profile changed");
1311 reset_context = TRUE;
1312 priv->profile = profile;
1315 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1317 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1318 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1321 if (priv->chroma_type != chroma_type) {
1322 GST_DEBUG("chroma format changed");
1323 reset_context = TRUE;
1324 priv->chroma_type = chroma_type;
/* Map-unit height doubles for field-coded (non frame-MBs-only) streams */
1327 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1328 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1329 !sps->frame_mbs_only_flag;
1330 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1331 GST_DEBUG("size changed");
1332 reset_context = TRUE;
1333 priv->mb_width = mb_width;
1334 priv->mb_height = mb_height;
1337 priv->progressive_sequence = sps->frame_mbs_only_flag;
1338 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1340 gst_vaapi_decoder_set_pixel_aspect_ratio(
1342 sps->vui_parameters.par_n,
1343 sps->vui_parameters.par_d
/* Fast path: nothing changed and a context already exists */
1346 if (!reset_context && priv->has_context)
1347 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1349 /* XXX: fix surface size when cropping is implemented */
1350 info.profile = priv->profile;
1351 info.entrypoint = priv->entrypoint;
1352 info.chroma_type = priv->chroma_type;
1353 info.width = sps->width;
1354 info.height = sps->height;
1355 info.ref_frames = dpb_size;
1357 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1358 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1359 priv->has_context = TRUE;
/* Fresh context implies fresh reference state */
1362 if (!dpb_reset(decoder, dpb_size))
1363 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1365 /* Reset MVC data */
1366 if (!mvc_reset(decoder))
1367 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1368 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Copies all six 4x4 scaling lists from the PPS into the VA IQ-matrix
 * buffer, converting each from zig-zag scan order to raster order. */
1372 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1373 const GstH264SPS *sps)
1377 /* There are always 6 4x4 scaling lists */
1378 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1379 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1381 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1382 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1383 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Copies the 8x8 scaling lists from the PPS into the VA IQ-matrix buffer
 * (zig-zag to raster). Only applies when 8x8 transforms are enabled; the
 * number of lists depends on the chroma format (2 unless 4:4:4, then 6). */
1387 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1388 const GstH264SPS *sps)
1392 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1393 if (!pps->transform_8x8_mode_flag)
1396 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1397 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1399 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1400 for (i = 0; i < n; i++) {
1401 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1402 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates the picture's VA IQ-matrix buffer and fills it from the active
 * PPS/SPS scaling lists. 4:4:4 streams are rejected because the VA
 * ScalingList8x8 array cannot hold the required six 8x8 lists. */
1406 static GstVaapiDecoderStatus
1407 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1409 GstVaapiPicture * const base_picture = &picture->base;
1410 GstH264PPS * const pps = get_pps(decoder);
1411 GstH264SPS * const sps = get_sps(decoder);
1412 VAIQMatrixBufferH264 *iq_matrix;
1414 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1415 if (!base_picture->iq_matrix) {
1416 GST_ERROR("failed to allocate IQ matrix");
1417 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1419 iq_matrix = base_picture->iq_matrix->param;
1421 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1422 is not large enough to hold lists for 4:4:4 */
1423 if (sps->chroma_format_idc == 3)
1424 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1426 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1427 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1429 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns TRUE when every bit of @ref_state is set in @state, i.e. the
 * decoder/parser has reached at least the given milestone mask. */
1432 static inline gboolean
1433 is_valid_state(guint state, guint ref_state)
1435 return (state & ref_state) == ref_state;
/* Finalizes the picture currently being decoded: checks that a valid
 * picture was assembled, applies reference picture marking, stores it in
 * the DPB and submits it for hardware decode. On success the current
 * picture reference is released; on failure it is dropped (error paths
 * between the calls fall on lines not visible in this extract). */
1438 static GstVaapiDecoderStatus
1439 decode_current_picture(GstVaapiDecoderH264 *decoder)
1441 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1442 GstVaapiPictureH264 * const picture = priv->current_picture;
1444 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1446 priv->decoder_state = 0;
1449 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* 8.2.5: mark references, then add to DPB, then decode */
1451 if (!exec_ref_pic_marking(decoder, picture))
1453 if (!dpb_add(decoder, picture))
1455 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1457 gst_vaapi_picture_replace(&priv->current_picture, NULL)
1458 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1461 /* XXX: fix for cases where first field failed to be decoded */
1462 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1463 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1466 priv->decoder_state = 0;
1467 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into the unit's parser info. Resets the parser
 * state first (an SPS starts a new activation chain), seeds fields the
 * parser may leave untouched, and records GOT_SPS on success. */
1470 static GstVaapiDecoderStatus
1471 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1473 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1474 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1475 GstH264SPS * const sps = &pi->data.sps;
1476 GstH264ParserResult result;
1478 GST_DEBUG("parse SPS");
1480 priv->parser_state = 0;
1482 /* Variables that don't have inferred values per the H.264
1483 standard but that should get a default value anyway */
1484 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1486 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1487 if (result != GST_H264_PARSER_OK)
1488 return get_status(result);
1490 /* Reset defaults */
1491 priv->max_views = 1;
1493 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1494 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS NAL unit (MVC extension data). Unlike parse_sps(),
 * the parser state is not reset and max_views is left unchanged. */
1497 static GstVaapiDecoderStatus
1498 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1500 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1501 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1502 GstH264SPS * const sps = &pi->data.sps;
1503 GstH264ParserResult result;
1505 GST_DEBUG("parse subset SPS");
1507 /* Variables that don't have inferred values per the H.264
1508 standard but that should get a default value anyway */
1509 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1511 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1513 if (result != GST_H264_PARSER_OK)
1514 return get_status(result);
1516 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1517 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit. The parser state is masked down to GOT_SPS only:
 * a new PPS invalidates any previously seen PPS/slice state while an SPS
 * remains prerequisite. Records GOT_PPS on success. */
1520 static GstVaapiDecoderStatus
1521 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1523 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1524 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1525 GstH264PPS * const pps = &pi->data.pps;
1526 GstH264ParserResult result;
1528 GST_DEBUG("parse PPS");
1530 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1532 /* Variables that don't have inferred values per the H.264
1533 standard but that should get a default value anyway */
1534 pps->slice_group_map_type = 0;
1535 pps->slice_group_change_rate_minus1 = 0;
1537 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1538 if (result != GST_H264_PARSER_OK)
1539 return get_status(result);
1541 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1542 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses SEI messages into the unit's parser info (pi->data.sei GArray).
 * A parse failure is logged as a warning and mapped to a decoder status;
 * it does not touch the decoder's parser state. */
1545 static GstVaapiDecoderStatus
1546 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1548 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1549 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1550 GArray ** const sei_ptr = &pi->data.sei;
1551 GstH264ParserResult result;
1553 GST_DEBUG("parse SEI");
1555 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1556 if (result != GST_H264_PARSER_OK) {
1557 GST_WARNING("failed to parse SEI messages");
1558 return get_status(result);
1560 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC streams, NAL extension info is
 * propagated from the immediately preceding Prefix NAL unit, or inferred
 * per H.7.4.1.1 when no prefix was present. On success the per-unit view
 * id / view order index are recorded and GOT_SLICE is set.
 * (Local declarations of `sps` and `num_views` fall on lines not visible
 * in this extract.) */
1563 static GstVaapiDecoderStatus
1564 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1566 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1567 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1568 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1569 GstH264NalUnit * const nalu = &pi->nalu;
1571 GstH264ParserResult result;
1574 GST_DEBUG("parse slice");
/* Keep only the SPS/PPS milestones; a new slice resets slice state */
1576 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1577 GST_H264_VIDEO_STATE_GOT_PPS);
1579 /* Propagate Prefix NAL unit info, if necessary */
1580 switch (nalu->type) {
1581 case GST_H264_NAL_SLICE:
1582 case GST_H264_NAL_SLICE_IDR: {
1583 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1584 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1585 /* MVC sequences shall have a Prefix NAL unit immediately
1586 preceding this NAL unit */
1587 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1588 pi->nalu.extension = prev_pi->nalu.extension;
1591 /* In the very unlikely case there is no Prefix NAL unit
1592 immediately preceding this NAL unit, try to infer some
1593 defaults (H.7.4.1.1) */
1594 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1595 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1596 nalu->idr_pic_flag = !mvc->non_idr_flag;
1597 mvc->priority_id = 0;
1599 mvc->temporal_id = 0;
1600 mvc->anchor_pic_flag = 0;
1601 mvc->inter_view_flag = 1;
1607 /* Variables that don't have inferred values per the H.264
1608 standard but that should get a default value anyway */
1609 slice_hdr->cabac_init_idc = 0;
1610 slice_hdr->direct_spatial_mv_pred_flag = 0;
1612 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1613 slice_hdr, TRUE, TRUE);
1614 if (result != GST_H264_PARSER_OK)
1615 return get_status(result);
1617 sps = slice_hdr->pps->sequence;
1619 /* Update MVC data */
1620 num_views = get_num_views(sps);
1621 if (priv->max_views < num_views) {
1622 priv->max_views = num_views;
1623 GST_DEBUG("maximum number of views changed to %u", num_views);
1625 pi->view_id = get_view_id(&pi->nalu);
1626 pi->voc = get_view_order_index(sps, pi->view_id);
1628 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1629 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* decode_sps / decode_subset_sps / decode_pps: store the already-parsed
 * parameter set's parser info into the decoder's per-id cache, replacing
 * (and unreffing) any previous entry with the same id. Activation happens
 * later via ensure_sps()/ensure_pps(). */
1632 static GstVaapiDecoderStatus
1633 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1635 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1636 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1637 GstH264SPS * const sps = &pi->data.sps;
1639 GST_DEBUG("decode SPS");
1641 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1642 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1645 static GstVaapiDecoderStatus
1646 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1648 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1649 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1650 GstH264SPS * const sps = &pi->data.sps;
1652 GST_DEBUG("decode subset SPS");
/* Subset SPS shares the same cache as regular SPS, keyed by sps->id */
1654 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1655 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1658 static GstVaapiDecoderStatus
1659 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1661 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1662 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1663 GstH264PPS * const pps = &pi->data.pps;
1665 GST_DEBUG("decode PPS")
1667 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1668 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles an end-of-sequence NAL unit: finishes decoding the pending
 * picture, then flushes all remaining pictures out of the DPB. */
1671 static GstVaapiDecoderStatus
1672 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1674 GstVaapiDecoderStatus status;
1676 GST_DEBUG("decode sequence-end");
1678 status = decode_current_picture(decoder);
1679 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1682 dpb_flush(decoder, NULL);
1683 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1686 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Computes the top/bottom field POCs from pic_order_cnt_lsb, detecting
 * lsb wrap-around to maintain poc_msb across pictures, with the special
 * resets for IDR pictures and after memory_management_control_operation 5.
 * (The function name line and `temp_poc` declaration fall on lines not
 * visible in this extract.) */
1689 GstVaapiDecoderH264 *decoder,
1690 GstVaapiPictureH264 *picture,
1691 GstH264SliceHdr *slice_hdr
1694 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1695 GstH264SPS * const sps = get_sps(decoder);
1696 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1699 GST_DEBUG("decode picture order count type 0");
1701 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1702 priv->prev_poc_msb = 0;
1703 priv->prev_poc_lsb = 0;
1705 else if (priv->prev_pic_has_mmco5) {
1706 priv->prev_poc_msb = 0;
1707 priv->prev_poc_lsb =
1708 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1709 0 : priv->field_poc[TOP_FIELD]);
1712 priv->prev_poc_msb = priv->poc_msb;
1713 priv->prev_poc_lsb = priv->poc_lsb;
/* (8-3): detect lsb wrap in either direction to update poc_msb */
1717 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1718 if (priv->poc_lsb < priv->prev_poc_lsb &&
1719 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1720 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1721 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1722 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1723 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1725 priv->poc_msb = priv->prev_poc_msb;
1727 temp_poc = priv->poc_msb + priv->poc_lsb;
1728 switch (picture->structure) {
1729 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
/* Frames get both fields; bottom offset by delta_pic_order_cnt_bottom */
1731 priv->field_poc[TOP_FIELD] = temp_poc;
1732 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1733 slice_hdr->delta_pic_order_cnt_bottom;
1735 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1737 priv->field_poc[TOP_FIELD] = temp_poc;
1739 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1741 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1746 /* 8.2.1.2 - Decoding process for picture order count type 1 */
/* Derives field POCs from frame_num and the SPS's expected-delta POC cycle
 * (offset_for_ref_frame[]), handling frame_num wrap via frame_num_offset.
 * (The function name line and the `i` declaration fall on lines not
 * visible in this extract.) */
1749 GstVaapiDecoderH264 *decoder,
1750 GstVaapiPictureH264 *picture,
1751 GstH264SliceHdr *slice_hdr
1754 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1755 GstH264SPS * const sps = get_sps(decoder);
1756 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1757 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1760 GST_DEBUG("decode picture order count type 1");
/* (8-6): mmco5 resets the previous frame_num_offset */
1762 if (priv->prev_pic_has_mmco5)
1763 prev_frame_num_offset = 0;
1765 prev_frame_num_offset = priv->frame_num_offset;
/* (8-7): accumulate MaxFrameNum on each frame_num wrap */
1768 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1769 priv->frame_num_offset = 0;
1770 else if (priv->prev_frame_num > priv->frame_num)
1771 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1773 priv->frame_num_offset = prev_frame_num_offset;
/* (8-8): abs_frame_num; zeroed when the POC cycle is empty (branch on a
 * line not visible in this extract) */
1776 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1777 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1780 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1781 abs_frame_num = abs_frame_num - 1;
1783 if (abs_frame_num > 0) {
1784 gint32 expected_delta_per_poc_cycle;
1785 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1787 expected_delta_per_poc_cycle = 0;
1788 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1789 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1792 poc_cycle_cnt = (abs_frame_num - 1) /
1793 sps->num_ref_frames_in_pic_order_cnt_cycle;
1794 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1795 sps->num_ref_frames_in_pic_order_cnt_cycle;
1798 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1799 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1800 expected_poc += sps->offset_for_ref_frame[i];
1804 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1805 expected_poc += sps->offset_for_non_ref_pic;
1808 switch (picture->structure) {
1809 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1810 priv->field_poc[TOP_FIELD] = expected_poc +
1811 slice_hdr->delta_pic_order_cnt[0];
1812 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1813 sps->offset_for_top_to_bottom_field +
1814 slice_hdr->delta_pic_order_cnt[1];
1816 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1817 priv->field_poc[TOP_FIELD] = expected_poc +
1818 slice_hdr->delta_pic_order_cnt[0];
1820 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1821 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1822 sps->offset_for_top_to_bottom_field +
1823 slice_hdr->delta_pic_order_cnt[0];
1828 /* 8.2.1.3 - Decoding process for picture order count type 2 */
/* Display order follows decoding order: POC is derived directly from
 * frame_num (times two, minus one for non-reference pictures), with the
 * usual frame_num wrap handling. (Function name line not visible here.) */
1831 GstVaapiDecoderH264 *decoder,
1832 GstVaapiPictureH264 *picture,
1833 GstH264SliceHdr *slice_hdr
1836 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1837 GstH264SPS * const sps = get_sps(decoder);
1838 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1839 gint32 prev_frame_num_offset, temp_poc;
1841 GST_DEBUG("decode picture order count type 2");
/* (8-11): mmco5 resets the previous frame_num_offset */
1843 if (priv->prev_pic_has_mmco5)
1844 prev_frame_num_offset = 0;
1846 prev_frame_num_offset = priv->frame_num_offset;
/* (8-12): accumulate MaxFrameNum on each frame_num wrap */
1849 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1850 priv->frame_num_offset = 0;
1851 else if (priv->prev_frame_num > priv->frame_num)
1852 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1854 priv->frame_num_offset = prev_frame_num_offset;
/* (8-13): IDR case (temp_poc = 0) sits on a line not visible here */
1857 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1859 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1860 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1862 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
/* Both fields share the same POC for type 2 */
1865 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1866 priv->field_poc[TOP_FIELD] = temp_poc;
1867 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1868 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1871 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches to the POC derivation matching sps->pic_order_cnt_type
 * (0, 1 or 2 — the case labels fall on lines not visible here), copies
 * the relevant field POCs into the picture, and sets the picture's base
 * POC to the smaller of the two field POCs. (Function name line is not
 * visible in this extract.) */
1874 GstVaapiDecoderH264 *decoder,
1875 GstVaapiPictureH264 *picture,
1876 GstH264SliceHdr *slice_hdr
1879 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1880 GstH264SPS * const sps = get_sps(decoder);
1882 switch (sps->pic_order_cnt_type) {
1884 init_picture_poc_0(decoder, picture, slice_hdr);
1887 init_picture_poc_1(decoder, picture, slice_hdr);
1890 init_picture_poc_2(decoder, picture, slice_hdr);
/* A field picture only receives the POC of its own parity */
1894 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1895 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1896 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1897 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1898 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
/* qsort() comparators for reference picture list construction. Naming
 * convention: suffix _dec sorts descending on the named key, _inc sorts
 * ascending. Each takes pointers to GstVaapiPictureH264* elements. */
1902 compare_picture_pic_num_dec(const void *a, const void *b)
1904 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1905 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1907 return picB->pic_num - picA->pic_num;
1911 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1913 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1914 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1916 return picA->long_term_pic_num - picB->long_term_pic_num;
1920 compare_picture_poc_dec(const void *a, const void *b)
1922 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1923 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1925 return picB->base.poc - picA->base.poc;
1929 compare_picture_poc_inc(const void *a, const void *b)
1931 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1932 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1934 return picA->base.poc - picB->base.poc;
1938 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1940 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1941 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1943 return picB->frame_num_wrap - picA->frame_num_wrap;
1947 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1949 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1950 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1952 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1955 /* 8.2.4.1 - Decoding process for picture numbers */
/* Assigns pic_num / frame_num_wrap to short-term references and
 * long_term_pic_num to long-term references of the current view,
 * per equations (8-27) through (8-33). References belonging to a
 * different MVC view are skipped. */
1957 init_picture_refs_pic_num(
1958 GstVaapiDecoderH264 *decoder,
1959 GstVaapiPictureH264 *picture,
1960 GstH264SliceHdr *slice_hdr
1963 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1964 GstH264SPS * const sps = get_sps(decoder);
1965 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1968 GST_DEBUG("decode picture numbers");
1970 for (i = 0; i < priv->short_ref_count; i++) {
1971 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Only references of the same view participate (MVC) */
1974 if (pic->base.view_id != picture->base.view_id)
/* (8-27): unwrap frame_num relative to the current picture */
1978 if (pic->frame_num > priv->frame_num)
1979 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1981 pic->frame_num_wrap = pic->frame_num;
1983 // (8-28, 8-30, 8-31)
1984 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1985 pic->pic_num = pic->frame_num_wrap;
/* Field pictures: same-parity fields get 2x+1, opposite parity 2x */
1987 if (pic->structure == picture->structure)
1988 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1990 pic->pic_num = 2 * pic->frame_num_wrap;
1994 for (i = 0; i < priv->long_ref_count; i++) {
1995 GstVaapiPictureH264 * const pic = priv->long_ref[i];
1998 if (pic->base.view_id != picture->base.view_id)
2001 // (8-29, 8-32, 8-33)
2002 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2003 pic->long_term_pic_num = pic->long_term_frame_idx;
2005 if (pic->structure == picture->structure)
2006 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2008 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Convenience wrapper around qsort() using one of the comparators above,
 * selected by token-pasting the comparison key name. */
2013 #define SORT_REF_LIST(list, n, compare_func) \
2014     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 step helper: appends entries from ref_list[] to RefPicList[],
 * alternating between fields that match the current picture's parity and
 * fields of the opposite parity, preserving the ref_list order within
 * each parity. Updates *RefPicList_count. */
2017 init_picture_refs_fields_1(
2018 guint picture_structure,
2019 GstVaapiPictureH264 *RefPicList[32],
2020 guint *RefPicList_count,
2021 GstVaapiPictureH264 *ref_list[32],
2022 guint ref_list_count
2029 n = *RefPicList_count;
2032 for (; i < ref_list_count; i++) {
2033 if (ref_list[i]->structure == picture_structure) {
2034 RefPicList[n++] = ref_list[i++];
2038 for (; j < ref_list_count; j++) {
2039 if (ref_list[j]->structure != picture_structure) {
2040 RefPicList[n++] = ref_list[j++];
2044 } while (i < ref_list_count || j < ref_list_count);
2045 *RefPicList_count = n;
/* Builds a field reference picture list: short-term references first,
 * then long-term, each interleaved by field parity as per 8.2.4.2.5. */
2049 init_picture_refs_fields(
2050 GstVaapiPictureH264 *picture,
2051 GstVaapiPictureH264 *RefPicList[32],
2052 guint *RefPicList_count,
2053 GstVaapiPictureH264 *short_ref[32],
2054 guint short_ref_count,
2055 GstVaapiPictureH264 *long_ref[32],
2056 guint long_ref_count
2061 /* 8.2.4.2.5 - reference picture lists in fields */
2062 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2063 short_ref, short_ref_count);
2064 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2065 long_ref, long_ref_count);
2066 *RefPicList_count = n;
2069 /* Finds the inter-view reference picture with the supplied view id */
/* Linear scan of priv->inter_views; logs a warning and returns (NULL,
 * on a line not visible in this extract) when no match is found. */
2070 static GstVaapiPictureH264 *
2071 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2073 GPtrArray * const inter_views = decoder->priv.inter_views;
2076 for (i = 0; i < inter_views->len; i++) {
2077 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2078 if (picture->base.view_id == view_id)
2082 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2087 /* Checks whether the view id exists in the supplied list of view ids */
2089 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2093 for (i = 0; i < num_view_ids; i++) {
2094 if (view_ids[i] == view_id)
/* Checks the SPS MVC view dependency lists for @view_id: the anchor
 * lists when is_anchor (branch on a line not visible here), otherwise
 * the non-anchor lists, for both L0 and L1. */
2101 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2105 return (find_view_id(view_id, view->anchor_ref_l0,
2106 view->num_anchor_refs_l0) ||
2107 find_view_id(view_id, view->anchor_ref_l1,
2108 view->num_anchor_refs_l1));
2110 return (find_view_id(view_id, view->non_anchor_ref_l0,
2111 view->num_non_anchor_refs_l0) ||
2112 find_view_id(view_id, view->non_anchor_ref_l1,
2113 view->num_non_anchor_refs_l1));
2116 /* Checks whether the inter-view reference picture with the supplied
2117 view id is used for decoding the current view component picture */
2119 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2120 guint16 view_id, GstVaapiPictureH264 *picture)
2122 const GstH264SPS * const sps = get_sps(decoder);
2125 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2126 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
/* Consult the dependency lists of the picture's own view (by voc) */
2129 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2130 return find_view_id_in_view(view_id,
2131 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2134 /* Checks whether the supplied inter-view reference picture is used
2135 for decoding the next view component pictures */
2137 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2138 GstVaapiPictureH264 *picture)
2140 const GstH264SPS * const sps = get_sps(decoder);
2144 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2145 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
/* Scan all views with a higher view order index than this picture's */
2148 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2149 num_views = sps->extension.mvc.num_views_minus1 + 1;
2150 for (i = picture->base.voc + 1; i < num_views; i++) {
2151 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2152 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2158 /* H.8.2.1 - Initialization process for inter-view prediction references */
/* Appends inter-view reference pictures (looked up by view id) to
 * ref_list[], up to num_refs entries; missing views are skipped since
 * find_inter_view_reference() may return NULL. */
2160 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2161 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2162 const guint16 *view_ids, guint num_view_ids)
2166 n = *ref_list_count_ptr;
2167 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2168 GstVaapiPictureH264 * const pic =
2169 find_inter_view_reference(decoder, view_ids[j]);
2171 ref_list[n++] = pic;
2173 *ref_list_count_ptr = n;
/* Extends RefPicList0 or RefPicList1 (@list selects which; the selecting
 * branch falls on lines not visible in this extract) with inter-view
 * references from the SPS MVC view dependency lists — anchor lists for
 * anchor pictures, non-anchor lists otherwise. */
2177 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2178 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2180 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2181 const GstH264SPS * const sps = get_sps(decoder);
2182 const GstH264SPSExtMVCView *view;
2184 GST_DEBUG("initialize reference picture list for inter-view prediction");
2186 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2188 view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pasting helper: expands to the RefPicListN fields, active ref
 * count and the matching (non_)anchor_ref view-id arrays for list N */
2190 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2191         init_picture_refs_mvc_1(decoder,                                \
2192             priv->RefPicList##ref_list,                                 \
2193             &priv->RefPicList##ref_list##_count,                        \
2194             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2195             view->view_list##_l##ref_list,                              \
2196             view->num_##view_list##s_l##ref_list);                      \
2200 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2201 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2203 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2206 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2207 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2209 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2212 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* 8.2.4.2.1 / 8.2.4.2.2 - Builds RefPicList0 for P and SP slices:
 * short-term references sorted by descending pic_num (frames) or
 * descending frame_num_wrap (fields), followed by long-term references
 * sorted ascending, with field-parity interleaving for field pictures.
 * MVC pictures additionally get inter-view references appended to list 0. */
2216 init_picture_refs_p_slice(
2217 GstVaapiDecoderH264 *decoder,
2218 GstVaapiPictureH264 *picture,
2219 GstH264SliceHdr *slice_hdr
2222 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2223 GstVaapiPictureH264 **ref_list;
2226 GST_DEBUG("decode reference picture list for P and SP slices");
2228 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2229 /* 8.2.4.2.1 - P and SP slices in frames */
2230 if (priv->short_ref_count > 0) {
2231 ref_list = priv->RefPicList0;
2232 for (i = 0; i < priv->short_ref_count; i++)
2233 ref_list[i] = priv->short_ref[i];
2234 SORT_REF_LIST(ref_list, i, pic_num_dec);
2235 priv->RefPicList0_count += i;
/* Long-term references follow, appended after the short-term ones */
2238 if (priv->long_ref_count > 0) {
2239 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2240 for (i = 0; i < priv->long_ref_count; i++)
2241 ref_list[i] = priv->long_ref[i];
2242 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2243 priv->RefPicList0_count += i;
2247 /* 8.2.4.2.2 - P and SP slices in fields */
2248 GstVaapiPictureH264 *short_ref[32];
2249 guint short_ref_count = 0;
2250 GstVaapiPictureH264 *long_ref[32];
2251 guint long_ref_count = 0;
/* Pre-sort each family, then interleave by field parity below */
2253 if (priv->short_ref_count > 0) {
2254 for (i = 0; i < priv->short_ref_count; i++)
2255 short_ref[i] = priv->short_ref[i];
2256 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2257 short_ref_count = i;
2260 if (priv->long_ref_count > 0) {
2261 for (i = 0; i < priv->long_ref_count; i++)
2262 long_ref[i] = priv->long_ref[i];
2263 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2267 init_picture_refs_fields(
2269 priv->RefPicList0, &priv->RefPicList0_count,
2270 short_ref, short_ref_count,
2271 long_ref, long_ref_count
/* H.8.2.1: append inter-view references for MVC pictures */
2275 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2277 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* 8.2.4.2.3 / 8.2.4.2.4 - Initial reference picture list construction
 * for B slices. RefPicList0 orders short-term refs with POC < current
 * first (decreasing POC), then POC >= current (increasing POC);
 * RefPicList1 uses the mirrored ordering; long-term refs follow in
 * increasing LongTermPicNum. If the two lists come out identical, the
 * first two entries of RefPicList1 are swapped (spec note in 8.2.4.2.3).
 * NOTE(review): source view has dropped lines (braces, else arms,
 * long_ref_count assignment around original line 2407). */
2282 init_picture_refs_b_slice(
2283 GstVaapiDecoderH264 *decoder,
2284 GstVaapiPictureH264 *picture,
2285 GstH264SliceHdr *slice_hdr
2288 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2289 GstVaapiPictureH264 **ref_list;
2292 GST_DEBUG("decode reference picture list for B slices");
2294 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2295 /* 8.2.4.2.3 - B slices in frames */
2298 if (priv->short_ref_count > 0) {
2299 // 1. Short-term references
/* 1a. POC < current picture, by decreasing POC. */
2300 ref_list = priv->RefPicList0;
2301 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2302 if (priv->short_ref[i]->base.poc < picture->base.poc)
2303 ref_list[n++] = priv->short_ref[i];
2305 SORT_REF_LIST(ref_list, n, poc_dec);
2306 priv->RefPicList0_count += n;
/* 1b. POC >= current picture, by increasing POC. */
2308 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2309 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2310 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2311 ref_list[n++] = priv->short_ref[i];
2313 SORT_REF_LIST(ref_list, n, poc_inc);
2314 priv->RefPicList0_count += n;
2317 if (priv->long_ref_count > 0) {
2318 // 2. Long-term references
2319 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2320 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2321 ref_list[n++] = priv->long_ref[i];
2322 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2323 priv->RefPicList0_count += n;
/* RefPicList1: mirrored short-term ordering of RefPicList0. */
2327 if (priv->short_ref_count > 0) {
2328 // 1. Short-term references
2329 ref_list = priv->RefPicList1;
2330 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2331 if (priv->short_ref[i]->base.poc > picture->base.poc)
2332 ref_list[n++] = priv->short_ref[i];
2334 SORT_REF_LIST(ref_list, n, poc_inc);
2335 priv->RefPicList1_count += n;
2337 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2338 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2339 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2340 ref_list[n++] = priv->short_ref[i];
2342 SORT_REF_LIST(ref_list, n, poc_dec);
2343 priv->RefPicList1_count += n;
2346 if (priv->long_ref_count > 0) {
2347 // 2. Long-term references
2348 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2349 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2350 ref_list[n++] = priv->long_ref[i];
2351 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2352 priv->RefPicList1_count += n;
2356 /* 8.2.4.2.4 - B slices in fields */
2357 GstVaapiPictureH264 *short_ref0[32];
2358 guint short_ref0_count = 0;
2359 GstVaapiPictureH264 *short_ref1[32];
2360 guint short_ref1_count = 0;
2361 GstVaapiPictureH264 *long_ref[32];
2362 guint long_ref_count = 0;
2364 /* refFrameList0ShortTerm */
2365 if (priv->short_ref_count > 0) {
2366 ref_list = short_ref0;
2367 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2368 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2369 ref_list[n++] = priv->short_ref[i];
2371 SORT_REF_LIST(ref_list, n, poc_dec);
2372 short_ref0_count += n;
2374 ref_list = &short_ref0[short_ref0_count];
2375 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2376 if (priv->short_ref[i]->base.poc > picture->base.poc)
2377 ref_list[n++] = priv->short_ref[i];
2379 SORT_REF_LIST(ref_list, n, poc_inc);
2380 short_ref0_count += n;
2383 /* refFrameList1ShortTerm */
2384 if (priv->short_ref_count > 0) {
2385 ref_list = short_ref1;
2386 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2387 if (priv->short_ref[i]->base.poc > picture->base.poc)
2388 ref_list[n++] = priv->short_ref[i];
2390 SORT_REF_LIST(ref_list, n, poc_inc);
2391 short_ref1_count += n;
2393 ref_list = &short_ref1[short_ref1_count];
2394 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2395 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2396 ref_list[n++] = priv->short_ref[i];
2398 SORT_REF_LIST(ref_list, n, poc_dec);
2399 short_ref1_count += n;
2402 /* refFrameListLongTerm */
2403 if (priv->long_ref_count > 0) {
2404 for (i = 0; i < priv->long_ref_count; i++)
2405 long_ref[i] = priv->long_ref[i];
2406 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
/* 8.2.4.2.5 - field interleaving into the final lists. */
2410 init_picture_refs_fields(
2412 priv->RefPicList0, &priv->RefPicList0_count,
2413 short_ref0, short_ref0_count,
2414 long_ref, long_ref_count
2417 init_picture_refs_fields(
2419 priv->RefPicList1, &priv->RefPicList1_count,
2420 short_ref1, short_ref1_count,
2421 long_ref, long_ref_count
2425 /* Check whether RefPicList1 is identical to RefPicList0, then
2426 swap if necessary */
2427 if (priv->RefPicList1_count > 1 &&
2428 priv->RefPicList1_count == priv->RefPicList0_count &&
2429 memcmp(priv->RefPicList0, priv->RefPicList1,
2430 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2431 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2432 priv->RefPicList1[0] = priv->RefPicList1[1];
2433 priv->RefPicList1[1] = tmp;
/* H.8.2.1 - append inter-view references to both lists for MVC. */
2436 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2438 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2441 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2445 #undef SORT_REF_LIST
/* Look up the index in priv->short_ref[] of the short-term reference
 * picture whose PicNum equals pic_num; logs an error when not found
 * (return statements live on dropped lines in this view). */
2448 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2450 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2453 for (i = 0; i < priv->short_ref_count; i++) {
2454 if (priv->short_ref[i]->pic_num == pic_num)
2457 GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Look up the index in priv->long_ref[] of the long-term reference
 * picture whose LongTermPicNum equals long_term_pic_num; logs an error
 * when not found. */
2463 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2465 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2468 for (i = 0; i < priv->long_ref_count; i++) {
2469 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2472 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* 8.2.4.3 (single list) - Apply the ref_pic_list_modification() syntax
 * elements to RefPicList`list` (0 or 1): shifts entries to make room,
 * inserts the targeted short-term (8.2.4.3.1), long-term (8.2.4.3.2)
 * or MVC inter-view (H.8.2.2.3) reference at the current insertion
 * index, and compacts the remainder of the list.
 * NOTE(review): source view has dropped lines (if/else framing of the
 * list-0 vs list-1 setup, loop-internal declarations, closing braces). */
2478 exec_picture_refs_modification_1(
2479 GstVaapiDecoderH264 *decoder,
2480 GstVaapiPictureH264 *picture,
2481 GstH264SliceHdr *slice_hdr,
2485 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2486 GstH264SPS * const sps = get_sps(decoder);
2487 GstH264RefPicListModification *ref_pic_list_modification;
2488 guint num_ref_pic_list_modifications;
2489 GstVaapiPictureH264 **ref_list;
2490 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2491 const guint16 *view_ids = NULL;
2492 guint i, j, n, num_refs, num_view_ids = 0;
2494 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2496 GST_DEBUG("modification process of reference picture list %u", list);
/* Select list-0 state (modifications, list pointer, count, active refs,
 * and — for MVC — the per-view inter-view reference id arrays). */
2499 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2500 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2501 ref_list = priv->RefPicList0;
2502 ref_list_count_ptr = &priv->RefPicList0_count;
2503 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2505 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2506 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2507 const GstH264SPSExtMVCView * const view =
2508 &sps->extension.mvc.view[picture->base.voc];
2509 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2510 view_ids = view->anchor_ref_l0;
2511 num_view_ids = view->num_anchor_refs_l0;
2514 view_ids = view->non_anchor_ref_l0;
2515 num_view_ids = view->num_non_anchor_refs_l0;
/* Same selection for list 1. */
2520 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2521 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2522 ref_list = priv->RefPicList1;
2523 ref_list_count_ptr = &priv->RefPicList1_count;
2524 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2526 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2527 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2528 const GstH264SPSExtMVCView * const view =
2529 &sps->extension.mvc.view[picture->base.voc];
2530 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2531 view_ids = view->anchor_ref_l1;
2532 num_view_ids = view->num_anchor_refs_l1;
2535 view_ids = view->non_anchor_ref_l1;
2536 num_view_ids = view->num_non_anchor_refs_l1;
2540 ref_list_count = *ref_list_count_ptr;
/* 8.2.4.1 - derive MaxPicNum / CurrPicNum; field pictures use the
 * doubled frame-number space. */
2542 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2543 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2544 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2547 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2548 CurrPicNum = slice_hdr->frame_num; // frame_num
2551 picNumPred = CurrPicNum;
2552 picViewIdxPred = -1;
2554 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2555 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
/* idc == 3 terminates the modification loop. */
2556 if (l->modification_of_pic_nums_idc == 3)
2559 /* 8.2.4.3.1 - Short-term reference pictures */
2560 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2561 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2562 gint32 picNum, picNumNoWrap;
/* idc 0 subtracts, idc 1 adds, both modulo MaxPicNum. */
2565 if (l->modification_of_pic_nums_idc == 0) {
2566 picNumNoWrap = picNumPred - abs_diff_pic_num;
2567 if (picNumNoWrap < 0)
2568 picNumNoWrap += MaxPicNum;
2573 picNumNoWrap = picNumPred + abs_diff_pic_num;
2574 if (picNumNoWrap >= MaxPicNum)
2575 picNumNoWrap -= MaxPicNum;
2577 picNumPred = picNumNoWrap;
2580 picNum = picNumNoWrap;
2581 if (picNum > CurrPicNum)
2582 picNum -= MaxPicNum;
/* Shift entries right, insert the matched picture (NULL when the
 * reference is missing), then compact trailing duplicates. */
2585 for (j = num_refs; j > ref_list_idx; j--)
2586 ref_list[j] = ref_list[j - 1];
2587 found_ref_idx = find_short_term_reference(decoder, picNum);
2588 ref_list[ref_list_idx++] =
2589 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2591 for (j = ref_list_idx; j <= num_refs; j++) {
2596 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2597 ref_list[j]->pic_num : MaxPicNum;
2598 if (PicNumF != picNum ||
2599 ref_list[j]->base.view_id != picture->base.view_id)
2600 ref_list[n++] = ref_list[j];
2604 /* 8.2.4.3.2 - Long-term reference pictures */
2605 else if (l->modification_of_pic_nums_idc == 2) {
2607 for (j = num_refs; j > ref_list_idx; j--)
2608 ref_list[j] = ref_list[j - 1];
2610 find_long_term_reference(decoder, l->value.long_term_pic_num);
2611 ref_list[ref_list_idx++] =
2612 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2614 for (j = ref_list_idx; j <= num_refs; j++) {
2615 gint32 LongTermPicNumF;
2619 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2620 ref_list[j]->long_term_pic_num : INT_MAX;
2621 if (LongTermPicNumF != l->value.long_term_pic_num ||
2622 ref_list[j]->base.view_id != picture->base.view_id)
2623 ref_list[n++] = ref_list[j];
2627 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2628 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2629 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2630 (l->modification_of_pic_nums_idc == 4 ||
2631 l->modification_of_pic_nums_idc == 5)) {
2632 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2633 gint32 picViewIdx, targetViewId;
/* idc 4 subtracts, idc 5 adds, both modulo num_view_ids. */
2636 if (l->modification_of_pic_nums_idc == 4) {
2637 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2639 picViewIdx += num_view_ids;
2644 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2645 if (picViewIdx >= num_view_ids)
2646 picViewIdx -= num_view_ids;
2648 picViewIdxPred = picViewIdx;
2651 targetViewId = view_ids[picViewIdx];
2654 for (j = num_refs; j > ref_list_idx; j--)
2655 ref_list[j] = ref_list[j - 1];
2656 ref_list[ref_list_idx++] =
2657 find_inter_view_reference(decoder, targetViewId);
2659 for (j = ref_list_idx; j <= num_refs; j++) {
2662 if (ref_list[j]->base.view_id != targetViewId ||
2663 ref_list[j]->base.poc != picture->base.poc)
2664 ref_list[n++] = ref_list[j];
/* Sanity pass: warn about any hole left in the final list. */
2670 for (i = 0; i < num_refs; i++)
2672 GST_ERROR("list %u entry %u is empty", list, i);
2674 *ref_list_count_ptr = num_refs;
2677 /* 8.2.4.3 - Modification process for reference picture lists */
/* Dispatches the per-list modification pass: RefPicList0 for any slice
 * that predicts (non-I/SI) with the l0 flag set, RefPicList1 for
 * B slices with the l1 flag set. */
2679 exec_picture_refs_modification(
2680 GstVaapiDecoderH264 *decoder,
2681 GstVaapiPictureH264 *picture,
2682 GstH264SliceHdr *slice_hdr
2685 GST_DEBUG("execute ref_pic_list_modification()");
/* RefPicList0 */
2688 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2689 slice_hdr->ref_pic_list_modification_flag_l0)
2690 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
/* RefPicList1 */
2693 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2694 slice_hdr->ref_pic_list_modification_flag_l1)
2695 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuild priv->short_ref[] / priv->long_ref[] from the DPB for the
 * current picture's view: frame pictures collect complete reference
 * frames, field pictures collect individual reference fields. Also
 * refreshes each picture's structure and other_field link, and
 * NULL-clears any now-stale tail entries of both arrays.
 * NOTE(review): source view has dropped lines (long_ref_count init,
 * `continue;` bodies after the view_id checks, closing braces). */
2699 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2700 GstVaapiPictureH264 *picture)
2702 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2703 guint i, j, short_ref_count, long_ref_count;
2705 short_ref_count = 0;
2707 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
/* Frame decoding: only frame stores holding a complete frame of the
 * same view contribute, as whole frames. */
2708 for (i = 0; i < priv->dpb_count; i++) {
2709 GstVaapiFrameStore * const fs = priv->dpb[i];
2710 GstVaapiPictureH264 *pic;
2711 if (!gst_vaapi_frame_store_has_frame(fs))
2713 pic = fs->buffers[0];
2714 if (pic->base.view_id != picture->base.view_id)
2716 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2717 priv->short_ref[short_ref_count++] = pic;
2718 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2719 priv->long_ref[long_ref_count++] = pic;
2720 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2721 pic->other_field = fs->buffers[1];
/* Field decoding: every stored field of the same view contributes
 * individually; `j ^ 1` links each field to its complementary field. */
2725 for (i = 0; i < priv->dpb_count; i++) {
2726 GstVaapiFrameStore * const fs = priv->dpb[i];
2727 for (j = 0; j < fs->num_buffers; j++) {
2728 GstVaapiPictureH264 * const pic = fs->buffers[j];
2729 if (pic->base.view_id != picture->base.view_id)
2731 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2732 priv->short_ref[short_ref_count++] = pic;
2733 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2734 priv->long_ref[long_ref_count++] = pic;
2735 pic->structure = pic->base.structure;
2736 pic->other_field = fs->buffers[j ^ 1];
/* Clear entries beyond the new counts so stale pointers do not linger. */
2741 for (i = short_ref_count; i < priv->short_ref_count; i++)
2742 priv->short_ref[i] = NULL;
2743 priv->short_ref_count = short_ref_count;
2745 for (i = long_ref_count; i < priv->long_ref_count; i++)
2746 priv->long_ref[i] = NULL;
2747 priv->long_ref_count = long_ref_count;
/* init_picture_refs() — top-level reference list setup for one slice:
 * rebuilds the short/long ref arrays, derives PicNum values, runs the
 * slice-type-specific initial list construction (8.2.4.2), applies the
 * slice's list modifications (8.2.4.3), then pads each active list
 * with NULLs up to num_ref_idx_lX_active_minus1 + 1.
 * NOTE(review): the function's name/return-type lines were dropped
 * from this view; parameters below are its signature continuation. */
2752 GstVaapiDecoderH264 *decoder,
2753 GstVaapiPictureH264 *picture,
2754 GstH264SliceHdr *slice_hdr
2757 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2760 init_picture_ref_lists(decoder, picture);
2761 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2763 priv->RefPicList0_count = 0;
2764 priv->RefPicList1_count = 0;
/* slice_hdr->type % 5 folds the 5..9 "all slices same type" range
 * onto the base slice types. */
2766 switch (slice_hdr->type % 5) {
2767 case GST_H264_P_SLICE:
2768 case GST_H264_SP_SLICE:
2769 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2771 case GST_H264_B_SLICE:
2772 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2778 exec_picture_refs_modification(decoder, picture, slice_hdr);
/* Pad the lists to the slice-declared active size; unset entries
 * become NULL. */
2780 switch (slice_hdr->type % 5) {
2781 case GST_H264_B_SLICE:
2782 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2783 for (i = priv->RefPicList1_count; i < num_refs; i++)
2784 priv->RefPicList1[i] = NULL;
2785 priv->RefPicList1_count = num_refs;
/* NOTE(review): no `break` visible before the P/SP cases — presumably
 * an intentional fall-through so B slices also pad list 0; the dropped
 * lines between 2785 and 2788 may hold a comment confirming this. */
2788 case GST_H264_P_SLICE:
2789 case GST_H264_SP_SLICE:
2790 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2791 for (i = priv->RefPicList0_count; i < num_refs; i++)
2792 priv->RefPicList0[i] = NULL;
2793 priv->RefPicList0_count = num_refs;
/* init_picture() — initialize the decoder state and picture object for
 * a new picture: frame_num bookkeeping, output/PTS/view fields, MVC
 * flags from the NAL extension, IDR handling (DPB flush), picture
 * structure (frame / top / bottom field), reference flags from
 * nal_ref_idc, and finally POC derivation.
 * NOTE(review): the function's name/return-type lines were dropped
 * from this view; parameters below are its signature continuation. */
2802 GstVaapiDecoderH264 *decoder,
2803 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2805 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2806 GstVaapiPicture * const base_picture = &picture->base;
2807 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2809 priv->prev_frame_num = priv->frame_num;
2810 priv->frame_num = slice_hdr->frame_num;
2811 picture->frame_num = priv->frame_num;
2812 picture->frame_num_wrap = priv->frame_num;
2813 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2814 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2815 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2816 base_picture->view_id = pi->view_id;
2817 base_picture->voc = pi->voc;
2819 /* Initialize extensions */
2820 switch (pi->nalu.extension_type) {
2821 case GST_H264_NAL_EXTENSION_MVC: {
2822 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
/* Propagate MVC NAL header flags onto the picture. */
2824 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2825 if (mvc->inter_view_flag)
2826 GST_VAAPI_PICTURE_FLAG_SET(picture,
2827 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2828 if (mvc->anchor_pic_flag)
2829 GST_VAAPI_PICTURE_FLAG_SET(picture,
2830 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2835 /* Reset decoder state for IDR pictures */
2836 if (pi->nalu.idr_pic_flag) {
2838 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2839 dpb_flush(decoder, picture);
2842 /* Initialize picture structure */
2843 if (!slice_hdr->field_pic_flag)
2844 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2846 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2847 if (!slice_hdr->bottom_field_flag)
2848 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2850 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2852 picture->structure = base_picture->structure;
2854 /* Initialize reference flags */
2855 if (pi->nalu.ref_idc) {
2856 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2857 &slice_hdr->dec_ref_pic_marking;
/* IDR + long_term_reference_flag => long-term, else short-term. */
2859 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2860 dec_ref_pic_marking->long_term_reference_flag)
2861 GST_VAAPI_PICTURE_FLAG_SET(picture,
2862 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2864 GST_VAAPI_PICTURE_FLAG_SET(picture,
2865 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
/* 8.2.1 - derive TopFieldOrderCnt/BottomFieldOrderCnt/PicOrderCnt. */
2868 init_picture_poc(decoder, picture, slice_hdr);
2872 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the reference arrays are full (>= max_num_ref_frames, doubled
 * for field decoding), unmark the short-term reference with the
 * smallest FrameNumWrap; for field pairs, also drop the other field.
 * NOTE(review): early-return lines and closing braces were dropped
 * from this view. */
2874 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2876 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2877 GstH264SPS * const sps = get_sps(decoder);
2878 GstVaapiPictureH264 *ref_picture;
2879 guint i, m, max_num_ref_frames;
2881 GST_DEBUG("reference picture marking process (sliding window)");
/* Only run once per complementary field pair. */
2883 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2886 max_num_ref_frames = sps->num_ref_frames;
2887 if (max_num_ref_frames == 0)
2888 max_num_ref_frames = 1;
/* Field decoding counts individual fields, hence the doubling. */
2889 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2890 max_num_ref_frames <<= 1;
2892 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2894 if (priv->short_ref_count < 1)
/* Find the short-term reference with the smallest FrameNumWrap. */
2897 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2898 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2899 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2903 ref_picture = priv->short_ref[m];
2904 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2905 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2907 /* Both fields need to be marked as "unused for reference", so
2908 remove the other field from the short_ref[] list as well */
2909 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2910 for (i = 0; i < priv->short_ref_count; i++) {
2911 if (priv->short_ref[i] == ref_picture->other_field) {
2912 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* 8.2.5.4.1 - Derive picNumX for an MMCO operation: CurrPicNum
 * (frame_num_wrap for frames, 2*frame_num_wrap+1 for fields) minus
 * (difference_of_pic_nums_minus1 + 1). Return statement lives on a
 * dropped line in this view. */
2920 static inline gint32
2921 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2925 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2926 pic_num = picture->frame_num_wrap;
2928 pic_num = 2 * picture->frame_num_wrap + 1;
2929 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2933 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: resolve picNumX to a short-term reference and unmark it.
 * NOTE(review): throughout this group, early returns / closing braces
 * sit on lines dropped from this view; code kept byte-identical. */
2935 exec_ref_pic_marking_adaptive_mmco_1(
2936 GstVaapiDecoderH264 *decoder,
2937 GstVaapiPictureH264 *picture,
2938 GstH264RefPicMarking *ref_pic_marking
2941 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2944 picNumX = get_picNumX(picture, ref_pic_marking);
2945 i = find_short_term_reference(decoder, picNumX);
2949 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2950 GST_VAAPI_PICTURE_IS_FRAME(picture));
2951 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2954 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: resolve LongTermPicNum to a long-term reference, unmark it. */
2956 exec_ref_pic_marking_adaptive_mmco_2(
2957 GstVaapiDecoderH264 *decoder,
2958 GstVaapiPictureH264 *picture,
2959 GstH264RefPicMarking *ref_pic_marking
2962 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2965 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2969 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2970 GST_VAAPI_PICTURE_IS_FRAME(picture));
2971 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2974 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: evict any long-term ref already holding this
 * long_term_frame_idx, then move the short-term ref picNumX into the
 * long_ref[] array and mark it long-term. */
2976 exec_ref_pic_marking_adaptive_mmco_3(
2977 GstVaapiDecoderH264 *decoder,
2978 GstVaapiPictureH264 *picture,
2979 GstH264RefPicMarking *ref_pic_marking
2982 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2983 GstVaapiPictureH264 *ref_picture, *other_field;
2986 for (i = 0; i < priv->long_ref_count; i++) {
2987 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2990 if (i != priv->long_ref_count) {
2991 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2992 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2995 picNumX = get_picNumX(picture, ref_pic_marking);
2996 i = find_short_term_reference(decoder, picNumX);
3000 ref_picture = priv->short_ref[i];
3001 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3002 priv->long_ref[priv->long_ref_count++] = ref_picture;
3004 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3005 gst_vaapi_picture_h264_set_reference(ref_picture,
3006 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3007 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3009 /* Assign LongTermFrameIdx to the other field if it was also
3010 marked as "used for long-term reference */
3011 other_field = ref_picture->other_field;
3012 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3013 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3016 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3017 * as "unused for reference" */
/* MMCO 4: sets MaxLongTermFrameIdx; long-term refs above it are
 * unmarked. Note the loop scans with `<=` keeping compliant entries
 * and removing the rest via dropped else lines — exact framing hidden
 * by the gaps in this view. */
3019 exec_ref_pic_marking_adaptive_mmco_4(
3020 GstVaapiDecoderH264 *decoder,
3021 GstVaapiPictureH264 *picture,
3022 GstH264RefPicMarking *ref_pic_marking
3025 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3026 gint32 i, long_term_frame_idx;
3028 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3030 for (i = 0; i < priv->long_ref_count; i++) {
3031 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3033 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3034 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3039 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flush the DPB, reset frame_num state (7.4.3), and rebase
 * the current picture's POCs to zero (8.2.1). */
3041 exec_ref_pic_marking_adaptive_mmco_5(
3042 GstVaapiDecoderH264 *decoder,
3043 GstVaapiPictureH264 *picture,
3044 GstH264RefPicMarking *ref_pic_marking
3047 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3049 dpb_flush(decoder, picture);
3051 priv->prev_pic_has_mmco5 = TRUE;
3053 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3054 priv->frame_num = 0;
3055 priv->frame_num_offset = 0;
3056 picture->frame_num = 0;
3058 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3059 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3060 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3061 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3062 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3063 picture->base.poc = 0;
3066 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evict any long-term ref already holding this
 * long_term_frame_idx, then mark the current picture long-term. */
3068 exec_ref_pic_marking_adaptive_mmco_6(
3069 GstVaapiDecoderH264 *decoder,
3070 GstVaapiPictureH264 *picture,
3071 GstH264RefPicMarking *ref_pic_marking
3074 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3075 GstVaapiPictureH264 *other_field;
3078 for (i = 0; i < priv->long_ref_count; i++) {
3079 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3082 if (i != priv->long_ref_count) {
3083 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3084 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3087 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3088 gst_vaapi_picture_h264_set_reference(picture,
3089 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3090 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3092 /* Assign LongTermFrameIdx to the other field if it was also
3093 marked as "used for long-term reference */
3094 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3095 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3096 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3099 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Walks the slice's ref_pic_marking operations and dispatches each
 * MMCO 1..6 through a NULL-guarded function table (slot 0 unused). */
3101 exec_ref_pic_marking_adaptive(
3102 GstVaapiDecoderH264 *decoder,
3103 GstVaapiPictureH264 *picture,
3104 GstH264DecRefPicMarking *dec_ref_pic_marking
3109 GST_DEBUG("reference picture marking process (adaptive memory control)");
3111 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3112 GstVaapiDecoderH264 *decoder,
3113 GstVaapiPictureH264 *picture,
3114 GstH264RefPicMarking *ref_pic_marking
/* Index == MMCO value; entry 0 is presumably NULL (its line was
 * dropped from this view), filtered by the `mmco_funcs[mmco]` check. */
3117 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3119 exec_ref_pic_marking_adaptive_mmco_1,
3120 exec_ref_pic_marking_adaptive_mmco_2,
3121 exec_ref_pic_marking_adaptive_mmco_3,
3122 exec_ref_pic_marking_adaptive_mmco_4,
3123 exec_ref_pic_marking_adaptive_mmco_5,
3124 exec_ref_pic_marking_adaptive_mmco_6,
3127 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3128 GstH264RefPicMarking * const ref_pic_marking =
3129 &dec_ref_pic_marking->ref_pic_marking[i];
3131 const guint mmco = ref_pic_marking->memory_management_control_operation;
3132 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3133 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3135 GST_ERROR("unhandled MMCO %u", mmco);
3142 /* 8.2.5 - Execute reference picture marking process */
/* Top-level marking entry point: records per-picture state, tracks
 * inter-view pictures, then runs adaptive (MMCO) or sliding-window
 * marking for reference pictures. Early-return lines are dropped
 * from this view. */
3144 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3146 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3148 priv->prev_pic_has_mmco5 = FALSE;
3149 priv->prev_pic_structure = picture->structure;
/* Keep inter-view pictures alive for MVC prediction (ref added here,
 * released elsewhere). */
3151 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3152 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3154 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
/* IDR marking is handled at init_picture() time; only non-IDR
 * reference pictures run 8.2.5.3/8.2.5.4 here. */
3157 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3158 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3159 &picture->last_slice_hdr->dec_ref_pic_marking;
3160 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3161 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3165 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Reset a VAPictureH264 entry to the "invalid / unused" state expected
 * by VA-API for unfilled ReferenceFrames slots. */
3173 vaapi_init_picture(VAPictureH264 *pic)
3175 pic->picture_id = VA_INVALID_ID;
3177 pic->flags = VA_PICTURE_H264_INVALID;
3178 pic->TopFieldOrderCnt = 0;
3179 pic->BottomFieldOrderCnt = 0;
/* Translate a decoder picture into a VAPictureH264: surface id,
 * short/long-term flags with the matching frame_idx, and the field
 * order counts appropriate for the given picture structure (0 selects
 * the picture's own structure). */
3183 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3184 guint picture_structure)
3186 if (!picture_structure)
3187 picture_structure = picture->structure;
3189 pic->picture_id = picture->base.surface_id;
3192 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3193 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3194 pic->frame_idx = picture->long_term_frame_idx;
3197 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3198 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3199 pic->frame_idx = picture->frame_num;
3202 switch (picture_structure) {
3203 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3204 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3205 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3207 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3208 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3209 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3210 pic->BottomFieldOrderCnt = 0;
3212 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3213 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3214 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3215 pic->TopFieldOrderCnt = 0;
/* Variant for RefPicListX entries: same as vaapi_fill_picture(), but
 * inter-view references must not carry short/long-term flags (H.8.4). */
3221 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3222 GstVaapiPictureH264 *picture)
3224 vaapi_fill_picture(pic, picture, 0);
3226 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3227 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3228 /* The inter-view reference components and inter-view only
3229 reference components that are included in the reference
3230 picture lists are considered as not being marked as "used for
3231 short-term reference" or "used for long-term reference" */
3232 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3233 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populate the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames array (same-view references plus MVC
 * inter-view references from the DPB, padded with invalid entries),
 * and the SPS/PPS-derived scalar and bitfield parameters.
 * NOTE(review): source view has dropped lines (loop-exit `break`,
 * closing braces); code kept byte-identical. */
3238 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3240 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3241 GstVaapiPicture * const base_picture = &picture->base;
3242 GstH264PPS * const pps = get_pps(decoder);
3243 GstH264SPS * const sps = get_sps(decoder);
3244 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3247 /* Fill in VAPictureParameterBufferH264 */
3248 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3250 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3251 GstVaapiFrameStore * const fs = priv->dpb[i];
3252 if ((gst_vaapi_frame_store_has_reference(fs) &&
3253 fs->view_id == picture->base.view_id) ||
3254 (gst_vaapi_frame_store_has_inter_view(fs) &&
3255 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3256 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3257 fs->buffers[0], fs->structure);
/* Stop once the fixed-size VA-API array is full. */
3258 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Remaining slots must be explicitly marked invalid. */
3261 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3262 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3264 #define COPY_FIELD(s, f) \
3265 pic_param->f = (s)->f
3267 #define COPY_BFM(a, s, f) \
3268 pic_param->a.bits.f = (s)->f
3270 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3271 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3272 pic_param->frame_num = priv->frame_num;
3274 COPY_FIELD(sps, bit_depth_luma_minus8);
3275 COPY_FIELD(sps, bit_depth_chroma_minus8);
3276 COPY_FIELD(sps, num_ref_frames);
3277 COPY_FIELD(pps, num_slice_groups_minus1);
3278 COPY_FIELD(pps, slice_group_map_type);
3279 COPY_FIELD(pps, slice_group_change_rate_minus1);
3280 COPY_FIELD(pps, pic_init_qp_minus26);
3281 COPY_FIELD(pps, pic_init_qs_minus26);
3282 COPY_FIELD(pps, chroma_qp_index_offset);
3283 COPY_FIELD(pps, second_chroma_qp_index_offset);
3285 pic_param->seq_fields.value = 0; /* reset all bits */
3286 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3287 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3289 COPY_BFM(seq_fields, sps, chroma_format_idc);
3290 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3291 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3292 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3293 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3294 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3295 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3296 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3297 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3299 pic_param->pic_fields.value = 0; /* reset all bits */
3300 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3301 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3303 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3304 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3305 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3306 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3307 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3308 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3309 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3310 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3314 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Compares the slice header in @pi against the previous slice's @prev_pi.
 * Each CHECK_* below detects one of the spec-listed conditions under which
 * the current slice starts a NEW primary coded picture. */
3316 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3318 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3319 GstH264PPS * const pps = slice_hdr->pps;
3320 GstH264SPS * const sps = pps->sequence;
3321 GstH264SliceHdr *prev_slice_hdr;
3325 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* CHECK_EXPR logs when @expr is false; its elided body presumably returns
 * TRUE ("new picture") in that case -- confirm against full source.
 * CHECK_VALUE compares one field of the new vs. old slice headers. */
3327 #define CHECK_EXPR(expr, field_name) do { \
3329 GST_DEBUG(field_name " differs in value"); \
3334 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3335 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3337 /* view_id differs in value and VOIdx of current slice_hdr is less
3338 than the VOIdx of the prev_slice_hdr */
3339 CHECK_VALUE(pi, prev_pi, view_id);
3341 /* frame_num differs in value, regardless of inferred values to 0 */
3342 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3344 /* pic_parameter_set_id differs in value */
3345 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3347 /* field_pic_flag differs in value */
3348 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3350 /* bottom_field_flag is present in both and differs in value */
3351 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3352 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3354 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3355 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3356 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3358 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3359 value or delta_pic_order_cnt_bottom differs in value */
3360 if (sps->pic_order_cnt_type == 0) {
3361 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3362 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3363 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3366 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3367 differs in value or delta_pic_order_cnt[1] differs in value */
3368 else if (sps->pic_order_cnt_type == 1) {
3369 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3370 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3373 /* IdrPicFlag differs in value */
3374 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3376 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3377 if (pi->nalu.idr_pic_flag)
3378 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3385 /* Detection of a new access unit, assuming we are already in presence
/* No previous slice, or same view_id: the (elided) branch body presumably
 * treats this as a new access unit -- confirm against full source.
 * Otherwise, for MVC: a view order count (voc) lower than the previous
 * slice's means we wrapped around to the base view, i.e. a new AU. */
3387 static inline gboolean
3388 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3390 if (!prev_pi || prev_pi->view_id == pi->view_id)
3392 return pi->voc < prev_pi->voc;
3395 /* Finds the first field picture corresponding to the supplied picture */
3396 static GstVaapiPictureH264 *
3397 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3399 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3400 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3401 GstVaapiFrameStore *fs;
/* Frame pictures have no "first field" to pair with. */
3403 if (!slice_hdr->field_pic_flag)
/* Look up the previous frame store for this view order count (voc).
 * A store that already holds a complete frame cannot accept a second
 * field, so it is not a match either. */
3406 fs = priv->prev_frames[pi->voc];
3407 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* The first field pairs with this slice only if frame_num matches. */
3410 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3411 return fs->buffers[0];
3415 static GstVaapiDecoderStatus
/* Starts decoding of a new picture for the slice described in @unit:
 * activates PPS/SPS, allocates (or pairs up) the picture object, applies
 * the SPS cropping rectangle, and initializes/fills the VA picture
 * parameters. Returns a GstVaapiDecoderStatus code. */
3416 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3418 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3419 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3420 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3421 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3422 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3423 GstVaapiPictureH264 *picture, *first_field;
3424 GstVaapiDecoderStatus status;
3426 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3427 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3429 /* Only decode base stream for MVC */
3430 switch (sps->profile_idc) {
3431 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3432 case GST_H264_PROFILE_STEREO_HIGH:
3434 GST_DEBUG("drop picture from substream");
3435 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3440 status = ensure_context(decoder, sps);
3441 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3444 priv->decoder_state = 0;
/* If a matching first field exists, this picture is its second field. */
3446 first_field = find_first_field(decoder, pi);
3448 /* Re-use current picture where the first field was decoded */
3449 picture = gst_vaapi_picture_h264_new_field(first_field);
3451 GST_ERROR("failed to allocate field picture");
3452 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3456 /* Create new picture */
3457 picture = gst_vaapi_picture_h264_new(decoder);
3459 GST_ERROR("failed to allocate picture");
3460 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes its own reference; drop the local one so
 * the private field becomes the sole owner. */
3463 gst_vaapi_picture_replace(&priv->current_picture, picture);
3464 gst_vaapi_picture_unref(picture);
3466 /* Clear inter-view references list if this is the primary coded
3467 picture of the current access unit */
3468 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3469 g_ptr_array_set_size(priv->inter_views, 0);
3471 /* Update cropping rectangle */
3472 if (sps->frame_cropping_flag) {
3473 GstVaapiRectangle crop_rect;
3474 crop_rect.x = sps->crop_rect_x;
3475 crop_rect.y = sps->crop_rect_y;
3476 crop_rect.width = sps->crop_rect_width;
3477 crop_rect.height = sps->crop_rect_height;
3478 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3481 status = ensure_quant_matrix(decoder, picture);
3482 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3483 GST_ERROR("failed to reset quantizer matrix");
3487 if (!init_picture(decoder, picture, pi))
3488 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3489 if (!fill_picture(decoder, picture))
3490 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3492 priv->decoder_state = pi->state;
3493 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Returns the bit offset of slice_data() from the start of the NAL unit,
 * i.e. header bytes plus the parsed slice-header size, minus 8 bits for
 * each emulation-prevention byte encountered while parsing the header. */
3497 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3501 epb_count = slice_hdr->n_emulation_prevention_bytes;
3502 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Copies the slice header's explicit prediction weight table into the VA
 * slice parameters. One table (list 0) is used for weighted P/SP slices,
 * two tables (lists 0 and 1) for B slices with weighted_bipred_idc == 1.
 * Chroma weights are emitted only when chroma_array_type != 0. */
3506 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3507 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3509 VASliceParameterBufferH264 * const slice_param = slice->param;
3510 GstH264PPS * const pps = get_pps(decoder);
3511 GstH264SPS * const sps = get_sps(decoder);
3512 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3513 guint num_weight_tables = 0;
3516 if (pps->weighted_pred_flag &&
3517 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3518 num_weight_tables = 1;
3519 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3520 num_weight_tables = 2;
3522 num_weight_tables = 0;
/* Reset all weight-table fields before (maybe) populating them. */
3524 slice_param->luma_log2_weight_denom = 0;
3525 slice_param->chroma_log2_weight_denom = 0;
3526 slice_param->luma_weight_l0_flag = 0;
3527 slice_param->chroma_weight_l0_flag = 0;
3528 slice_param->luma_weight_l1_flag = 0;
3529 slice_param->chroma_weight_l1_flag = 0;
3531 if (num_weight_tables < 1)
3534 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3535 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights (P/SP/B slices). */
3537 slice_param->luma_weight_l0_flag = 1;
3538 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3539 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3540 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3543 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3544 if (slice_param->chroma_weight_l0_flag) {
3545 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3546 for (j = 0; j < 2; j++) {
3547 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3548 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3553 if (num_weight_tables < 2)
/* List 1 weights (B slices only). */
3556 slice_param->luma_weight_l1_flag = 1;
3557 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3558 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3559 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3562 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3563 if (slice_param->chroma_weight_l1_flag) {
3564 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3565 for (j = 0; j < 2; j++) {
3566 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3567 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Copies the decoder's constructed reference picture lists (RefPicList0/1)
 * into the VA slice parameters. P/SP slices use one list, B slices two,
 * I slices none. Unused entries up to num_ref_idx_lX_active_minus1 are
 * reset with vaapi_init_picture(). */
3575 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3576 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3578 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3579 VASliceParameterBufferH264 * const slice_param = slice->param;
3580 guint i, num_ref_lists = 0;
3582 slice_param->num_ref_idx_l0_active_minus1 = 0;
3583 slice_param->num_ref_idx_l1_active_minus1 = 0;
3585 if (GST_H264_IS_B_SLICE(slice_hdr))
3587 else if (GST_H264_IS_I_SLICE(slice_hdr))
3592 if (num_ref_lists < 1)
3595 slice_param->num_ref_idx_l0_active_minus1 =
3596 slice_hdr->num_ref_idx_l0_active_minus1;
3598 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3599 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3600 priv->RefPicList0[i]);
/* Clear the remaining list-0 slots. */
3601 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3602 vaapi_init_picture(&slice_param->RefPicList0[i]);
3604 if (num_ref_lists < 2)
3607 slice_param->num_ref_idx_l1_active_minus1 =
3608 slice_hdr->num_ref_idx_l1_active_minus1;
3610 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3611 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3612 priv->RefPicList1[i]);
/* Clear the remaining list-1 slots. */
3613 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3614 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Populates the VASliceParameterBufferH264 for @slice from the parsed
 * slice header in @pi, then fills in the reference picture lists and the
 * prediction weight table. */
3619 fill_slice(GstVaapiDecoderH264 *decoder,
3620 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3622 VASliceParameterBufferH264 * const slice_param = slice->param;
3623 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3625 /* Fill in VASliceParameterBufferH264 */
3626 slice_param->slice_data_bit_offset =
3627 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3628 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* slice_type modulo 5 folds types 5..9 onto the base P/B/I/SP/SI codes. */
3629 slice_param->slice_type = slice_hdr->type % 5;
3630 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3631 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3632 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3633 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3634 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3635 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3637 if (!fill_RefPicList(decoder, slice, slice_hdr))
3639 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3644 static GstVaapiDecoderStatus
/* Decodes one slice NAL unit: validates that enough headers were seen,
 * activates its PPS/SPS, maps the input buffer, allocates a GstVaapiSlice
 * over the NAL payload and attaches it to the current picture. */
3645 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3647 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3648 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3649 GstVaapiPictureH264 * const picture = priv->current_picture;
3650 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3651 GstVaapiSlice *slice;
3652 GstBuffer * const buffer =
3653 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3654 GstMapInfo map_info;
3656 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid picture headers the slice is silently skipped (SUCCESS),
 * not treated as a hard error. */
3658 if (!is_valid_state(pi->state,
3659 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3660 GST_WARNING("failed to receive enough headers to decode slice");
3661 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3664 if (!ensure_pps(decoder, slice_hdr->pps)) {
3665 GST_ERROR("failed to activate PPS");
3666 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3669 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3670 GST_ERROR("failed to activate SPS");
3671 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3674 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3675 GST_ERROR("failed to map buffer");
3676 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3679 /* Check whether this is the first/last slice in the current access unit */
3680 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3681 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3682 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3683 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* GST_VAAPI_SLICE_NEW copies the payload, so the buffer can be unmapped
 * immediately afterwards. */
3685 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3686 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3687 gst_buffer_unmap(buffer, &map_info);
3689 GST_ERROR("failed to allocate slice");
3690 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3693 init_picture_refs(decoder, picture, slice_hdr);
3694 if (!fill_slice(decoder, slice, pi)) {
3695 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3696 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3699 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3700 picture->last_slice_hdr = slice_hdr;
3701 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3702 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans the adapter for a 3-byte start code (00 00 01, matched through the
 * mask 0xffffff00 / pattern 0x00000100) and returns its offset, with the
 * matched 32-bit word optionally stored in @scp. */
3706 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3708 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3709 0xffffff00, 0x00000100,
3714 static GstVaapiDecoderStatus
/* Dispatches one parsed NAL unit to its dedicated decode function based
 * on the NAL type, accumulating the parser state into decoder_state. */
3715 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3717 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3718 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3719 GstVaapiDecoderStatus status;
3721 priv->decoder_state |= pi->state;
3722 switch (pi->nalu.type) {
3723 case GST_H264_NAL_SPS:
3724 status = decode_sps(decoder, unit);
3726 case GST_H264_NAL_SUBSET_SPS:
3727 status = decode_subset_sps(decoder, unit);
3729 case GST_H264_NAL_PPS:
3730 status = decode_pps(decoder, unit);
3732 case GST_H264_NAL_SLICE_EXT:
3733 case GST_H264_NAL_SLICE_IDR:
3734 /* fall-through. IDR specifics are handled in init_picture() */
3735 case GST_H264_NAL_SLICE:
3736 status = decode_slice(decoder, unit);
3738 case GST_H264_NAL_SEQ_END:
3739 case GST_H264_NAL_STREAM_END:
3740 status = decode_sequence_end(decoder);
3742 case GST_H264_NAL_SEI:
/* SEI messages are parsed earlier; nothing to decode here. */
3743 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3746 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3747 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3753 static GstVaapiDecoderStatus
/* Parses avcC-formatted codec-data: reads the NAL length size, then walks
 * and decodes the embedded SPS and PPS NAL units (2-byte length prefixed).
 * On success, marks the stream as avcC so parse() uses length-prefixed
 * NAL unit framing instead of start codes. */
3754 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3755 const guchar *buf, guint buf_size)
3757 GstVaapiDecoderH264 * const decoder =
3758 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3759 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3760 GstVaapiDecoderStatus status;
3761 GstVaapiDecoderUnit unit;
3762 GstVaapiParserInfoH264 *pi = NULL;
3763 GstH264ParserResult result;
3764 guint i, ofs, num_sps, num_pps;
3766 unit.parsed_info = NULL;
3769 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3772 GST_ERROR("failed to decode codec-data, not in avcC format");
3773 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* avcC byte 4, low 2 bits: lengthSizeMinusOne. */
3776 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* avcC byte 5, low 5 bits: numOfSequenceParameterSets. */
3778 num_sps = buf[5] & 0x1f;
3781 for (i = 0; i < num_sps; i++) {
3782 pi = gst_vaapi_parser_info_h264_new();
3784 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3785 unit.parsed_info = pi;
3787 result = gst_h264_parser_identify_nalu_avc(
3789 buf, ofs, buf_size, 2,
3792 if (result != GST_H264_PARSER_OK) {
3793 status = get_status(result);
3797 status = parse_sps(decoder, &unit);
3798 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3800 ofs = pi->nalu.offset + pi->nalu.size;
3802 status = decode_sps(decoder, &unit);
3803 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3805 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3811 for (i = 0; i < num_pps; i++) {
3812 pi = gst_vaapi_parser_info_h264_new();
3814 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3815 unit.parsed_info = pi;
3817 result = gst_h264_parser_identify_nalu_avc(
3819 buf, ofs, buf_size, 2,
3822 if (result != GST_H264_PARSER_OK) {
3823 status = get_status(result);
3827 status = parse_pps(decoder, &unit);
3828 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3830 ofs = pi->nalu.offset + pi->nalu.size;
3832 status = decode_pps(decoder, &unit);
3833 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3835 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3838 priv->is_avcC = TRUE;
3839 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Common cleanup: drop any parser info still held. */
3842 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3846 static GstVaapiDecoderStatus
/* Lazily opens the decoder on first use and feeds it any codec-data
 * (e.g. avcC SPS/PPS) attached to the stream. Idempotent once opened. */
3847 ensure_decoder(GstVaapiDecoderH264 *decoder)
3849 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3850 GstVaapiDecoderStatus status;
3852 if (!priv->is_opened) {
3853 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3854 if (!priv->is_opened)
3855 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3857 status = gst_vaapi_decoder_decode_codec_data(
3858 GST_VAAPI_DECODER_CAST(decoder));
3859 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3862 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3865 static GstVaapiDecoderStatus
/* Extracts exactly one NAL unit from @adapter into @unit: determines the
 * unit boundaries (avcC length prefix, NALU-aligned input, or start-code
 * scanning), identifies and pre-parses the NAL unit, then sets the
 * AU/FRAME/SLICE/SKIP flags that drive the frame-splitting logic. */
3866 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3867 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3869 GstVaapiDecoderH264 * const decoder =
3870 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3871 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3872 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3873 GstVaapiParserInfoH264 *pi;
3874 GstVaapiDecoderStatus status;
3875 GstH264ParserResult result;
3877 guint i, size, buf_size, nalu_size, flags;
3881 status = ensure_decoder(decoder);
3882 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3885 switch (priv->stream_alignment) {
3886 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3887 size = gst_adapter_available_fast(adapter);
3890 size = gst_adapter_available(adapter);
/* avcC framing: the NAL unit size is a big-endian length prefix of
 * nal_length_size bytes, not a start code. */
3894 if (priv->is_avcC) {
3895 if (size < priv->nal_length_size)
3896 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3898 buf = (guchar *)&start_code;
3899 g_assert(priv->nal_length_size <= sizeof(start_code));
3900 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3903 for (i = 0; i < priv->nal_length_size; i++)
3904 nalu_size = (nalu_size << 8) | buf[i];
3906 buf_size = priv->nal_length_size + nalu_size;
3907 if (size < buf_size)
3908 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3912 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3914 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
/* Byte-stream framing: locate the leading start code and drop any
 * garbage bytes before it. */
3917 ofs = scan_for_start_code(adapter, 0, size, NULL);
3919 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3922 gst_adapter_flush(adapter, ofs);
/* input_offset2 caches the previous scan position so repeated calls
 * don't rescan from the beginning. */
3926 ofs2 = ps->input_offset2 - ofs - 4;
3930 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3931 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3933 // Assume the whole NAL unit is present if end-of-stream
3935 ps->input_offset2 = size;
3936 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3943 ps->input_offset2 = 0;
3945 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3947 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3949 unit->size = buf_size;
3951 pi = gst_vaapi_parser_info_h264_new();
3953 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit owns the parser info; it is released with the unit. */
3955 gst_vaapi_decoder_unit_set_parsed_info(unit,
3956 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3959 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3960 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3962 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3963 buf, 0, buf_size, &pi->nalu);
3964 status = get_status(result);
3965 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Pre-parse headers so decode() later has SPS/PPS/slice info ready. */
3968 switch (pi->nalu.type) {
3969 case GST_H264_NAL_SPS:
3970 status = parse_sps(decoder, unit);
3972 case GST_H264_NAL_SUBSET_SPS:
3973 status = parse_subset_sps(decoder, unit);
3975 case GST_H264_NAL_PPS:
3976 status = parse_pps(decoder, unit);
3978 case GST_H264_NAL_SEI:
3979 status = parse_sei(decoder, unit);
3981 case GST_H264_NAL_SLICE_EXT:
3982 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3983 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3987 case GST_H264_NAL_SLICE_IDR:
3988 case GST_H264_NAL_SLICE:
3989 status = parse_slice(decoder, unit);
3992 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3995 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Derive the AU/frame boundary flags for this unit. */
3999 switch (pi->nalu.type) {
4000 case GST_H264_NAL_AU_DELIMITER:
4001 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4002 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4004 case GST_H264_NAL_FILLER_DATA:
4005 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4007 case GST_H264_NAL_STREAM_END:
4008 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4010 case GST_H264_NAL_SEQ_END:
4011 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4012 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4014 case GST_H264_NAL_SPS:
4015 case GST_H264_NAL_SUBSET_SPS:
4016 case GST_H264_NAL_PPS:
4017 case GST_H264_NAL_SEI:
4018 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4019 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4021 case GST_H264_NAL_SLICE_EXT:
4022 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4023 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4027 case GST_H264_NAL_SLICE_IDR:
4028 case GST_H264_NAL_SLICE:
4029 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4030 if (is_new_picture(pi, priv->prev_slice_pi)) {
4031 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4032 if (is_new_access_unit(pi, priv->prev_slice_pi))
4033 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4035 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4037 case GST_H264_NAL_SPS_EXT:
4038 case GST_H264_NAL_SLICE_AUX:
4039 /* skip SPS extension and auxiliary slice for now */
4040 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4042 case GST_H264_NAL_PREFIX_UNIT:
4043 /* skip Prefix NAL units for now */
4044 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4045 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4046 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 are reserved/extension types that start an AU. */
4049 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4050 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4051 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Starting a new AU retroactively marks the previous slice as AU end. */
4054 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4055 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4056 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* Drop the pointer into the (soon unmapped) adapter data. */
4058 pi->nalu.data = NULL;
4059 pi->state = priv->parser_state;
4061 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4062 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4065 static GstVaapiDecoderStatus
/* GstVaapiDecoder::decode vfunc: ensures the decoder is opened, then
 * dispatches the unit to decode_unit(). */
4066 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4067 GstVaapiDecoderUnit *unit)
4069 GstVaapiDecoderH264 * const decoder =
4070 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4071 GstVaapiDecoderStatus status;
4073 status = ensure_decoder(decoder);
4074 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4076 return decode_unit(decoder, unit);
4079 static GstVaapiDecoderStatus
/* GstVaapiDecoder::start_frame vfunc: begins a new picture for the first
 * slice unit of a frame. */
4080 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4081 GstVaapiDecoderUnit *unit)
4083 GstVaapiDecoderH264 * const decoder =
4084 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4086 return decode_picture(decoder, unit);
4089 static GstVaapiDecoderStatus
/* GstVaapiDecoder::end_frame vfunc: finishes decoding of the current
 * picture once all its slices were submitted. */
4090 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4092 GstVaapiDecoderH264 * const decoder =
4093 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4095 return decode_current_picture(decoder);
4098 static GstVaapiDecoderStatus
/* GstVaapiDecoder::flush vfunc: drains all pictures out of the DPB. */
4099 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4101 GstVaapiDecoderH264 * const decoder =
4102 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4104 dpb_flush(decoder, NULL);
4105 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Initializes the class vtable: object size/finalizer on the mini-object
 * class, and all GstVaapiDecoder virtual functions on the decoder class. */
4109 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4111 GstVaapiMiniObjectClass * const object_class =
4112 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4113 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4115 object_class->size = sizeof(GstVaapiDecoderH264);
4116 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4118 decoder_class->create = gst_vaapi_decoder_h264_create;
4119 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4120 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4121 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4122 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4123 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4124 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4126 decoder_class->decode_codec_data =
4127 gst_vaapi_decoder_h264_decode_codec_data;
4130 static inline const GstVaapiDecoderClass *
/* Returns the singleton class, initializing it thread-safely on first use
 * via the g_once_init_enter/leave pattern. */
4131 gst_vaapi_decoder_h264_class(void)
4133 static GstVaapiDecoderH264Class g_class;
4134 static gsize g_class_init = FALSE;
4136 if (g_once_init_enter(&g_class_init)) {
4137 gst_vaapi_decoder_h264_class_init(&g_class);
4138 g_once_init_leave(&g_class_init, TRUE);
4140 return GST_VAAPI_DECODER_CLASS(&g_class);
4144 * gst_vaapi_decoder_h264_set_alignment:
4145 * @decoder: a #GstVaapiDecoderH264
4146 * @alignment: the #GstVaapiStreamAlignH264
4148 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4149 * of each buffer that is supplied to the decoder. This could be no
4150 * specific alignment, NAL unit boundaries, or access unit boundaries.
4153 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4154 GstVaapiStreamAlignH264 alignment)
4156 g_return_if_fail(decoder != NULL);
4158 decoder->priv.stream_alignment = alignment;
4162 * gst_vaapi_decoder_h264_new:
4163 * @display: a #GstVaapiDisplay
4164 * @caps: a #GstCaps holding codec information
4166 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4167 * hold extra information like codec-data and picture coded size.
4169 * Return value: the newly allocated #GstVaapiDecoder object
4172 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4174 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);