/*
 * gstvaapidecoder_h264.c - H.264 decoder
 *
 * Copyright (C) 2011-2014 Intel Corporation
 *   Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public License
 * as published by the Free Software Foundation; either version 2.1
 * of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
 * Boston, MA 02110-1301 USA
 */

/**
 * SECTION:gstvaapidecoder_h264
 * @short_description: H.264 decoder
 */
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
40 #include "gstvaapidebug.h"
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0

typedef struct _GstVaapiDecoderH264Private       GstVaapiDecoderH264Private;
typedef struct _GstVaapiDecoderH264Class         GstVaapiDecoderH264Class;
typedef struct _GstVaapiFrameStore               GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass          GstVaapiFrameStoreClass;
typedef struct _GstVaapiParserInfoH264           GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264              GstVaapiPictureH264;

// Used for field_poc[]: index 0 is the top field, index 1 the bottom field
#define TOP_FIELD       0
#define BOTTOM_FIELD    1
/* ------------------------------------------------------------------------- */
/* --- H.264 Parser Info                                                 --- */
/* ------------------------------------------------------------------------- */
61 * Extended decoder unit flags:
63 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74 GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
76 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82 ((GstVaapiParserInfoH264 *)(obj))
84 struct _GstVaapiParserInfoH264 {
85 GstVaapiMiniObject parent_instance;
91 GstH264SliceHdr slice_hdr;
94 guint flags; // Same as decoder unit flags (persistent)
95 guint view_id; // View ID of slice
96 guint voc; // View order index (VOIdx) of slice
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
102 switch (pi->nalu.type) {
103 case GST_H264_NAL_SPS:
104 case GST_H264_NAL_SUBSET_SPS:
105 gst_h264_sps_clear(&pi->data.sps);
107 case GST_H264_NAL_SEI:
109 g_array_unref(pi->data.sei);
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
119 static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120 .size = sizeof(GstVaapiParserInfoH264),
121 .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
123 return &GstVaapiParserInfoH264Class;
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
129 return (GstVaapiParserInfoH264 *)
130 gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
140 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
141 (GstVaapiMiniObject *)(new_pi))
/* ------------------------------------------------------------------------- */
/* --- H.264 Pictures                                                    --- */
/* ------------------------------------------------------------------------- */
148 * Extended picture flags:
150 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152 * may be used for inter-view prediction
153 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154 * i.e. a picture that is decoded with only inter-view prediction,
155 * and not inter prediction
156 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
158 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
160 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161 * "used for short-term reference"
162 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163 * "used for long-term reference"
164 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165 * reference picture (short-term reference or long-term reference)
168 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
175 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176 GST_VAAPI_PICTURE_FLAG_REFERENCE),
177 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178 GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179 GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
/* Predicate macros over the extended picture flags above */
#define GST_VAAPI_PICTURE_IS_IDR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))

#define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)

#define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
    ((GST_VAAPI_PICTURE_FLAGS(picture) & \
      GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)

#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))

#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
    (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))

#define GST_VAAPI_PICTURE_H264(picture) \
    ((GstVaapiPictureH264 *)(picture))
206 struct _GstVaapiPictureH264 {
207 GstVaapiPicture base;
208 GstH264SliceHdr *last_slice_hdr;
211 gint32 frame_num; // Original frame_num from slice_header()
212 gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
213 gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
214 gint32 pic_num; // Temporary for ref pic marking: PicNum
215 gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
216 GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
217 guint output_flag : 1;
218 guint output_needed : 1;
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
226 gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
230 gst_vaapi_picture_h264_create(
231 GstVaapiPictureH264 *picture,
232 const GstVaapiCodecObjectConstructorArgs *args
235 if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
238 picture->field_poc[0] = G_MAXINT32;
239 picture->field_poc[1] = G_MAXINT32;
240 picture->output_needed = FALSE;
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
247 return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248 &GstVaapiPictureH264Class,
249 GST_VAAPI_CODEC_BASE(decoder),
250 NULL, sizeof(VAPictureParameterBufferH264),
256 gst_vaapi_picture_h264_set_reference(
257 GstVaapiPictureH264 *picture,
258 guint reference_flags,
264 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
267 if (!other_field || !(picture = picture->other_field))
269 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270 GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
276 g_return_val_if_fail(picture, NULL);
278 return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
/* ------------------------------------------------------------------------- */
/* --- Frame Buffers (DPB)                                               --- */
/* ------------------------------------------------------------------------- */
285 struct _GstVaapiFrameStore {
287 GstVaapiMiniObject parent_instance;
291 GstVaapiPictureH264 *buffers[2];
297 gst_vaapi_frame_store_finalize(gpointer object)
299 GstVaapiFrameStore * const fs = object;
302 for (i = 0; i < fs->num_buffers; i++)
303 gst_vaapi_picture_replace(&fs->buffers[i], NULL);
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
309 GstVaapiFrameStore *fs;
311 static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312 sizeof(GstVaapiFrameStore),
313 gst_vaapi_frame_store_finalize
316 fs = (GstVaapiFrameStore *)
317 gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
321 fs->view_id = picture->base.view_id;
322 fs->structure = picture->structure;
323 fs->buffers[0] = gst_vaapi_picture_ref(picture);
324 fs->buffers[1] = NULL;
326 fs->output_needed = picture->output_needed;
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
335 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337 g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
339 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340 if (picture->output_flag) {
341 picture->output_needed = TRUE;
345 fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
347 field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348 TOP_FIELD : BOTTOM_FIELD;
349 g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350 fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351 g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352 picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
359 GstVaapiPictureH264 * const first_field = fs->buffers[0];
360 GstVaapiPictureH264 *second_field;
362 g_return_val_if_fail(fs->num_buffers == 1, FALSE);
364 first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365 GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
367 second_field = gst_vaapi_picture_h264_new_field(first_field);
370 gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371 gst_vaapi_picture_unref(second_field);
373 second_field->frame_num = first_field->frame_num;
374 second_field->field_poc[0] = first_field->field_poc[0];
375 second_field->field_poc[1] = first_field->field_poc[1];
376 second_field->output_flag = first_field->output_flag;
377 if (second_field->output_flag) {
378 second_field->output_needed = TRUE;
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
387 return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
395 for (i = 0; i < fs->num_buffers; i++) {
396 if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
407 for (i = 0; i < fs->num_buffers; i++) {
408 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
414 #define gst_vaapi_frame_store_ref(fs) \
415 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
417 #define gst_vaapi_frame_store_unref(fs) \
418 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
421 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
422 (GstVaapiMiniObject *)(new_fs))
/* ------------------------------------------------------------------------- */
/* --- H.264 Decoder                                                     --- */
/* ------------------------------------------------------------------------- */
#define GST_VAAPI_DECODER_H264_CAST(decoder) \
    ((GstVaapiDecoderH264 *)(decoder))

/* Video state flags tracking which bitstream headers were seen so far */
enum {
    GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
    GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
    GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,

    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
        GST_H264_VIDEO_STATE_GOT_SPS |
        GST_H264_VIDEO_STATE_GOT_PPS),
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
        GST_H264_VIDEO_STATE_GOT_SLICE)
};
444 struct _GstVaapiDecoderH264Private {
445 GstH264NalParser *parser;
448 GstVaapiStreamAlignH264 stream_alignment;
449 GstVaapiPictureH264 *current_picture;
450 GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
451 GstVaapiParserInfoH264 *active_sps;
452 GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
453 GstVaapiParserInfoH264 *active_pps;
454 GstVaapiParserInfoH264 *prev_pi;
455 GstVaapiParserInfoH264 *prev_slice_pi;
456 GstVaapiFrameStore **prev_frames;
457 guint prev_frames_alloc;
458 GstVaapiFrameStore **dpb;
463 GstVaapiProfile profile;
464 GstVaapiEntrypoint entrypoint;
465 GstVaapiChromaType chroma_type;
466 GPtrArray *inter_views;
467 GstVaapiPictureH264 *short_ref[32];
468 guint short_ref_count;
469 GstVaapiPictureH264 *long_ref[32];
470 guint long_ref_count;
471 GstVaapiPictureH264 *RefPicList0[32];
472 guint RefPicList0_count;
473 GstVaapiPictureH264 *RefPicList1[32];
474 guint RefPicList1_count;
475 guint nal_length_size;
478 gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479 gint32 poc_msb; // PicOrderCntMsb
480 gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
481 gint32 prev_poc_msb; // prevPicOrderCntMsb
482 gint32 prev_poc_lsb; // prevPicOrderCntLsb
483 gint32 frame_num_offset; // FrameNumOffset
484 gint32 frame_num; // frame_num (from slice_header())
485 gint32 prev_frame_num; // prevFrameNum
486 gboolean prev_pic_has_mmco5; // prevMmco5Pic
487 gboolean prev_pic_structure; // previous picture structure
490 guint has_context : 1;
491 guint progressive_sequence : 1;
495 * GstVaapiDecoderH264:
497 * A decoder based on H264.
499 struct _GstVaapiDecoderH264 {
501 GstVaapiDecoder parent_instance;
502 GstVaapiDecoderH264Private priv;
506 * GstVaapiDecoderH264Class:
508 * A decoder class based on H264.
510 struct _GstVaapiDecoderH264Class {
512 GstVaapiDecoderClass parent_class;
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520 GstVaapiPictureH264 *picture);
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524 GstVaapiFrameStore *fs)
526 return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
529 /* Determines if the supplied profile is one of the MVC set */
531 is_mvc_profile(GstH264Profile profile)
533 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534 profile == GST_H264_PROFILE_STEREO_HIGH;
537 /* Determines the view_id from the supplied NAL unit */
539 get_view_id(GstH264NalUnit *nalu)
541 return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
544 /* Determines the view order index (VOIdx) from the supplied view_id */
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
548 GstH264SPSExtMVC *mvc;
551 if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
554 mvc = &sps->extension.mvc;
555 for (i = 0; i <= mvc->num_views_minus1; i++) {
556 if (mvc->view[i].view_id == view_id)
559 GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
563 /* Determines NumViews */
565 get_num_views(GstH264SPS *sps)
567 return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568 sps->extension.mvc.num_views_minus1 : 0);
571 /* Get number of reference frames to use */
573 get_max_dec_frame_buffering(GstH264SPS *sps)
575 guint num_views, max_dpb_frames;
576 guint max_dec_frame_buffering, PicSizeMbs;
577 GstVaapiLevelH264 level;
578 const GstVaapiH264LevelLimits *level_limits;
580 /* Table A-1 - Level limits */
581 if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582 level = GST_VAAPI_LEVEL_H264_L1b;
584 level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585 level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586 if (G_UNLIKELY(!level_limits)) {
587 GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588 max_dec_frame_buffering = 16;
591 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592 (sps->pic_height_in_map_units_minus1 + 1) *
593 (sps->frame_mbs_only_flag ? 1 : 2));
594 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
596 if (is_mvc_profile(sps->profile_idc))
597 max_dec_frame_buffering <<= 1;
600 if (sps->vui_parameters_present_flag) {
601 GstH264VUIParams * const vui_params = &sps->vui_parameters;
602 if (vui_params->bitstream_restriction_flag)
603 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
605 switch (sps->profile_idc) {
606 case 44: // CAVLC 4:4:4 Intra profile
607 case GST_H264_PROFILE_SCALABLE_HIGH:
608 case GST_H264_PROFILE_HIGH:
609 case GST_H264_PROFILE_HIGH10:
610 case GST_H264_PROFILE_HIGH_422:
611 case GST_H264_PROFILE_HIGH_444:
612 if (sps->constraint_set3_flag)
613 max_dec_frame_buffering = 0;
619 num_views = get_num_views(sps);
620 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621 if (max_dec_frame_buffering > max_dpb_frames)
622 max_dec_frame_buffering = max_dpb_frames;
623 else if (max_dec_frame_buffering < sps->num_ref_frames)
624 max_dec_frame_buffering = sps->num_ref_frames;
625 return MAX(1, max_dec_frame_buffering);
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
631 gpointer * const entries = array;
632 guint num_entries = *array_length_ptr;
634 g_return_if_fail(index < num_entries);
636 if (index != --num_entries)
637 entries[index] = entries[num_entries];
638 entries[num_entries] = NULL;
639 *array_length_ptr = num_entries;
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
646 array_remove_index_fast(array, array_length_ptr, index);
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
652 gpointer * const entries = array;
653 const guint num_entries = *array_length_ptr - 1;
656 g_return_if_fail(index <= num_entries);
658 for (i = index; i < num_entries; i++)
659 entries[i] = entries[i + 1];
660 entries[num_entries] = NULL;
661 *array_length_ptr = num_entries;
665 #define ARRAY_REMOVE_INDEX(array, index) \
666 array_remove_index(array, &array##_count, index)
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
671 GstVaapiDecoderH264Private * const priv = &decoder->priv;
672 guint i, num_frames = --priv->dpb_count;
674 if (USE_STRICT_DPB_ORDERING) {
675 for (i = index; i < num_frames; i++)
676 gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
678 else if (index != num_frames)
679 gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680 gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
685 GstVaapiDecoderH264 *decoder,
686 GstVaapiFrameStore *fs,
687 GstVaapiPictureH264 *picture
690 picture->output_needed = FALSE;
693 if (--fs->output_needed > 0)
695 picture = fs->buffers[0];
697 return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
703 GstVaapiDecoderH264Private * const priv = &decoder->priv;
704 GstVaapiFrameStore * const fs = priv->dpb[i];
706 if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707 dpb_remove_index(decoder, i);
710 /* Finds the frame store holding the supplied picture */
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
714 GstVaapiDecoderH264Private * const priv = &decoder->priv;
717 for (i = 0; i < priv->dpb_count; i++) {
718 GstVaapiFrameStore * const fs = priv->dpb[i];
719 for (j = 0; j < fs->num_buffers; j++) {
720 if (fs->buffers[j] == picture)
727 /* Finds the picture with the lowest POC that needs to be output */
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730 GstVaapiPictureH264 **found_picture_ptr)
732 GstVaapiDecoderH264Private * const priv = &decoder->priv;
733 GstVaapiPictureH264 *found_picture = NULL;
734 guint i, j, found_index;
736 for (i = 0; i < priv->dpb_count; i++) {
737 GstVaapiFrameStore * const fs = priv->dpb[i];
738 if (!fs->output_needed)
740 if (picture && picture->base.view_id != fs->view_id)
742 for (j = 0; j < fs->num_buffers; j++) {
743 GstVaapiPictureH264 * const pic = fs->buffers[j];
744 if (!pic->output_needed)
746 if (!found_picture || found_picture->base.poc > pic->base.poc ||
747 (found_picture->base.poc == pic->base.poc &&
748 found_picture->base.voc > pic->base.voc))
749 found_picture = pic, found_index = i;
753 if (found_picture_ptr)
754 *found_picture_ptr = found_picture;
755 return found_picture ? found_index : -1;
758 /* Finds the picture with the lowest VOC that needs to be output */
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761 GstVaapiPictureH264 **found_picture_ptr)
763 GstVaapiDecoderH264Private * const priv = &decoder->priv;
764 GstVaapiPictureH264 *found_picture = NULL;
765 guint i, j, found_index;
767 for (i = 0; i < priv->dpb_count; i++) {
768 GstVaapiFrameStore * const fs = priv->dpb[i];
769 if (!fs->output_needed || fs->view_id == picture->base.view_id)
771 for (j = 0; j < fs->num_buffers; j++) {
772 GstVaapiPictureH264 * const pic = fs->buffers[j];
773 if (!pic->output_needed || pic->base.poc != picture->base.poc)
775 if (!found_picture || found_picture->base.voc > pic->base.voc)
776 found_picture = pic, found_index = i;
780 if (found_picture_ptr)
781 *found_picture_ptr = found_picture;
782 return found_picture ? found_index : -1;
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787 GstVaapiPictureH264 *picture, guint voc)
789 GstVaapiDecoderH264Private * const priv = &decoder->priv;
790 GstVaapiPictureH264 *found_picture;
794 if (priv->max_views == 1)
797 /* Emit all other view components that were in the same access
798 unit than the picture we have just found */
799 found_picture = picture;
801 found_index = dpb_find_lowest_voc(decoder, found_picture,
803 if (found_index < 0 || found_picture->base.voc >= voc)
805 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806 dpb_evict(decoder, found_picture, found_index);
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
816 GstVaapiDecoderH264Private * const priv = &decoder->priv;
817 GstVaapiPictureH264 *found_picture;
821 found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
825 if (picture && picture->base.poc != found_picture->base.poc)
826 dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
828 success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829 dpb_evict(decoder, found_picture, found_index);
830 if (priv->max_views == 1)
833 if (picture && picture->base.poc != found_picture->base.poc)
834 dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
841 GstVaapiDecoderH264Private * const priv = &decoder->priv;
844 for (i = 0; i < priv->dpb_count; i++) {
845 if (picture && picture->base.view_id != priv->dpb[i]->view_id)
847 gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
850 /* Compact the resulting DPB, i.e. remove holes */
851 for (i = 0, n = 0; i < priv->dpb_count; i++) {
854 priv->dpb[n] = priv->dpb[i];
862 /* Clear previous frame buffers only if this is a "flush-all" operation,
863 or if the picture is the first one in the access unit */
864 if (priv->prev_frames && (!picture ||
865 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867 for (i = 0; i < priv->max_views; i++)
868 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
875 while (dpb_bump(decoder, picture))
877 dpb_clear(decoder, picture);
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
883 GstVaapiDecoderH264Private * const priv = &decoder->priv;
884 const gboolean is_last_picture = /* in the access unit */
885 GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
888 // Remove all unused inter-view only reference components of the current AU
890 while (i < priv->dpb_count) {
891 GstVaapiFrameStore * const fs = priv->dpb[i];
892 if (fs->view_id != picture->base.view_id &&
893 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
895 !is_inter_view_reference_for_next_frames(decoder, fs)))
896 dpb_remove_index(decoder, i);
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
905 GstVaapiDecoderH264Private * const priv = &decoder->priv;
906 GstVaapiFrameStore *fs;
909 if (priv->max_views > 1)
910 dpb_prune_mvc(decoder, picture);
912 // Remove all unused pictures
913 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
915 while (i < priv->dpb_count) {
916 GstVaapiFrameStore * const fs = priv->dpb[i];
917 if (fs->view_id == picture->base.view_id &&
918 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919 dpb_remove_index(decoder, i);
925 // Check if picture is the second field and the first field is still in DPB
926 if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927 !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928 const gint found_index = dpb_find_picture(decoder,
929 GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930 if (found_index >= 0)
931 return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
934 // Create new frame store, and split fields if necessary
935 fs = gst_vaapi_frame_store_new(picture);
938 gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
939 gst_vaapi_frame_store_unref(fs);
941 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
942 if (!gst_vaapi_frame_store_split_fields(fs))
946 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
947 if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
948 while (priv->dpb_count == priv->dpb_size) {
949 if (!dpb_bump(decoder, picture))
954 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
956 const gboolean StoreInterViewOnlyRefFlag =
957 !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
958 GST_VAAPI_PICTURE_FLAG_AU_END) &&
959 GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
960 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
961 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
963 while (priv->dpb_count == priv->dpb_size) {
964 GstVaapiPictureH264 *found_picture;
965 if (!StoreInterViewOnlyRefFlag) {
966 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
967 found_picture->base.poc > picture->base.poc)
968 return dpb_output(decoder, NULL, picture);
970 if (!dpb_bump(decoder, picture))
975 gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
976 if (picture->output_flag) {
977 picture->output_needed = TRUE;
984 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
986 GstVaapiDecoderH264Private * const priv = &decoder->priv;
988 if (dpb_size > priv->dpb_size_max) {
989 priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
992 memset(&priv->dpb[priv->dpb_size_max], 0,
993 (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
994 priv->dpb_size_max = dpb_size;
996 priv->dpb_size = dpb_size;
998 GST_DEBUG("DPB size %u", priv->dpb_size);
1003 unref_inter_view(GstVaapiPictureH264 *picture)
1007 GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1008 gst_vaapi_picture_unref(picture);
1011 /* Resets MVC resources */
1013 mvc_reset(GstVaapiDecoderH264 *decoder)
1015 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1018 // Resize array of inter-view references
1019 if (!priv->inter_views) {
1020 priv->inter_views = g_ptr_array_new_full(priv->max_views,
1021 (GDestroyNotify)unref_inter_view);
1022 if (!priv->inter_views)
1026 // Resize array of previous frame buffers
1027 for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1028 gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1030 priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1031 sizeof(*priv->prev_frames));
1032 if (!priv->prev_frames) {
1033 priv->prev_frames_alloc = 0;
1036 for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1037 priv->prev_frames[i] = NULL;
1038 priv->prev_frames_alloc = priv->max_views;
1042 static GstVaapiDecoderStatus
1043 get_status(GstH264ParserResult result)
1045 GstVaapiDecoderStatus status;
1048 case GST_H264_PARSER_OK:
1049 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1051 case GST_H264_PARSER_NO_NAL_END:
1052 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1054 case GST_H264_PARSER_ERROR:
1055 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1058 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1065 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1067 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1069 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1070 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1071 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1073 dpb_clear(decoder, NULL);
1075 if (priv->inter_views) {
1076 g_ptr_array_unref(priv->inter_views);
1077 priv->inter_views = NULL;
1081 gst_h264_nal_parser_free(priv->parser);
1082 priv->parser = NULL;
1087 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1089 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1091 gst_vaapi_decoder_h264_close(decoder);
1093 priv->parser = gst_h264_nal_parser_new();
1100 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1102 GstVaapiDecoderH264 * const decoder =
1103 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1104 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1107 gst_vaapi_decoder_h264_close(decoder);
1113 g_free(priv->prev_frames);
1114 priv->prev_frames = NULL;
1115 priv->prev_frames_alloc = 0;
1117 for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1118 gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1119 gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1121 for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1122 gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1123 gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1127 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1129 GstVaapiDecoderH264 * const decoder =
1130 GST_VAAPI_DECODER_H264_CAST(base_decoder);
1131 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1133 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1134 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1135 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1136 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1137 priv->progressive_sequence = TRUE;
1141 /* Activates the supplied PPS */
/* Makes priv->pps[pps->id] the active PPS. Returns the parsed PPS payload,
   or NULL if no PPS with this id has been decoded yet. */
1143 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1145 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1146 GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1148 gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1149 return pi ? &pi->data.pps : NULL;
1152 /* Returns the active PPS */
/* NULL when no PPS has been activated yet. */
1153 static inline GstH264PPS *
1154 get_pps(GstVaapiDecoderH264 *decoder)
1156 GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1158 return pi ? &pi->data.pps : NULL;
1161 /* Activate the supplied SPS */
/* Makes priv->sps[sps->id] the active SPS. Returns the parsed SPS payload,
   or NULL if no SPS with this id has been decoded yet. */
1163 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1165 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1166 GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1168 gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1169 return pi ? &pi->data.sps : NULL;
1172 /* Returns the active SPS */
/* NULL when no SPS has been activated yet. */
1173 static inline GstH264SPS *
1174 get_sps(GstVaapiDecoderH264 *decoder)
1176 GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1178 return pi ? &pi->data.sps : NULL;
/* Appends @profile to @profiles (count tracked through @n_profiles_ptr) and
   also appends known compatible fallbacks: MAIN additionally allows HIGH.
   Callers must ensure the array has room (fixed capacity, 16 entries). */
1182 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1183 GstVaapiProfile profile)
1185 guint n_profiles = *n_profiles_ptr;
1187 profiles[n_profiles++] = profile;
1189 case GST_VAAPI_PROFILE_H264_MAIN:
1190 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1195 *n_profiles_ptr = n_profiles;
1198 /* Fills in compatible profiles for MVC decoding */
/* For drivers known to support slice-level decoding (matched by vendor
   string) and a DPB of at most 16 frames, MVC streams may also be decoded
   with the plain High profile, so add it as a fallback candidate. */
1200 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1201 guint *n_profiles_ptr, guint dpb_size)
1203 const gchar * const vendor_string =
1204 gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1206 gboolean add_high_profile = FALSE;
1211 const struct map *m;
1213 // Drivers that support slice level decoding
1214 if (vendor_string && dpb_size <= 16) {
1215 static const struct map drv_names[] = {
1216 { "Intel i965 driver", 17 },
/* Case-insensitive prefix match of the display vendor string */
1219 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1220 if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1221 add_high_profile = TRUE;
1225 if (add_high_profile)
1226 fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
/* Maps the SPS profile_idc to a VA-API profile, builds a candidate list of
   compatible profiles (per Annex A constraint flags and MVC rules), and
   returns the first candidate the display actually supports for the current
   entrypoint — or GST_VAAPI_PROFILE_UNKNOWN if none is usable. */
1229 static GstVaapiProfile
1230 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1232 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1233 GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1234 GstVaapiProfile profile, profiles[4];
1235 guint i, n_profiles = 0;
1237 profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1239 return GST_VAAPI_PROFILE_UNKNOWN;
1241 fill_profiles(profiles, &n_profiles, profile);
1243 case GST_VAAPI_PROFILE_H264_BASELINE:
1244 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1245 fill_profiles(profiles, &n_profiles,
1246 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1247 fill_profiles(profiles, &n_profiles,
1248 GST_VAAPI_PROFILE_H264_MAIN);
1251 case GST_VAAPI_PROFILE_H264_EXTENDED:
1252 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1253 fill_profiles(profiles, &n_profiles,
1254 GST_VAAPI_PROFILE_H264_MAIN);
/* Two views can be decoded either as Multiview High or Stereo High */
1257 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1258 if (priv->max_views == 2) {
1259 fill_profiles(profiles, &n_profiles,
1260 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1262 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1264 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1265 if (sps->frame_mbs_only_flag) {
1266 fill_profiles(profiles, &n_profiles,
1267 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1269 fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1275 /* If the preferred profile (profiles[0]) matches one that we already
1276 found, then just return it now instead of searching for it again */
1277 if (profiles[0] == priv->profile)
1278 return priv->profile;
1280 for (i = 0; i < n_profiles; i++) {
1281 if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1284 return GST_VAAPI_PROFILE_UNKNOWN;
/* Ensures the VA context matches the supplied SPS. Detects changes in view
   count, DPB size, profile, chroma format and picture size; when anything
   relevant changed (or no context exists yet) it (re)creates the VA context,
   then resets the DPB and MVC state. Returns a decoder status code. */
1287 static GstVaapiDecoderStatus
1288 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1290 GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1291 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1292 GstVaapiContextInfo info;
1293 GstVaapiProfile profile;
1294 GstVaapiChromaType chroma_type;
1295 gboolean reset_context = FALSE;
1296 guint mb_width, mb_height, dpb_size, num_views;
1298 num_views = get_num_views(sps);
1299 if (priv->max_views < num_views) {
1300 priv->max_views = num_views;
1301 GST_DEBUG("maximum number of views changed to %u", num_views);
/* A larger DPB than previously allocated forces a context reset */
1304 dpb_size = get_max_dec_frame_buffering(sps);
1305 if (priv->dpb_size < dpb_size) {
1306 GST_DEBUG("DPB size increased");
1307 reset_context = TRUE;
1310 profile = get_profile(decoder, sps, dpb_size);
1312 GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1313 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* Profile changes only force a reset for single-view streams */
1316 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1317 GST_DEBUG("profile changed");
1318 reset_context = TRUE;
1319 priv->profile = profile;
1322 chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1324 GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1325 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1328 if (priv->chroma_type != chroma_type) {
1329 GST_DEBUG("chroma format changed");
1330 reset_context = TRUE;
1331 priv->chroma_type = chroma_type;
/* Interlaced content covers two map units per picture height, hence
   the shift by !frame_mbs_only_flag */
1334 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1335 mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1336 !sps->frame_mbs_only_flag;
1337 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1338 GST_DEBUG("size changed");
1339 reset_context = TRUE;
1340 priv->mb_width = mb_width;
1341 priv->mb_height = mb_height;
1344 priv->progressive_sequence = sps->frame_mbs_only_flag;
1345 gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1347 gst_vaapi_decoder_set_pixel_aspect_ratio(
1349 sps->vui_parameters.par_n,
1350 sps->vui_parameters.par_d
/* Nothing relevant changed and a context already exists: keep it */
1353 if (!reset_context && priv->has_context)
1354 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1356 /* XXX: fix surface size when cropping is implemented */
1357 info.profile = priv->profile;
1358 info.entrypoint = priv->entrypoint;
1359 info.chroma_type = priv->chroma_type;
1360 info.width = sps->width;
1361 info.height = sps->height;
1362 info.ref_frames = dpb_size;
1364 if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1365 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1366 priv->has_context = TRUE;
/* Fresh context implies fresh DPB and MVC bookkeeping */
1369 if (!dpb_reset(decoder, dpb_size))
1370 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1372 /* Reset MVC data */
1373 if (!mvc_reset(decoder))
1374 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1375 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Copies the six 4x4 scaling lists from the PPS into the VA IQ matrix,
   converting each from zig-zag scan order to raster order. */
1379 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1380 const GstH264SPS *sps)
1384 /* There are always 6 4x4 scaling lists */
1385 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1386 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1388 for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1389 gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1390 iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
/* Copies the 8x8 scaling lists (2 for 4:2:0/4:2:2, 6 for 4:4:4) from the PPS
   into the VA IQ matrix, zig-zag to raster order. No-op when the PPS does
   not enable 8x8 transforms. */
1394 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1395 const GstH264SPS *sps)
1399 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1400 if (!pps->transform_8x8_mode_flag)
1403 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1404 g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1406 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1407 for (i = 0; i < n; i++) {
1408 gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1409 iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
/* Allocates and fills the picture's VA IQ matrix from the active SPS/PPS
   scaling lists. Rejects 4:4:4 streams since the VA buffer layout used here
   cannot hold the full set of 8x8 lists. */
1413 static GstVaapiDecoderStatus
1414 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1416 GstVaapiPicture * const base_picture = &picture->base;
1417 GstH264PPS * const pps = get_pps(decoder);
1418 GstH264SPS * const sps = get_sps(decoder);
1419 VAIQMatrixBufferH264 *iq_matrix;
1421 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1422 if (!base_picture->iq_matrix) {
1423 GST_ERROR("failed to allocate IQ matrix");
1424 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1426 iq_matrix = base_picture->iq_matrix->param;
1428 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1429 is not large enough to hold lists for 4:4:4 */
1430 if (sps->chroma_format_idc == 3)
1431 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1433 fill_iq_matrix_4x4(iq_matrix, pps, sps);
1434 fill_iq_matrix_8x8(iq_matrix, pps, sps);
1436 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* TRUE iff every bit of @ref_state is set in @state (bitmask subset test). */
1439 static inline gboolean
1440 is_valid_state(guint state, guint ref_state)
1442 return (state & ref_state) == ref_state;
/* Finalizes the current picture: if the decoder state holds a complete valid
   picture, runs reference picture marking, inserts it into the DPB and
   submits it for VA decoding; otherwise drops it. Clears decoder_state and
   the current-picture reference in all paths. */
1445 static GstVaapiDecoderStatus
1446 decode_current_picture(GstVaapiDecoderH264 *decoder)
1448 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1449 GstVaapiPictureH264 * const picture = priv->current_picture;
1451 if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1453 priv->decoder_state = 0;
1456 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* 8.2.5: marking must happen before DPB insertion */
1458 if (!exec_ref_pic_marking(decoder, picture))
1460 if (!dpb_add(decoder, picture))
1462 if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1464 gst_vaapi_picture_replace(&priv->current_picture, NULL)
1465 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1468 /* XXX: fix for cases where first field failed to be decoded */
1469 gst_vaapi_picture_replace(&priv->current_picture, NULL);
1470 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1473 priv->decoder_state = 0;
1474 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
/* Parses an SPS NAL unit into pi->data.sps; on success sets GOT_SPS in the
   parser state. Resets the parser state first, since a new SPS invalidates
   previously parsed headers. */
1477 static GstVaapiDecoderStatus
1478 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1480 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1481 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1482 GstH264SPS * const sps = &pi->data.sps;
1483 GstH264ParserResult result;
1485 GST_DEBUG("parse SPS");
1487 priv->parser_state = 0;
1489 /* Variables that don't have inferred values per the H.264
1490 standard but that should get a default value anyway */
1491 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1493 result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1494 if (result != GST_H264_PARSER_OK)
1495 return get_status(result);
1497 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1498 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a subset SPS NAL unit (MVC) into pi->data.sps; on success sets
   GOT_SPS in the parser state. Unlike parse_sps(), the parser state is not
   cleared here. */
1501 static GstVaapiDecoderStatus
1502 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1504 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1505 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1506 GstH264SPS * const sps = &pi->data.sps;
1507 GstH264ParserResult result;
1509 GST_DEBUG("parse subset SPS");
1511 /* Variables that don't have inferred values per the H.264
1512 standard but that should get a default value anyway */
1513 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1515 result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1517 if (result != GST_H264_PARSER_OK)
1518 return get_status(result);
1520 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1521 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a PPS NAL unit into pi->data.pps; on success sets GOT_PPS in the
   parser state. The state is first masked down to GOT_SPS so stale
   slice/PPS bits are dropped while a seen SPS remains valid. */
1524 static GstVaapiDecoderStatus
1525 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1527 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1528 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1529 GstH264PPS * const pps = &pi->data.pps;
1530 GstH264ParserResult result;
1532 GST_DEBUG("parse PPS");
1534 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1536 /* Variables that don't have inferred values per the H.264
1537 standard but that should get a default value anyway */
1538 pps->slice_group_map_type = 0;
1539 pps->slice_group_change_rate_minus1 = 0;
1541 result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1542 if (result != GST_H264_PARSER_OK)
1543 return get_status(result);
1545 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1546 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses the SEI messages of a NAL unit into pi->data.sei (a GArray owned by
   the parser info). Failure is logged as a warning and mapped to a decoder
   status; SEI does not affect parser_state. */
1549 static GstVaapiDecoderStatus
1550 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1552 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1553 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1554 GArray ** const sei_ptr = &pi->data.sei;
1555 GstH264ParserResult result;
1557 GST_DEBUG("parse SEI");
1559 result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1560 if (result != GST_H264_PARSER_OK) {
1561 GST_WARNING("failed to parse SEI messages");
1562 return get_status(result);
1564 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit. For MVC streams it first propagates the
   NAL extension from the preceding Prefix NAL unit (or infers defaults per
   H.7.4.1.1), then parses the header, resolves the view id / view order
   index, and sets GOT_SLICE in the parser state. */
1567 static GstVaapiDecoderStatus
1568 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1570 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1571 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1572 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1573 GstH264NalUnit * const nalu = &pi->nalu;
1575 GstH264ParserResult result;
1577 GST_DEBUG("parse slice");
/* Keep only the SPS/PPS bits; slice-level state is recomputed */
1579 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1580 GST_H264_VIDEO_STATE_GOT_PPS);
1582 /* Propagate Prefix NAL unit info, if necessary */
1583 switch (nalu->type) {
1584 case GST_H264_NAL_SLICE:
1585 case GST_H264_NAL_SLICE_IDR: {
1586 GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1587 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1588 /* MVC sequences shall have a Prefix NAL unit immediately
1589 preceding this NAL unit */
1590 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1591 pi->nalu.extension = prev_pi->nalu.extension;
1594 /* In the very unlikely case there is no Prefix NAL unit
1595 immediately preceding this NAL unit, try to infer some
1596 defaults (H.7.4.1.1) */
1597 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1598 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1599 nalu->idr_pic_flag = !mvc->non_idr_flag;
1600 mvc->priority_id = 0;
1602 mvc->temporal_id = 0;
1603 mvc->anchor_pic_flag = 0;
1604 mvc->inter_view_flag = 1;
1610 /* Variables that don't have inferred values per the H.264
1611 standard but that should get a default value anyway */
1612 slice_hdr->cabac_init_idc = 0;
1613 slice_hdr->direct_spatial_mv_pred_flag = 0;
1615 result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1616 slice_hdr, TRUE, TRUE);
1617 if (result != GST_H264_PARSER_OK)
1618 return get_status(result);
1620 sps = slice_hdr->pps->sequence;
1622 /* Update MVC data */
1623 pi->view_id = get_view_id(&pi->nalu);
1624 pi->voc = get_view_order_index(sps, pi->view_id);
1626 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1627 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed SPS parser-info in the per-id SPS table (ref-counted
   replace of any previous entry with the same id). */
1630 static GstVaapiDecoderStatus
1631 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1633 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1634 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1635 GstH264SPS * const sps = &pi->data.sps;
1637 GST_DEBUG("decode SPS");
1639 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1640 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed subset SPS (MVC) in the same per-id SPS table as plain
   SPS entries. */
1643 static GstVaapiDecoderStatus
1644 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1646 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1647 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1648 GstH264SPS * const sps = &pi->data.sps;
1650 GST_DEBUG("decode subset SPS");
1652 gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1653 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Stores the parsed PPS parser-info in the per-id PPS table (ref-counted
   replace of any previous entry with the same id). */
1656 static GstVaapiDecoderStatus
1657 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1659 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1661 GstH264PPS * const pps = &pi->data.pps;
1663 GST_DEBUG("decode PPS");
1665 gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1666 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles an end-of-sequence NAL: finalizes the current picture, flushes the
   DPB, and resets the view count so a following sequence starts clean. */
1669 static GstVaapiDecoderStatus
1670 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1672 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1673 GstVaapiDecoderStatus status;
1675 GST_DEBUG("decode sequence-end");
1677 status = decode_current_picture(decoder);
1678 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1681 dpb_flush(decoder, NULL);
1683 /* Reset defaults, should there be a new sequence available next */
1684 priv->max_views = 1;
1685 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1688 /* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Computes priv->field_poc[] for POC type 0: derives poc_msb from the
   wrap-around of pic_order_cnt_lsb relative to the previous reference
   picture, honoring IDR and memory_management_control_operation 5 resets. */
1691 GstVaapiDecoderH264 *decoder,
1692 GstVaapiPictureH264 *picture,
1693 GstH264SliceHdr *slice_hdr
1696 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1697 GstH264SPS * const sps = get_sps(decoder);
1698 const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1701 GST_DEBUG("decode picture order count type 0");
/* (8-3): select prev_poc_msb/lsb depending on IDR / mmco5 */
1703 if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1704 priv->prev_poc_msb = 0;
1705 priv->prev_poc_lsb = 0;
1707 else if (priv->prev_pic_has_mmco5) {
1708 priv->prev_poc_msb = 0;
1709 priv->prev_poc_lsb =
1710 (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1711 0 : priv->field_poc[TOP_FIELD]);
1714 priv->prev_poc_msb = priv->poc_msb;
1715 priv->prev_poc_lsb = priv->poc_lsb;
/* (8-4): detect lsb wrap-around in either direction */
1719 priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1720 if (priv->poc_lsb < priv->prev_poc_lsb &&
1721 (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1722 priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1723 else if (priv->poc_lsb > priv->prev_poc_lsb &&
1724 (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1725 priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1727 priv->poc_msb = priv->prev_poc_msb;
1729 temp_poc = priv->poc_msb + priv->poc_lsb;
1730 switch (picture->structure) {
1731 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1733 priv->field_poc[TOP_FIELD] = temp_poc;
1734 priv->field_poc[BOTTOM_FIELD] = temp_poc +
1735 slice_hdr->delta_pic_order_cnt_bottom;
1737 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1739 priv->field_poc[TOP_FIELD] = temp_poc;
1741 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1743 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1748 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1751 GstVaapiDecoderH264 *decoder,
1752 GstVaapiPictureH264 *picture,
1753 GstH264SliceHdr *slice_hdr
1756 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1757 GstH264SPS * const sps = get_sps(decoder);
1758 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1759 gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1762 GST_DEBUG("decode picture order count type 1");
1764 if (priv->prev_pic_has_mmco5)
1765 prev_frame_num_offset = 0;
1767 prev_frame_num_offset = priv->frame_num_offset;
1770 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1771 priv->frame_num_offset = 0;
1772 else if (priv->prev_frame_num > priv->frame_num)
1773 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1775 priv->frame_num_offset = prev_frame_num_offset;
1778 if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1779 abs_frame_num = priv->frame_num_offset + priv->frame_num;
1782 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1783 abs_frame_num = abs_frame_num - 1;
1785 if (abs_frame_num > 0) {
1786 gint32 expected_delta_per_poc_cycle;
1787 gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1789 expected_delta_per_poc_cycle = 0;
1790 for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1791 expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1794 poc_cycle_cnt = (abs_frame_num - 1) /
1795 sps->num_ref_frames_in_pic_order_cnt_cycle;
1796 frame_num_in_poc_cycle = (abs_frame_num - 1) %
1797 sps->num_ref_frames_in_pic_order_cnt_cycle;
1800 expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1801 for (i = 0; i <= frame_num_in_poc_cycle; i++)
1802 expected_poc += sps->offset_for_ref_frame[i];
1806 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1807 expected_poc += sps->offset_for_non_ref_pic;
1810 switch (picture->structure) {
1811 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1812 priv->field_poc[TOP_FIELD] = expected_poc +
1813 slice_hdr->delta_pic_order_cnt[0];
1814 priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1815 sps->offset_for_top_to_bottom_field +
1816 slice_hdr->delta_pic_order_cnt[1];
1818 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1819 priv->field_poc[TOP_FIELD] = expected_poc +
1820 slice_hdr->delta_pic_order_cnt[0];
1822 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1823 priv->field_poc[BOTTOM_FIELD] = expected_poc +
1824 sps->offset_for_top_to_bottom_field +
1825 slice_hdr->delta_pic_order_cnt[0];
1830 /* 8.2.1.3 - Decoding process for picture order count type 2 */
/* Computes priv->field_poc[] for POC type 2: POC follows decoding order,
   2*(frame_num_offset + frame_num), with -1 for non-reference pictures and
   0 for IDR. Both fields of a frame share the same value. */
1833 GstVaapiDecoderH264 *decoder,
1834 GstVaapiPictureH264 *picture,
1835 GstH264SliceHdr *slice_hdr
1838 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1839 GstH264SPS * const sps = get_sps(decoder);
1840 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1841 gint32 prev_frame_num_offset, temp_poc;
1843 GST_DEBUG("decode picture order count type 2");
/* (8-11): mmco5 resets the previous frame_num_offset */
1845 if (priv->prev_pic_has_mmco5)
1846 prev_frame_num_offset = 0;
1848 prev_frame_num_offset = priv->frame_num_offset;
/* (8-12): accumulate MaxFrameNum on frame_num wrap-around */
1851 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1852 priv->frame_num_offset = 0;
1853 else if (priv->prev_frame_num > priv->frame_num)
1854 priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1856 priv->frame_num_offset = prev_frame_num_offset;
1859 if (GST_VAAPI_PICTURE_IS_IDR(picture))
1861 else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1862 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1864 temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1867 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1868 priv->field_poc[TOP_FIELD] = temp_poc;
1869 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1870 priv->field_poc[BOTTOM_FIELD] = temp_poc;
1873 /* 8.2.1 - Decoding process for picture order count */
/* Dispatches to the per-type POC derivation, then copies the resulting
   field POCs into the picture (only the fields the picture actually
   contains) and sets base.poc to the smaller of the two. */
1876 GstVaapiDecoderH264 *decoder,
1877 GstVaapiPictureH264 *picture,
1878 GstH264SliceHdr *slice_hdr
1881 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1882 GstH264SPS * const sps = get_sps(decoder);
1884 switch (sps->pic_order_cnt_type) {
1886 init_picture_poc_0(decoder, picture, slice_hdr);
1889 init_picture_poc_1(decoder, picture, slice_hdr);
1892 init_picture_poc_2(decoder, picture, slice_hdr);
1896 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1897 picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1898 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1899 picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1900 picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
/* qsort comparator: orders pictures by descending pic_num. */
1904 compare_picture_pic_num_dec(const void *a, const void *b)
1906 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1907 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1909 return picB->pic_num - picA->pic_num;
/* qsort comparator: orders pictures by ascending long_term_pic_num. */
1913 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1915 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1916 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1918 return picA->long_term_pic_num - picB->long_term_pic_num;
/* qsort comparator: orders pictures by descending POC. */
1922 compare_picture_poc_dec(const void *a, const void *b)
1924 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1925 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1927 return picB->base.poc - picA->base.poc;
/* qsort comparator: orders pictures by ascending POC. */
1931 compare_picture_poc_inc(const void *a, const void *b)
1933 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1934 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1936 return picA->base.poc - picB->base.poc;
/* qsort comparator: orders pictures by descending frame_num_wrap. */
1940 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1942 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1943 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1945 return picB->frame_num_wrap - picA->frame_num_wrap;
/* qsort comparator: orders pictures by ascending long_term_frame_idx. */
1949 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1951 const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1952 const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1954 return picA->long_term_frame_idx - picB->long_term_frame_idx;
1957 /* 8.2.4.1 - Decoding process for picture numbers */
/* Derives pic_num / frame_num_wrap for short-term references and
   long_term_pic_num for long-term references, restricted to pictures of the
   same view as @picture. Field pictures double the numbers and add 1 for
   same-parity references (8-28..8-33). */
1959 init_picture_refs_pic_num(
1960 GstVaapiDecoderH264 *decoder,
1961 GstVaapiPictureH264 *picture,
1962 GstH264SliceHdr *slice_hdr
1965 GstVaapiDecoderH264Private * const priv = &decoder->priv;
1966 GstH264SPS * const sps = get_sps(decoder);
1967 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1970 GST_DEBUG("decode picture numbers");
1972 for (i = 0; i < priv->short_ref_count; i++) {
1973 GstVaapiPictureH264 * const pic = priv->short_ref[i];
/* Inter-view references are handled separately (H.8.2.1) */
1976 if (pic->base.view_id != picture->base.view_id)
/* (8-27): unwrap frame_num relative to the current picture */
1980 if (pic->frame_num > priv->frame_num)
1981 pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1983 pic->frame_num_wrap = pic->frame_num;
1985 // (8-28, 8-30, 8-31)
1986 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1987 pic->pic_num = pic->frame_num_wrap;
1989 if (pic->structure == picture->structure)
1990 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1992 pic->pic_num = 2 * pic->frame_num_wrap;
1996 for (i = 0; i < priv->long_ref_count; i++) {
1997 GstVaapiPictureH264 * const pic = priv->long_ref[i];
2000 if (pic->base.view_id != picture->base.view_id)
2003 // (8-29, 8-32, 8-33)
2004 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2005 pic->long_term_pic_num = pic->long_term_frame_idx;
2007 if (pic->structure == picture->structure)
2008 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2010 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts a reference picture list in place with qsort, using one of the
   compare_picture_* comparators above (token-pasted by suffix). */
2015 #define SORT_REF_LIST(list, n, compare_func) \
2016 qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* 8.2.4.2.5 helper: appends entries from @ref_list to @RefPicList by
   alternating parity — first a field matching @picture_structure, then one
   of the opposite parity — until @ref_list is exhausted. Updates
   *RefPicList_count with the new length. */
2019 init_picture_refs_fields_1(
2020 guint picture_structure,
2021 GstVaapiPictureH264 *RefPicList[32],
2022 guint *RefPicList_count,
2023 GstVaapiPictureH264 *ref_list[32],
2024 guint ref_list_count
2031 n = *RefPicList_count;
2034 for (; i < ref_list_count; i++) {
2035 if (ref_list[i]->structure == picture_structure) {
2036 RefPicList[n++] = ref_list[i++];
2040 for (; j < ref_list_count; j++) {
2041 if (ref_list[j]->structure != picture_structure) {
2042 RefPicList[n++] = ref_list[j++];
2046 } while (i < ref_list_count || j < ref_list_count);
2047 *RefPicList_count = n;
/* Builds a field reference picture list (8.2.4.2.5): interleaves the sorted
   short-term references first, then the sorted long-term references, using
   init_picture_refs_fields_1() for the parity alternation. */
2051 init_picture_refs_fields(
2052 GstVaapiPictureH264 *picture,
2053 GstVaapiPictureH264 *RefPicList[32],
2054 guint *RefPicList_count,
2055 GstVaapiPictureH264 *short_ref[32],
2056 guint short_ref_count,
2057 GstVaapiPictureH264 *long_ref[32],
2058 guint long_ref_count
2063 /* 8.2.4.2.5 - reference picture lists in fields */
2064 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2065 short_ref, short_ref_count);
2066 init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2067 long_ref, long_ref_count);
2068 *RefPicList_count = n;
2071 /* Finds the inter-view reference picture with the supplied view id */
/* Linear search of priv->inter_views; logs a warning when no picture with
   @view_id is present. */
2072 static GstVaapiPictureH264 *
2073 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2075 GPtrArray * const inter_views = decoder->priv.inter_views;
2078 for (i = 0; i < inter_views->len; i++) {
2079 GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2080 if (picture->base.view_id == view_id)
2084 GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2089 /* Checks whether the view id exists in the supplied list of view ids */
2091 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2095 for (i = 0; i < num_view_ids; i++) {
2096 if (view_ids[i] == view_id)
/* Checks whether @view_id is referenced by @view in either prediction list,
   using the anchor reference lists for anchor pictures and the non-anchor
   lists otherwise. */
2103 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2107 return (find_view_id(view_id, view->anchor_ref_l0,
2108 view->num_anchor_refs_l0) ||
2109 find_view_id(view_id, view->anchor_ref_l1,
2110 view->num_anchor_refs_l1));
2112 return (find_view_id(view_id, view->non_anchor_ref_l0,
2113 view->num_non_anchor_refs_l0) ||
2114 find_view_id(view_id, view->non_anchor_ref_l1,
2115 view->num_non_anchor_refs_l1));
2118 /* Checks whether the inter-view reference picture with the supplied
2119 view id is used for decoding the current view component picture */
/* Only meaningful for MVC pictures under an MVC SPS extension; looks up the
   current picture's view entry in the SPS MVC view array. */
2121 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2122 guint16 view_id, GstVaapiPictureH264 *picture)
2124 const GstH264SPS * const sps = get_sps(decoder);
2127 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2128 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2131 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2132 return find_view_id_in_view(view_id,
2133 &sps->extension.mvc.view[picture->base.voc], is_anchor);
2136 /* Checks whether the supplied inter-view reference picture is used
2137 for decoding the next view component pictures */
/* Scans every view with a higher view order index than @picture's and
   reports whether any of them lists @picture's view id as a reference. */
2139 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2140 GstVaapiPictureH264 *picture)
2142 const GstH264SPS * const sps = get_sps(decoder);
2146 if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2147 sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2150 is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2151 num_views = sps->extension.mvc.num_views_minus1 + 1;
2152 for (i = picture->base.voc + 1; i < num_views; i++) {
2153 const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2154 if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2160 /* H.8.2.1 - Initialization process for inter-view prediction references */
/* Appends up to @num_refs inter-view reference pictures (looked up by the
   supplied view ids) to @ref_list; silently skips ids that cannot be
   resolved. Updates *ref_list_count_ptr with the new length. */
2162 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2163 GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2164 const guint16 *view_ids, guint num_view_ids)
2168 n = *ref_list_count_ptr;
2169 for (j = 0; j < num_view_ids && n < num_refs; j++) {
2170 GstVaapiPictureH264 * const pic =
2171 find_inter_view_reference(decoder, view_ids[j]);
2173 ref_list[n++] = pic;
2175 *ref_list_count_ptr = n;
/* Appends inter-view prediction references to RefPicList0 or RefPicList1
   (selected by @list), taking the view-id lists from the SPS MVC extension:
   anchor_ref_lX for anchor pictures, non_anchor_ref_lX otherwise. */
2179 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2180 GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2182 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2184 const GstH264SPS * const sps = get_sps(decoder);
2184 const GstH264SPSExtMVCView *view;
2186 GST_DEBUG("initialize reference picture list for inter-view prediction");
2188 if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2190 view = &sps->extension.mvc.view[picture->base.voc];
/* Expands to an init_picture_refs_mvc_1() call on the requested list,
   bounded by num_ref_idx_lX_active_minus1 + 1 entries */
2192 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
2193 init_picture_refs_mvc_1(decoder, \
2194 priv->RefPicList##ref_list, \
2195 &priv->RefPicList##ref_list##_count, \
2196 slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2197 view->view_list##_l##ref_list, \
2198 view->num_##view_list##s_l##ref_list); \
2202 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2203 INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2205 INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2208 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2209 INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2211 INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2214 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds RefPicList0 for P/SP slices. Frames (8.2.4.2.1): short-term
   references sorted by descending pic_num, then long-term references sorted
   by ascending long_term_pic_num. Fields (8.2.4.2.2): the two pools are
   sorted (frame_num_wrap desc / long_term_frame_idx asc) and then parity-
   interleaved via init_picture_refs_fields(). Inter-view MVC references are
   appended last. */
2218 init_picture_refs_p_slice(
2219 GstVaapiDecoderH264 *decoder,
2220 GstVaapiPictureH264 *picture,
2221 GstH264SliceHdr *slice_hdr
2224 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2225 GstVaapiPictureH264 **ref_list;
2228 GST_DEBUG("decode reference picture list for P and SP slices");
2230 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2231 /* 8.2.4.2.1 - P and SP slices in frames */
2232 if (priv->short_ref_count > 0) {
2233 ref_list = priv->RefPicList0;
2234 for (i = 0; i < priv->short_ref_count; i++)
2235 ref_list[i] = priv->short_ref[i];
2236 SORT_REF_LIST(ref_list, i, pic_num_dec);
2237 priv->RefPicList0_count += i;
/* Long-term references follow the short-term ones */
2240 if (priv->long_ref_count > 0) {
2241 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2242 for (i = 0; i < priv->long_ref_count; i++)
2243 ref_list[i] = priv->long_ref[i];
2244 SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2245 priv->RefPicList0_count += i;
2249 /* 8.2.4.2.2 - P and SP slices in fields */
2250 GstVaapiPictureH264 *short_ref[32];
2251 guint short_ref_count = 0;
2252 GstVaapiPictureH264 *long_ref[32];
2253 guint long_ref_count = 0;
2255 if (priv->short_ref_count > 0) {
2256 for (i = 0; i < priv->short_ref_count; i++)
2257 short_ref[i] = priv->short_ref[i];
2258 SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2259 short_ref_count = i;
2262 if (priv->long_ref_count > 0) {
2263 for (i = 0; i < priv->long_ref_count; i++)
2264 long_ref[i] = priv->long_ref[i];
2265 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2269 init_picture_refs_fields(
2271 priv->RefPicList0, &priv->RefPicList0_count,
2272 short_ref, short_ref_count,
2273 long_ref, long_ref_count
/* H.8.2.1: append inter-view references for MVC streams */
2277 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2279 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
/* 8.2.4.2.3 / 8.2.4.2.4 - Initialization process for reference picture
 * lists RefPicList0 and RefPicList1 for B slices.  List 0 prefers
 * pictures with POC before the current picture; list 1 prefers those
 * after.  If both lists come out identical (and hold more than one
 * entry), the first two entries of RefPicList1 are swapped, as required
 * by 8.2.4.2.3.  Inter-view references are appended for MVC pictures. */
2284 init_picture_refs_b_slice(
2285 GstVaapiDecoderH264 *decoder,
2286 GstVaapiPictureH264 *picture,
2287 GstH264SliceHdr *slice_hdr
2290 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2291 GstVaapiPictureH264 **ref_list;
2294 GST_DEBUG("decode reference picture list for B slices");
2296 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2297 /* 8.2.4.2.3 - B slices in frames */
2300 if (priv->short_ref_count > 0) {
2301 // 1. Short-term references
/* List 0: POC < current first (descending), then POC >= current
 * (ascending). */
2302 ref_list = priv->RefPicList0;
2303 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2304 if (priv->short_ref[i]->base.poc < picture->base.poc)
2305 ref_list[n++] = priv->short_ref[i];
2307 SORT_REF_LIST(ref_list, n, poc_dec);
2308 priv->RefPicList0_count += n;
2310 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2311 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2312 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2313 ref_list[n++] = priv->short_ref[i];
2315 SORT_REF_LIST(ref_list, n, poc_inc);
2316 priv->RefPicList0_count += n;
2319 if (priv->long_ref_count > 0) {
2320 // 2. Long-term references
2321 ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2322 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2323 ref_list[n++] = priv->long_ref[i];
2324 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2325 priv->RefPicList0_count += n;
2329 if (priv->short_ref_count > 0) {
2330 // 1. Short-term references
/* List 1: mirror of list 0 — POC > current first (ascending),
 * then POC <= current (descending). */
2331 ref_list = priv->RefPicList1;
2332 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2333 if (priv->short_ref[i]->base.poc > picture->base.poc)
2334 ref_list[n++] = priv->short_ref[i];
2336 SORT_REF_LIST(ref_list, n, poc_inc);
2337 priv->RefPicList1_count += n;
2339 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2340 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2341 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2342 ref_list[n++] = priv->short_ref[i];
2344 SORT_REF_LIST(ref_list, n, poc_dec);
2345 priv->RefPicList1_count += n;
2348 if (priv->long_ref_count > 0) {
2349 // 2. Long-term references
2350 ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2351 for (n = 0, i = 0; i < priv->long_ref_count; i++)
2352 ref_list[n++] = priv->long_ref[i];
2353 SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2354 priv->RefPicList1_count += n;
2358 /* 8.2.4.2.4 - B slices in fields */
2359 GstVaapiPictureH264 *short_ref0[32];
2360 guint short_ref0_count = 0;
2361 GstVaapiPictureH264 *short_ref1[32];
2362 guint short_ref1_count = 0;
2363 GstVaapiPictureH264 *long_ref[32];
2364 guint long_ref_count = 0;
2366 /* refFrameList0ShortTerm */
2367 if (priv->short_ref_count > 0) {
2368 ref_list = short_ref0;
2369 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2370 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2371 ref_list[n++] = priv->short_ref[i];
2373 SORT_REF_LIST(ref_list, n, poc_dec);
2374 short_ref0_count += n;
2376 ref_list = &short_ref0[short_ref0_count];
2377 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2378 if (priv->short_ref[i]->base.poc > picture->base.poc)
2379 ref_list[n++] = priv->short_ref[i];
2381 SORT_REF_LIST(ref_list, n, poc_inc);
2382 short_ref0_count += n;
2385 /* refFrameList1ShortTerm */
2386 if (priv->short_ref_count > 0) {
2387 ref_list = short_ref1;
2388 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2389 if (priv->short_ref[i]->base.poc > picture->base.poc)
2390 ref_list[n++] = priv->short_ref[i];
2392 SORT_REF_LIST(ref_list, n, poc_inc);
2393 short_ref1_count += n;
2395 ref_list = &short_ref1[short_ref1_count];
2396 for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2397 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2398 ref_list[n++] = priv->short_ref[i];
2400 SORT_REF_LIST(ref_list, n, poc_dec);
2401 short_ref1_count += n;
2404 /* refFrameListLongTerm */
2405 if (priv->long_ref_count > 0) {
2406 for (i = 0; i < priv->long_ref_count; i++)
2407 long_ref[i] = priv->long_ref[i];
2408 SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
/* 8.2.4.2.5: derive both field reference lists from the sorted
 * frame lists, alternating parity. */
2412 init_picture_refs_fields(
2414 priv->RefPicList0, &priv->RefPicList0_count,
2415 short_ref0, short_ref0_count,
2416 long_ref, long_ref_count
2419 init_picture_refs_fields(
2421 priv->RefPicList1, &priv->RefPicList1_count,
2422 short_ref1, short_ref1_count,
2423 long_ref, long_ref_count
2427 /* Check whether RefPicList1 is identical to RefPicList0, then
2428 swap if necessary */
2429 if (priv->RefPicList1_count > 1 &&
2430 priv->RefPicList1_count == priv->RefPicList0_count &&
2431 memcmp(priv->RefPicList0, priv->RefPicList1,
2432 priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2433 GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2434 priv->RefPicList1[0] = priv->RefPicList1[1];
2435 priv->RefPicList1[1] = tmp;
2438 if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
/* H.8.2.1: append inter-view references to both lists */
2440 init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2443 init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2447 #undef SORT_REF_LIST
/* Looks up the short-term reference picture with the given PicNum in
 * priv->short_ref[].  Callers treat the result as an index into that
 * array, with a negative value meaning "not found" (see the
 * found_ref_idx >= 0 checks at the call sites). */
2450 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2452 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2455 for (i = 0; i < priv->short_ref_count; i++) {
2456 if (priv->short_ref[i]->pic_num == pic_num)
2459 GST_ERROR("found no short-term reference picture with PicNum = %d",
/* Looks up the long-term reference picture with the given
 * LongTermPicNum in priv->long_ref[].  Callers treat the result as an
 * index into that array, negative when not found. */
2465 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2467 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2470 for (i = 0; i < priv->long_ref_count; i++) {
2471 if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2474 GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
/* 8.2.4.3.1 / 8.2.4.3.2 / H.8.2.2.3 - Modification process for a single
 * reference picture list (RefPicList0 when list == 0, RefPicList1
 * otherwise).  Applies the slice header's ref_pic_list_modification
 * commands in order: idc 0/1 reorder short-term references by PicNum
 * difference, idc 2 reorders long-term references by LongTermPicNum,
 * idc 4/5 (MVC only) reorder inter-view references by view index, and
 * idc 3 terminates the loop.  Each command shifts the list down to make
 * room at ref_list_idx, inserts the selected picture, then compacts the
 * remaining entries that duplicate it. */
2480 exec_picture_refs_modification_1(
2481 GstVaapiDecoderH264 *decoder,
2482 GstVaapiPictureH264 *picture,
2483 GstH264SliceHdr *slice_hdr,
2487 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2488 GstH264SPS * const sps = get_sps(decoder);
2489 GstH264RefPicListModification *ref_pic_list_modification;
2490 guint num_ref_pic_list_modifications;
2491 GstVaapiPictureH264 **ref_list;
2492 guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2493 const guint16 *view_ids = NULL;
2494 guint i, j, n, num_refs, num_view_ids = 0;
2496 gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2498 GST_DEBUG("modification process of reference picture list %u", list);
/* Select the l0 or l1 modification commands, target list and, for MVC,
 * the applicable view dependency list. */
2501 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2502 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2503 ref_list = priv->RefPicList0;
2504 ref_list_count_ptr = &priv->RefPicList0_count;
2505 num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2507 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2508 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2509 const GstH264SPSExtMVCView * const view =
2510 &sps->extension.mvc.view[picture->base.voc];
2511 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2512 view_ids = view->anchor_ref_l0;
2513 num_view_ids = view->num_anchor_refs_l0;
2516 view_ids = view->non_anchor_ref_l0;
2517 num_view_ids = view->num_non_anchor_refs_l0;
2522 ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2523 num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2524 ref_list = priv->RefPicList1;
2525 ref_list_count_ptr = &priv->RefPicList1_count;
2526 num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2528 if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2529 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2530 const GstH264SPSExtMVCView * const view =
2531 &sps->extension.mvc.view[picture->base.voc];
2532 if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2533 view_ids = view->anchor_ref_l1;
2534 num_view_ids = view->num_anchor_refs_l1;
2537 view_ids = view->non_anchor_ref_l1;
2538 num_view_ids = view->num_non_anchor_refs_l1;
2542 ref_list_count = *ref_list_count_ptr;
/* 8.2.4.1: MaxPicNum/CurrPicNum derivation differs for fields vs
 * frames (doubled range, 2*frame_num+1 for the current field). */
2544 if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2545 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2546 CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
2549 MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2550 CurrPicNum = slice_hdr->frame_num; // frame_num
2553 picNumPred = CurrPicNum;
2554 picViewIdxPred = -1;
2556 for (i = 0; i < num_ref_pic_list_modifications; i++) {
2557 GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
/* idc == 3: end of modification commands */
2558 if (l->modification_of_pic_nums_idc == 3)
2561 /* 8.2.4.3.1 - Short-term reference pictures */
2562 if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2563 gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2564 gint32 picNum, picNumNoWrap;
/* idc 0 subtracts, idc 1 adds, with wraparound in [0, MaxPicNum) */
2567 if (l->modification_of_pic_nums_idc == 0) {
2568 picNumNoWrap = picNumPred - abs_diff_pic_num;
2569 if (picNumNoWrap < 0)
2570 picNumNoWrap += MaxPicNum;
2575 picNumNoWrap = picNumPred + abs_diff_pic_num;
2576 if (picNumNoWrap >= MaxPicNum)
2577 picNumNoWrap -= MaxPicNum;
2579 picNumPred = picNumNoWrap;
2582 picNum = picNumNoWrap;
2583 if (picNum > CurrPicNum)
2584 picNum -= MaxPicNum;
/* Shift the list to open a slot, insert the matched picture
 * (NULL when absent from the DPB), then drop later duplicates. */
2587 for (j = num_refs; j > ref_list_idx; j--)
2588 ref_list[j] = ref_list[j - 1];
2589 found_ref_idx = find_short_term_reference(decoder, picNum);
2590 ref_list[ref_list_idx++] =
2591 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2593 for (j = ref_list_idx; j <= num_refs; j++) {
2598 GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2599 ref_list[j]->pic_num : MaxPicNum;
2600 if (PicNumF != picNum ||
2601 ref_list[j]->base.view_id != picture->base.view_id)
2602 ref_list[n++] = ref_list[j];
2606 /* 8.2.4.3.2 - Long-term reference pictures */
2607 else if (l->modification_of_pic_nums_idc == 2) {
2609 for (j = num_refs; j > ref_list_idx; j--)
2610 ref_list[j] = ref_list[j - 1];
2612 find_long_term_reference(decoder, l->value.long_term_pic_num);
2613 ref_list[ref_list_idx++] =
2614 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2616 for (j = ref_list_idx; j <= num_refs; j++) {
2617 gint32 LongTermPicNumF;
2621 GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2622 ref_list[j]->long_term_pic_num : INT_MAX;
2623 if (LongTermPicNumF != l->value.long_term_pic_num ||
2624 ref_list[j]->base.view_id != picture->base.view_id)
2625 ref_list[n++] = ref_list[j];
2629 /* H.8.2.2.3 - Inter-view prediction reference pictures */
2630 else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2631 sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2632 (l->modification_of_pic_nums_idc == 4 ||
2633 l->modification_of_pic_nums_idc == 5)) {
2634 gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2635 gint32 picViewIdx, targetViewId;
/* idc 4 subtracts, idc 5 adds, wrapping within num_view_ids */
2638 if (l->modification_of_pic_nums_idc == 4) {
2639 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2641 picViewIdx += num_view_ids;
2646 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2647 if (picViewIdx >= num_view_ids)
2648 picViewIdx -= num_view_ids;
2650 picViewIdxPred = picViewIdx;
2653 targetViewId = view_ids[picViewIdx];
2656 for (j = num_refs; j > ref_list_idx; j--)
2657 ref_list[j] = ref_list[j - 1];
2658 ref_list[ref_list_idx++] =
2659 find_inter_view_reference(decoder, targetViewId);
2661 for (j = ref_list_idx; j <= num_refs; j++) {
2664 if (ref_list[j]->base.view_id != targetViewId ||
2665 ref_list[j]->base.poc != picture->base.poc)
2666 ref_list[n++] = ref_list[j];
/* Sanity check: report any hole left in the final list */
2672 for (i = 0; i < num_refs; i++)
2674 GST_ERROR("list %u entry %u is empty", list, i);
2676 *ref_list_count_ptr = num_refs;
2679 /* 8.2.4.3 - Modification process for reference picture lists */
/* Dispatcher: runs the list-modification process on RefPicList0 for any
 * non-I/non-SI slice, and on RefPicList1 for B slices, but only when
 * the corresponding ref_pic_list_modification_flag is set. */
2681 exec_picture_refs_modification(
2682 GstVaapiDecoderH264 *decoder,
2683 GstVaapiPictureH264 *picture,
2684 GstH264SliceHdr *slice_hdr
2687 GST_DEBUG("execute ref_pic_list_modification()");
2690 if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2691 slice_hdr->ref_pic_list_modification_flag_l0)
2692 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2695 if (GST_H264_IS_B_SLICE(slice_hdr) &&
2696 slice_hdr->ref_pic_list_modification_flag_l1)
2697 exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
/* Rebuilds priv->short_ref[] and priv->long_ref[] from the DPB for the
 * current picture's view.  Frame decoding only considers complete
 * frames (both fields); field decoding considers individual field
 * buffers.  Stale tail entries of both arrays are cleared and the
 * counts updated. */
2701 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2702 GstVaapiPictureH264 *picture)
2704 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2705 guint i, j, short_ref_count, long_ref_count;
2707 short_ref_count = 0;
2709 if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2710 for (i = 0; i < priv->dpb_count; i++) {
2711 GstVaapiFrameStore * const fs = priv->dpb[i];
2712 GstVaapiPictureH264 *pic;
/* Frame coding: only frame stores holding a complete frame count */
2713 if (!gst_vaapi_frame_store_has_frame(fs))
2715 pic = fs->buffers[0];
/* Restrict to references from the same view as the current picture */
2716 if (pic->base.view_id != picture->base.view_id)
2718 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2719 priv->short_ref[short_ref_count++] = pic;
2720 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2721 priv->long_ref[long_ref_count++] = pic;
2722 pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2723 pic->other_field = fs->buffers[1];
2727 for (i = 0; i < priv->dpb_count; i++) {
2728 GstVaapiFrameStore * const fs = priv->dpb[i];
/* Field coding: scan every buffered field of each frame store */
2729 for (j = 0; j < fs->num_buffers; j++) {
2730 GstVaapiPictureH264 * const pic = fs->buffers[j];
2731 if (pic->base.view_id != picture->base.view_id)
2733 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2734 priv->short_ref[short_ref_count++] = pic;
2735 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2736 priv->long_ref[long_ref_count++] = pic;
2737 pic->structure = pic->base.structure;
/* The complementary field lives at the sibling buffer index */
2738 pic->other_field = fs->buffers[j ^ 1];
/* Clear stale tail entries left over from the previous picture */
2743 for (i = short_ref_count; i < priv->short_ref_count; i++)
2744 priv->short_ref[i] = NULL;
2745 priv->short_ref_count = short_ref_count;
2747 for (i = long_ref_count; i < priv->long_ref_count; i++)
2748 priv->long_ref[i] = NULL;
2749 priv->long_ref_count = long_ref_count;
/* 8.2.4 - Decoding process for reference picture lists construction.
 * (NOTE(review): the opening signature line is not visible in this
 * chunk; this is presumably init_picture_refs — confirm upstream.)
 * Rebuilds the short/long reference arrays, derives PicNum values,
 * builds the initial RefPicListX for P/SP/B slices, applies the slice
 * header's modification commands, and finally pads the lists with NULL
 * up to num_ref_idx_lX_active_minus1 + 1 entries. */
2754 GstVaapiDecoderH264 *decoder,
2755 GstVaapiPictureH264 *picture,
2756 GstH264SliceHdr *slice_hdr
2759 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2762 init_picture_ref_lists(decoder, picture);
2763 init_picture_refs_pic_num(decoder, picture, slice_hdr);
2765 priv->RefPicList0_count = 0;
2766 priv->RefPicList1_count = 0;
/* slice_hdr->type % 5 folds the 5..9 slice type range onto 0..4 */
2768 switch (slice_hdr->type % 5) {
2769 case GST_H264_P_SLICE:
2770 case GST_H264_SP_SLICE:
2771 init_picture_refs_p_slice(decoder, picture, slice_hdr);
2773 case GST_H264_B_SLICE:
2774 init_picture_refs_b_slice(decoder, picture, slice_hdr);
2780 exec_picture_refs_modification(decoder, picture, slice_hdr);
2782 switch (slice_hdr->type % 5) {
2783 case GST_H264_B_SLICE:
/* Pad RefPicList1 with NULL entries up to the active count */
2784 num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2785 for (i = priv->RefPicList1_count; i < num_refs; i++)
2786 priv->RefPicList1[i] = NULL;
2787 priv->RefPicList1_count = num_refs;
2790 case GST_H264_P_SLICE:
2791 case GST_H264_SP_SLICE:
/* Pad RefPicList0 with NULL entries up to the active count */
2792 num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2793 for (i = priv->RefPicList0_count; i < num_refs; i++)
2794 priv->RefPicList0[i] = NULL;
2795 priv->RefPicList0_count = num_refs;
/* Initializes the current picture from the parsed slice header and NAL
 * unit info: frame_num bookkeeping, timestamps, view id/VOC, MVC flags,
 * IDR handling (DPB flush), picture structure (frame / top / bottom
 * field), reference flags, and POC computation.
 * (NOTE(review): the opening signature line is not visible in this
 * chunk; this is presumably init_picture — confirm upstream.) */
2804 GstVaapiDecoderH264 *decoder,
2805 GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2807 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2808 GstVaapiPicture * const base_picture = &picture->base;
2809 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2811 priv->prev_frame_num = priv->frame_num;
2812 priv->frame_num = slice_hdr->frame_num;
2813 picture->frame_num = priv->frame_num;
2814 picture->frame_num_wrap = priv->frame_num;
2815 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
2816 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2817 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
2818 base_picture->view_id = pi->view_id;
2819 base_picture->voc = pi->voc;
2821 /* Initialize extensions */
2822 switch (pi->nalu.extension_type) {
2823 case GST_H264_NAL_EXTENSION_MVC: {
2824 GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2826 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2827 if (mvc->inter_view_flag)
2828 GST_VAAPI_PICTURE_FLAG_SET(picture,
2829 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2830 if (mvc->anchor_pic_flag)
2831 GST_VAAPI_PICTURE_FLAG_SET(picture,
2832 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2837 /* Reset decoder state for IDR pictures */
2838 if (pi->nalu.idr_pic_flag) {
2840 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2841 dpb_flush(decoder, picture);
2844 /* Initialize picture structure */
2845 if (!slice_hdr->field_pic_flag)
2846 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2848 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2849 if (!slice_hdr->bottom_field_flag)
2850 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2852 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2854 picture->structure = base_picture->structure;
2856 /* Initialize reference flags */
2857 if (pi->nalu.ref_idc) {
2858 GstH264DecRefPicMarking * const dec_ref_pic_marking =
2859 &slice_hdr->dec_ref_pic_marking;
/* IDR pictures may be marked long-term directly via
 * long_term_reference_flag (7.4.3.3); otherwise short-term. */
2861 if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2862 dec_ref_pic_marking->long_term_reference_flag)
2863 GST_VAAPI_PICTURE_FLAG_SET(picture,
2864 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2866 GST_VAAPI_PICTURE_FLAG_SET(picture,
2867 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
/* 8.2.1 - Picture order count derivation */
2870 init_picture_poc(decoder, picture, slice_hdr);
2874 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
/* When the DPB holds max_num_ref_frames references, unmarks the oldest
 * short-term reference (smallest FrameNumWrap).  For field pairs, the
 * complementary field is removed from short_ref[] as well. */
2876 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2878 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2879 GstH264SPS * const sps = get_sps(decoder);
2880 GstVaapiPictureH264 *ref_picture;
2881 guint i, m, max_num_ref_frames;
2883 GST_DEBUG("reference picture marking process (sliding window)");
/* Only run once per frame: skip for the second field */
2885 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2888 max_num_ref_frames = sps->num_ref_frames;
2889 if (max_num_ref_frames == 0)
2890 max_num_ref_frames = 1;
/* Field decoding: the limit is expressed in fields, i.e. doubled */
2891 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2892 max_num_ref_frames <<= 1;
2894 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2896 if (priv->short_ref_count < 1)
/* Find the short-term reference with the smallest FrameNumWrap */
2899 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2900 GstVaapiPictureH264 * const picture = priv->short_ref[i];
2901 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2905 ref_picture = priv->short_ref[m];
2906 gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2907 ARRAY_REMOVE_INDEX(priv->short_ref, m);
2909 /* Both fields need to be marked as "unused for reference", so
2910 remove the other field from the short_ref[] list as well */
2911 if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2912 for (i = 0; i < priv->short_ref_count; i++) {
2913 if (priv->short_ref[i] == ref_picture->other_field) {
2914 ARRAY_REMOVE_INDEX(priv->short_ref, i);
/* 8.2.5.4.1 - Derives picNumX for an MMCO command: the PicNum of the
 * current picture (FrameNumWrap for frames, 2*FrameNumWrap+1 for
 * fields) minus (difference_of_pic_nums_minus1 + 1). */
2922 static inline gint32
2923 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2927 if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2928 pic_num = picture->frame_num_wrap;
2930 pic_num = 2 * picture->frame_num_wrap + 1;
2931 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2935 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
/* MMCO 1: locates the short-term reference with PicNum == picNumX,
 * unmarks it (both fields when the current picture is a frame) and
 * removes it from short_ref[]. */
2937 exec_ref_pic_marking_adaptive_mmco_1(
2938 GstVaapiDecoderH264 *decoder,
2939 GstVaapiPictureH264 *picture,
2940 GstH264RefPicMarking *ref_pic_marking
2943 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2946 picNumX = get_picNumX(picture, ref_pic_marking);
2947 i = find_short_term_reference(decoder, picNumX);
2951 gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2952 GST_VAAPI_PICTURE_IS_FRAME(picture));
2953 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2956 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
/* MMCO 2: locates the long-term reference with the signalled
 * LongTermPicNum, unmarks it and removes it from long_ref[]. */
2958 exec_ref_pic_marking_adaptive_mmco_2(
2959 GstVaapiDecoderH264 *decoder,
2960 GstVaapiPictureH264 *picture,
2961 GstH264RefPicMarking *ref_pic_marking
2964 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2967 i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2971 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2972 GST_VAAPI_PICTURE_IS_FRAME(picture));
2973 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2976 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
/* MMCO 3: evicts any long-term reference already holding the target
 * LongTermFrameIdx, then converts the short-term reference with
 * PicNum == picNumX into a long-term reference with that index,
 * propagating the index to its complementary field if that field is
 * also a long-term reference. */
2978 exec_ref_pic_marking_adaptive_mmco_3(
2979 GstVaapiDecoderH264 *decoder,
2980 GstVaapiPictureH264 *picture,
2981 GstH264RefPicMarking *ref_pic_marking
2984 GstVaapiDecoderH264Private * const priv = &decoder->priv;
2985 GstVaapiPictureH264 *ref_picture, *other_field;
/* Free the target LongTermFrameIdx if it is already in use */
2988 for (i = 0; i < priv->long_ref_count; i++) {
2989 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2992 if (i != priv->long_ref_count) {
2993 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2994 ARRAY_REMOVE_INDEX(priv->long_ref, i);
2997 picNumX = get_picNumX(picture, ref_pic_marking);
2998 i = find_short_term_reference(decoder, picNumX);
/* Move the picture from short_ref[] to long_ref[] */
3002 ref_picture = priv->short_ref[i];
3003 ARRAY_REMOVE_INDEX(priv->short_ref, i);
3004 priv->long_ref[priv->long_ref_count++] = ref_picture;
3006 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3007 gst_vaapi_picture_h264_set_reference(ref_picture,
3008 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3009 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3011 /* Assign LongTermFrameIdx to the other field if it was also
3012 marked as "used for long-term reference */
3013 other_field = ref_picture->other_field;
3014 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3015 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3018 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3019 * as "unused for reference" */
/* MMCO 4: sets the new MaxLongTermFrameIdx and unmarks every long-term
 * reference whose index exceeds it. */
3021 exec_ref_pic_marking_adaptive_mmco_4(
3022 GstVaapiDecoderH264 *decoder,
3023 GstVaapiPictureH264 *picture,
3024 GstH264RefPicMarking *ref_pic_marking
3027 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3028 gint32 i, long_term_frame_idx;
/* max_long_term_frame_idx_plus1 == 0 means "no long-term refs kept" */
3030 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3032 for (i = 0; i < priv->long_ref_count; i++) {
3033 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3035 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3036 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3041 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
/* MMCO 5: flushes the DPB, resets frame numbering to 0 (7.4.3) and
 * rebases the current picture's POC values so its own POC becomes 0
 * (8.2.1).  Also records the MMCO 5 occurrence for the next picture's
 * POC derivation. */
3043 exec_ref_pic_marking_adaptive_mmco_5(
3044 GstVaapiDecoderH264 *decoder,
3045 GstVaapiPictureH264 *picture,
3046 GstH264RefPicMarking *ref_pic_marking
3049 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3051 dpb_flush(decoder, picture);
3053 priv->prev_pic_has_mmco5 = TRUE;
3055 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3056 priv->frame_num = 0;
3057 priv->frame_num_offset = 0;
3058 picture->frame_num = 0;
3060 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3061 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3062 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3063 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3064 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3065 picture->base.poc = 0;
3068 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
/* MMCO 6: evicts any long-term reference already holding the target
 * LongTermFrameIdx, then marks the CURRENT picture as a long-term
 * reference with that index, propagating it to the complementary field
 * when that field is also a long-term reference. */
3070 exec_ref_pic_marking_adaptive_mmco_6(
3071 GstVaapiDecoderH264 *decoder,
3072 GstVaapiPictureH264 *picture,
3073 GstH264RefPicMarking *ref_pic_marking
3076 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3077 GstVaapiPictureH264 *other_field;
/* Free the target LongTermFrameIdx if it is already in use */
3080 for (i = 0; i < priv->long_ref_count; i++) {
3081 if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3084 if (i != priv->long_ref_count) {
3085 gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3086 ARRAY_REMOVE_INDEX(priv->long_ref, i);
3089 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3090 gst_vaapi_picture_h264_set_reference(picture,
3091 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3092 GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3094 /* Assign LongTermFrameIdx to the other field if it was also
3095 marked as "used for long-term reference */
/* Here the other field is reached via the parent picture, unlike
 * mmco_3 which uses ref_picture->other_field. */
3096 other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3097 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3098 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3101 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
/* Walks the slice header's MMCO commands in order and dispatches each
 * to the matching mmco_1..mmco_6 handler via a lookup table indexed by
 * memory_management_control_operation.  Unknown MMCO values are
 * reported as errors. */
3103 exec_ref_pic_marking_adaptive(
3104 GstVaapiDecoderH264 *decoder,
3105 GstVaapiPictureH264 *picture,
3106 GstH264DecRefPicMarking *dec_ref_pic_marking
3111 GST_DEBUG("reference picture marking process (adaptive memory control)");
3113 typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3114 GstVaapiDecoderH264 *decoder,
3115 GstVaapiPictureH264 *picture,
3116 GstH264RefPicMarking *ref_pic_marking
/* Slot 0 is unused (MMCO 0 means "end of commands") */
3119 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3121 exec_ref_pic_marking_adaptive_mmco_1,
3122 exec_ref_pic_marking_adaptive_mmco_2,
3123 exec_ref_pic_marking_adaptive_mmco_3,
3124 exec_ref_pic_marking_adaptive_mmco_4,
3125 exec_ref_pic_marking_adaptive_mmco_5,
3126 exec_ref_pic_marking_adaptive_mmco_6,
3129 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3130 GstH264RefPicMarking * const ref_pic_marking =
3131 &dec_ref_pic_marking->ref_pic_marking[i];
3133 const guint mmco = ref_pic_marking->memory_management_control_operation;
3134 if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3135 mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3137 GST_ERROR("unhandled MMCO %u", mmco);
3144 /* 8.2.5 - Execute reference picture marking process */
/* Top-level marking entry point: records MMCO-5/structure state for
 * POC derivation, tracks inter-view pictures, and for reference
 * pictures runs either the adaptive (MMCO) process or the sliding
 * window process depending on the slice header flag.  IDR pictures are
 * handled elsewhere and skipped here. */
3146 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3148 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3150 priv->prev_pic_has_mmco5 = FALSE;
3151 priv->prev_pic_structure = picture->structure;
/* Keep inter-view pictures alive for H.8.4 inter-view prediction */
3153 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3154 g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3156 if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3159 if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3160 GstH264DecRefPicMarking * const dec_ref_pic_marking =
3161 &picture->last_slice_hdr->dec_ref_pic_marking;
3162 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3163 if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3167 if (!exec_ref_pic_marking_sliding_window(decoder))
/* Resets a VAPictureH264 entry to the "invalid/unused" state expected
 * by VA-API for empty ReferenceFrames slots. */
3175 vaapi_init_picture(VAPictureH264 *pic)
3177 pic->picture_id = VA_INVALID_ID;
3179 pic->flags = VA_PICTURE_H264_INVALID;
3180 pic->TopFieldOrderCnt = 0;
3181 pic->BottomFieldOrderCnt = 0;
/* Fills a VAPictureH264 from a decoder picture: surface id, reference
 * flags, frame index (LongTermFrameIdx or frame_num) and the field
 * order counts appropriate for the given picture structure.  Passing
 * picture_structure == 0 uses the picture's own structure. */
3185 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3186 guint picture_structure)
3188 if (!picture_structure)
3189 picture_structure = picture->structure;
3191 pic->picture_id = picture->base.surface_id;
3194 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3195 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
/* frame_idx carries LongTermFrameIdx for long-term refs... */
3196 pic->frame_idx = picture->long_term_frame_idx;
3199 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3200 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
/* ...and frame_num otherwise */
3201 pic->frame_idx = picture->frame_num;
3204 switch (picture_structure) {
3205 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3206 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3207 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3209 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3210 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3211 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3212 pic->BottomFieldOrderCnt = 0;
3214 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3215 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3216 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3217 pic->TopFieldOrderCnt = 0;
/* Like vaapi_fill_picture(), but for RefPicListX entries: inter-view
 * references must not carry short/long-term reference flags (H.8.4). */
3223 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3224 GstVaapiPictureH264 *picture)
3226 vaapi_fill_picture(pic, picture, 0);
3228 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3229 if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3230 /* The inter-view reference components and inter-view only
3231 reference components that are included in the reference
3232 picture lists are considered as not being marked as "used for
3233 short-term reference" or "used for long-term reference" */
3234 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3235 VA_PICTURE_H264_LONG_TERM_REFERENCE);
/* Populates the VAPictureParameterBufferH264 for the current picture:
 * CurrPic, the ReferenceFrames[] array (same-view references plus
 * inter-view references usable by this picture, remaining slots
 * invalidated), and the SPS/PPS-derived fields and bit-fields expected
 * by the VA-API driver. */
3240 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3242 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3243 GstVaapiPicture * const base_picture = &picture->base;
3244 GstH264PPS * const pps = get_pps(decoder);
3245 GstH264SPS * const sps = get_sps(decoder);
3246 VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3249 /* Fill in VAPictureParameterBufferH264 */
3250 vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3252 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3253 GstVaapiFrameStore * const fs = priv->dpb[i];
/* Include same-view reference frames, and inter-view frames this
 * picture may predict from (H.8.2). */
3254 if ((gst_vaapi_frame_store_has_reference(fs) &&
3255 fs->view_id == picture->base.view_id) ||
3256 (gst_vaapi_frame_store_has_inter_view(fs) &&
3257 is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3258 vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3259 fs->buffers[0], fs->structure);
3260 if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* Invalidate the unused tail of ReferenceFrames[] */
3263 for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3264 vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3266 #define COPY_FIELD(s, f) \
3267 pic_param->f = (s)->f
3269 #define COPY_BFM(a, s, f) \
3270 pic_param->a.bits.f = (s)->f
3272 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3273 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3274 pic_param->frame_num = priv->frame_num;
3276 COPY_FIELD(sps, bit_depth_luma_minus8);
3277 COPY_FIELD(sps, bit_depth_chroma_minus8);
3278 COPY_FIELD(sps, num_ref_frames);
3279 COPY_FIELD(pps, num_slice_groups_minus1);
3280 COPY_FIELD(pps, slice_group_map_type);
3281 COPY_FIELD(pps, slice_group_change_rate_minus1);
3282 COPY_FIELD(pps, pic_init_qp_minus26);
3283 COPY_FIELD(pps, pic_init_qs_minus26);
3284 COPY_FIELD(pps, chroma_qp_index_offset);
3285 COPY_FIELD(pps, second_chroma_qp_index_offset);
3287 pic_param->seq_fields.value = 0; /* reset all bits */
3288 pic_param->seq_fields.bits.residual_colour_transform_flag = sps->separate_colour_plane_flag;
3289 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3291 COPY_BFM(seq_fields, sps, chroma_format_idc);
3292 COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3293 COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3294 COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3295 COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3296 COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3297 COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3298 COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3299 COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3301 pic_param->pic_fields.value = 0; /* reset all bits */
3302 pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3303 pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3305 COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3306 COPY_BFM(pic_fields, pps, weighted_pred_flag);
3307 COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3308 COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3309 COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3310 COPY_BFM(pic_fields, pps, pic_order_present_flag);
3311 COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3312 COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3316 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
/* Compares the current slice's parser info (pi) against the previous slice's
 * (prev_pi) and signals a new primary coded picture when any of the
 * conditions of H.264 spec subclause 7.4.1.2.4 holds.
 * NOTE(review): this listing is lossy — the embedded line numbering jumps
 * (e.g. 3318->3320, 3329->3331), so braces/early-return lines of the
 * CHECK_EXPR macro and the function epilogue are not visible here. */
3318 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3320 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3321 GstH264PPS * const pps = slice_hdr->pps;
3322 GstH264SPS * const sps = pps->sequence;
3323 GstH264SliceHdr *prev_slice_hdr;
3327 prev_slice_hdr = &prev_pi->data.slice_hdr;
/* CHECK_EXPR: when @expr is false, log which field differs and report a
 * new-picture boundary (the bail-out lines are not visible in this listing). */
3329 #define CHECK_EXPR(expr, field_name) do { \
3331 GST_DEBUG(field_name " differs in value"); \
/* CHECK_VALUE: shorthand for CHECK_EXPR on equality of one field of two
 * slice headers (or other structs), logging the field name. */
3336 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3337 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3339 /* view_id differs in value and VOIdx of current slice_hdr is less
3340 than the VOIdx of the prev_slice_hdr */
3341 CHECK_VALUE(pi, prev_pi, view_id);
3343 /* frame_num differs in value, regardless of inferred values to 0 */
3344 CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3346 /* pic_parameter_set_id differs in value */
/* Comparing the PPS pointers is sufficient: each pic_parameter_set_id maps
 * to one parsed GstH264PPS object held by the parser. */
3347 CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3349 /* field_pic_flag differs in value */
3350 CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3352 /* bottom_field_flag is present in both and differs in value */
3353 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3354 CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3356 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
3357 CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3358 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3360 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3361 value or delta_pic_order_cnt_bottom differs in value */
3362 if (sps->pic_order_cnt_type == 0) {
3363 CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3364 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3365 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3368 /* POC type is 1 for both and either delta_pic_order_cnt[0]
3369 differs in value or delta_pic_order_cnt[1] differs in value */
3370 else if (sps->pic_order_cnt_type == 1) {
3371 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3372 CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3375 /* IdrPicFlag differs in value */
3376 CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3378 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3379 if (pi->nalu.idr_pic_flag)
3380 CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3387 /* Detection of a new access unit, assuming we are already in presence
of a new picture. For single-view streams (same view_id) this defers to
the new-picture test; for MVC, a drop in view order index (voc) marks a
new access unit. NOTE(review): line 3393 of the original (the same-view
return path) is not visible in this listing. */
3389 static inline gboolean
3390 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3392 if (!prev_pi || prev_pi->view_id == pi->view_id)
3394 return pi->voc < prev_pi->voc;
3397 /* Finds the first field picture corresponding to the supplied picture */
/* Returns the previously decoded first field for the slice described by @pi,
 * or NULL (in lines not visible here) when the slice is a frame, when no
 * prior frame store exists for this view (voc), or when frame_num does not
 * match — in which case a new picture must be allocated instead. */
3398 static GstVaapiPictureH264 *
3399 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3401 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3402 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3403 GstVaapiFrameStore *fs;
/* A frame picture (field_pic_flag == 0) has no "first field" to pair with */
3405 if (!slice_hdr->field_pic_flag)
/* Look up the last frame store for this view order index */
3408 fs = priv->prev_frames[pi->voc];
/* A complete frame in the store cannot accept a second field */
3409 if (!fs || gst_vaapi_frame_store_has_frame(fs))
/* Both fields of a complementary pair must share the same frame_num */
3412 if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3413 return fs->buffers[0];
/* Starts decoding of a new picture from the first slice unit of an access
 * unit: activates PPS/SPS, (re)creates the VA context, allocates a new
 * picture object (or a second-field picture paired with the first field),
 * sets the crop rectangle and quantization matrices, and fills the VA
 * picture parameters. Returns a GstVaapiDecoderStatus.
 * NOTE(review): listing is lossy — e.g. the `if (first_field)` / `if
 * (!picture)` guards and several closing braces are not visible here. */
3417 static GstVaapiDecoderStatus
3418 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3420 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3421 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3422 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
/* ensure_pps()/ensure_sps() activate the parameter sets referenced by the
 * slice header; failure means the stream referenced an unseen PPS/SPS */
3423 GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3424 GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3425 GstVaapiPictureH264 *picture, *first_field;
3426 GstVaapiDecoderStatus status;
3428 g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3429 g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3431 /* Only decode base stream for MVC */
3432 switch (sps->profile_idc) {
3433 case GST_H264_PROFILE_MULTIVIEW_HIGH:
3434 case GST_H264_PROFILE_STEREO_HIGH:
/* Non-base view of an MVC stream: drop it (condition lines not visible) */
3436 GST_DEBUG("drop picture from substream");
3437 return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3442 status = ensure_context(decoder, sps);
3443 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Reset per-picture decoder state; rebuilt as units are decoded */
3446 priv->decoder_state = 0;
3448 first_field = find_first_field(decoder, pi);
3450 /* Re-use current picture where the first field was decoded */
3451 picture = gst_vaapi_picture_h264_new_field(first_field);
3453 GST_ERROR("failed to allocate field picture");
3454 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3458 /* Create new picture */
3459 picture = gst_vaapi_picture_h264_new(decoder);
3461 GST_ERROR("failed to allocate picture");
3462 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* priv->current_picture takes its own reference; drop the local one */
3465 gst_vaapi_picture_replace(&priv->current_picture, picture);
3466 gst_vaapi_picture_unref(picture);
3468 /* Clear inter-view references list if this is the primary coded
3469 picture of the current access unit */
3470 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3471 g_ptr_array_set_size(priv->inter_views, 0);
3473 /* Update cropping rectangle */
3474 if (sps->frame_cropping_flag) {
3475 GstVaapiRectangle crop_rect;
3476 crop_rect.x = sps->crop_rect_x;
3477 crop_rect.y = sps->crop_rect_y;
3478 crop_rect.width = sps->crop_rect_width;
3479 crop_rect.height = sps->crop_rect_height;
3480 gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3483 status = ensure_quant_matrix(decoder, picture);
3484 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3485 GST_ERROR("failed to reset quantizer matrix");
3489 if (!init_picture(decoder, picture, pi))
3490 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3491 if (!fill_picture(decoder, picture))
3492 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Remember the parser state at picture start for later validity checks */
3494 priv->decoder_state = pi->state;
3495 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Computes the bit offset of the slice data from the start of the NAL unit,
 * as required by VASliceParameterBufferH264.slice_data_bit_offset: the NAL
 * header bytes plus the parsed slice-header size, minus the bits consumed
 * by emulation-prevention bytes (0x03) found while parsing the header. */
3499 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3503 epb_count = slice_hdr->n_emulation_prevention_bytes;
3504 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* Copies the slice header's prediction weight table into the VA slice
 * parameters. Explicit weights are needed for one list (P/SP slices with
 * weighted_pred_flag) or two lists (B slices with weighted_bipred_idc == 1);
 * otherwise all weight fields are left zeroed.
 * NOTE(review): listing is lossy — loop closing braces and the early
 * `return TRUE;` lines after the zero-initialization are not visible. */
3508 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3509 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3511 VASliceParameterBufferH264 * const slice_param = slice->param;
3512 GstH264PPS * const pps = get_pps(decoder);
3513 GstH264SPS * const sps = get_sps(decoder);
3514 GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3515 guint num_weight_tables = 0;
/* Decide how many reference lists carry explicit weights */
3518 if (pps->weighted_pred_flag &&
3519 (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3520 num_weight_tables = 1;
3521 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3522 num_weight_tables = 2;
3524 num_weight_tables = 0;
/* Default (no explicit weighting): everything zeroed */
3526 slice_param->luma_log2_weight_denom = 0;
3527 slice_param->chroma_log2_weight_denom = 0;
3528 slice_param->luma_weight_l0_flag = 0;
3529 slice_param->chroma_weight_l0_flag = 0;
3530 slice_param->luma_weight_l1_flag = 0;
3531 slice_param->chroma_weight_l1_flag = 0;
3533 if (num_weight_tables < 1)
3536 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
3537 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* List 0 weights (P/SP/B slices) */
3539 slice_param->luma_weight_l0_flag = 1;
3540 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3541 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3542 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma weights only exist when ChromaArrayType != 0 (i.e. not monochrome
 * and not separate colour planes) */
3545 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3546 if (slice_param->chroma_weight_l0_flag) {
3547 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3548 for (j = 0; j < 2; j++) {
3549 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3550 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3555 if (num_weight_tables < 2)
/* List 1 weights (B slices only) */
3558 slice_param->luma_weight_l1_flag = 1;
3559 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3560 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3561 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3564 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3565 if (slice_param->chroma_weight_l1_flag) {
3566 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3567 for (j = 0; j < 2; j++) {
3568 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3569 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* Copies the decoder's constructed reference picture lists (RefPicList0/1)
 * into the VA slice parameters. B slices use two lists, P/SP slices one,
 * I slices none (the num_ref_lists assignments sit on lines not visible in
 * this listing). Unused trailing entries up to num_ref_idx_lX_active_minus1
 * are reset to the "invalid picture" marker. */
3577 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3578 GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3580 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3581 VASliceParameterBufferH264 * const slice_param = slice->param;
3582 guint i, num_ref_lists = 0;
3584 slice_param->num_ref_idx_l0_active_minus1 = 0;
3585 slice_param->num_ref_idx_l1_active_minus1 = 0;
3587 if (GST_H264_IS_B_SLICE(slice_hdr))
3589 else if (GST_H264_IS_I_SLICE(slice_hdr))
/* I slice: no reference lists to fill */
3594 if (num_ref_lists < 1)
3597 slice_param->num_ref_idx_l0_active_minus1 =
3598 slice_hdr->num_ref_idx_l0_active_minus1;
/* Fill valid list-0 entries, then invalidate the remainder */
3600 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3601 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3602 priv->RefPicList0[i]);
3603 for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3604 vaapi_init_picture(&slice_param->RefPicList0[i]);
3606 if (num_ref_lists < 2)
3609 slice_param->num_ref_idx_l1_active_minus1 =
3610 slice_hdr->num_ref_idx_l1_active_minus1;
/* Fill valid list-1 entries (B slices only), then invalidate the rest */
3612 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3613 vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3614 priv->RefPicList1[i]);
3615 for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3616 vaapi_init_picture(&slice_param->RefPicList1[i]);
/* Populates the VASliceParameterBufferH264 for one slice from the parsed
 * slice header: bit offset of the slice data, slice type (mod 5 to fold the
 * 5..9 aliases onto P/B/I/SP/SI), CABAC/deblocking/QP fields, then the
 * reference picture lists and prediction weight table. Error-return lines
 * after the two trailing calls are not visible in this listing. */
3621 fill_slice(GstVaapiDecoderH264 *decoder,
3622 GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3624 VASliceParameterBufferH264 * const slice_param = slice->param;
3625 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3627 /* Fill in VASliceParameterBufferH264 */
3628 slice_param->slice_data_bit_offset =
3629 get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3630 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
3631 slice_param->slice_type = slice_hdr->type % 5;
3632 slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
3633 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
3634 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
3635 slice_param->disable_deblocking_filter_idc = slice_hdr->disable_deblocking_filter_idc;
3636 slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
3637 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
3639 if (!fill_RefPicList(decoder, slice, slice_hdr))
3641 if (!fill_pred_weight_table(decoder, slice, slice_hdr))
/* Decodes one slice unit: validates parser state, activates the referenced
 * PPS/SPS, maps the input buffer to create a GstVaapiSlice over the slice
 * payload, builds the reference lists, fills the VA slice parameters, and
 * appends the slice to the current picture.
 * NOTE(review): some guard lines (e.g. `if (!slice)`) and closing braces
 * are not visible in this lossy listing. */
3646 static GstVaapiDecoderStatus
3647 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3649 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3650 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3651 GstVaapiPictureH264 * const picture = priv->current_picture;
3652 GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3653 GstVaapiSlice *slice;
3654 GstBuffer * const buffer =
3655 GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3656 GstMapInfo map_info;
3658 GST_DEBUG("slice (%u bytes)", pi->nalu.size);
/* Without valid SPS+PPS headers the slice is silently skipped rather than
 * treated as a hard error (success status, nothing decoded) */
3660 if (!is_valid_state(pi->state,
3661 GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3662 GST_WARNING("failed to receive enough headers to decode slice");
3663 return GST_VAAPI_DECODER_STATUS_SUCCESS;
3666 if (!ensure_pps(decoder, slice_hdr->pps)) {
3667 GST_ERROR("failed to activate PPS");
3668 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3671 if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3672 GST_ERROR("failed to activate SPS");
3673 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3676 if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3677 GST_ERROR("failed to map buffer");
3678 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3681 /* Check whether this is the first/last slice in the current access unit */
3682 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3683 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START)
3684 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3685 GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* The slice object copies the payload, so the buffer can be unmapped
 * immediately after creation */
3687 slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3688 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3689 gst_buffer_unmap(buffer, &map_info);
3691 GST_ERROR("failed to allocate slice");
3692 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3695 init_picture_refs(decoder, picture, slice_hdr);
3696 if (!fill_slice(decoder, slice, pi)) {
3697 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3698 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3701 gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
/* Keep the last slice header: exec_picture/DPB code needs it at end-of-frame */
3702 picture->last_slice_hdr = slice_hdr;
3703 priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3704 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Scans the adapter for a 3-byte start code (00 00 01) using a masked
 * 32-bit search; returns the offset of the match or -1 when not found.
 * The trailing arguments (offset/size/scp) are on lines not visible here. */
3708 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3710 return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3711 0xffffff00, 0x00000100,
/* Dispatches one parsed NAL unit to its decoding routine based on the NAL
 * type. SEI units are accepted without further processing; unknown types
 * raise a bitstream-parser error. (break statements and the final return
 * are on lines not visible in this lossy listing.) */
3716 static GstVaapiDecoderStatus
3717 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3719 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3720 GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3721 GstVaapiDecoderStatus status;
/* Accumulate per-unit parser state into the decoder state */
3723 priv->decoder_state |= pi->state;
3724 switch (pi->nalu.type) {
3725 case GST_H264_NAL_SPS:
3726 status = decode_sps(decoder, unit);
3728 case GST_H264_NAL_SUBSET_SPS:
3729 status = decode_subset_sps(decoder, unit);
3731 case GST_H264_NAL_PPS:
3732 status = decode_pps(decoder, unit);
3734 case GST_H264_NAL_SLICE_EXT:
3735 case GST_H264_NAL_SLICE_IDR:
3736 /* fall-through. IDR specifics are handled in init_picture() */
3737 case GST_H264_NAL_SLICE:
3738 status = decode_slice(decoder, unit);
3740 case GST_H264_NAL_SEQ_END:
3741 case GST_H264_NAL_STREAM_END:
3742 status = decode_sequence_end(decoder);
3744 case GST_H264_NAL_SEI:
3745 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3748 GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3749 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* Parses avcC-formatted codec-data: reads nal_length_size and the embedded
 * SPS and PPS NAL units, parsing and decoding each in turn, then marks the
 * stream as AVC (length-prefixed NAL units). Ownership note: each loop
 * iteration allocates a fresh parser-info object that is released via
 * gst_vaapi_parser_info_h264_replace(&pi, NULL) on success or in the
 * cleanup path. NOTE(review): the avcC header validation, num_pps read and
 * `cleanup:` label sit on lines not visible in this lossy listing. */
3755 static GstVaapiDecoderStatus
3756 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3757 const guchar *buf, guint buf_size)
3759 GstVaapiDecoderH264 * const decoder =
3760 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3761 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3762 GstVaapiDecoderStatus status;
3763 GstVaapiDecoderUnit unit;
3764 GstVaapiParserInfoH264 *pi = NULL;
3765 GstH264ParserResult result;
3766 guint i, ofs, num_sps, num_pps;
3768 unit.parsed_info = NULL;
3771 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3774 GST_ERROR("failed to decode codec-data, not in avcC format");
3775 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* avcC byte 4, low 2 bits: NAL length field size minus one */
3778 priv->nal_length_size = (buf[4] & 0x03) + 1;
/* avcC byte 5, low 5 bits: number of SPS entries that follow */
3780 num_sps = buf[5] & 0x1f;
3783 for (i = 0; i < num_sps; i++) {
3784 pi = gst_vaapi_parser_info_h264_new();
3786 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3787 unit.parsed_info = pi;
/* Entries inside avcC use 2-byte length prefixes regardless of
 * nal_length_size */
3789 result = gst_h264_parser_identify_nalu_avc(
3791 buf, ofs, buf_size, 2,
3794 if (result != GST_H264_PARSER_OK) {
3795 status = get_status(result);
3799 status = parse_sps(decoder, &unit);
3800 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3802 ofs = pi->nalu.offset + pi->nalu.size;
3804 status = decode_sps(decoder, &unit);
3805 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3807 gst_vaapi_parser_info_h264_replace(&pi, NULL);
3813 for (i = 0; i < num_pps; i++) {
3814 pi = gst_vaapi_parser_info_h264_new();
3816 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3817 unit.parsed_info = pi;
3819 result = gst_h264_parser_identify_nalu_avc(
3821 buf, ofs, buf_size, 2,
3824 if (result != GST_H264_PARSER_OK) {
3825 status = get_status(result);
3829 status = parse_pps(decoder, &unit);
3830 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3832 ofs = pi->nalu.offset + pi->nalu.size;
3834 status = decode_pps(decoder, &unit);
3835 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3837 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* From here on, input is length-prefixed (AVC), not byte-stream */
3840 priv->is_avcC = TRUE;
3841 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Cleanup path: releases any parser info still held after a failure */
3844 gst_vaapi_parser_info_h264_replace(&pi, NULL);
/* Lazily opens the decoder on first use and processes any codec-data
 * (avcC) attached to the caps. Idempotent: once priv->is_opened is set,
 * subsequent calls return success immediately. */
3848 static GstVaapiDecoderStatus
3849 ensure_decoder(GstVaapiDecoderH264 *decoder)
3851 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3852 GstVaapiDecoderStatus status;
3854 if (!priv->is_opened) {
3855 priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3856 if (!priv->is_opened)
3857 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
/* Triggers gst_vaapi_decoder_h264_decode_codec_data() when codec-data
 * is present on the caps */
3859 status = gst_vaapi_decoder_decode_codec_data(
3860 GST_VAAPI_DECODER_CAST(decoder));
3861 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3864 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* GstVaapiDecoder::parse() implementation. Extracts exactly one NAL unit
 * from the adapter — by length prefix for AVC streams, by start-code scan
 * for byte-stream — identifies and pre-parses it, then derives the decoder
 * unit flags (AU/frame start and end, slice, skip) that drive frame
 * assembly. NOTE(review): this listing is lossy; several guard lines,
 * `break;` statements and closing braces are not visible. */
3867 static GstVaapiDecoderStatus
3868 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3869 GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3871 GstVaapiDecoderH264 * const decoder =
3872 GST_VAAPI_DECODER_H264_CAST(base_decoder);
3873 GstVaapiDecoderH264Private * const priv = &decoder->priv;
3874 GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3875 GstVaapiParserInfoH264 *pi;
3876 GstVaapiDecoderStatus status;
3877 GstH264ParserResult result;
3879 guint i, size, buf_size, nalu_size, flags;
3883 status = ensure_decoder(decoder);
3884 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* NALU-aligned input: each buffer is exactly one NAL unit, so only the
 * fast (contiguous) size is relevant */
3887 switch (priv->stream_alignment) {
3888 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3889 size = gst_adapter_available_fast(adapter);
3892 size = gst_adapter_available(adapter);
3896 if (priv->is_avcC) {
/* AVC: read the nal_length_size-byte big-endian length prefix */
3897 if (size < priv->nal_length_size)
3898 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3900 buf = (guchar *)&start_code;
3901 g_assert(priv->nal_length_size <= sizeof(start_code));
3902 gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3905 for (i = 0; i < priv->nal_length_size; i++)
3906 nalu_size = (nalu_size << 8) | buf[i];
3908 buf_size = priv->nal_length_size + nalu_size;
3909 if (size < buf_size)
3910 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3914 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3916 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
/* Byte-stream: locate the start code that begins this NAL unit and
 * discard any leading garbage before it */
3919 ofs = scan_for_start_code(adapter, 0, size, NULL);
3921 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3924 gst_adapter_flush(adapter, ofs);
/* ps->input_offset2 caches the resume position of the scan for the
 * NEXT start code across calls, to avoid rescanning from the front */
3928 ofs2 = ps->input_offset2 - ofs - 4;
3932 ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3933 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3935 // Assume the whole NAL unit is present if end-of-stream
3937 ps->input_offset2 = size;
3938 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3945 ps->input_offset2 = 0;
3947 buf = (guchar *)gst_adapter_map(adapter, buf_size);
3949 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3951 unit->size = buf_size;
3953 pi = gst_vaapi_parser_info_h264_new();
3955 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* The unit takes ownership of pi; released with the unit */
3957 gst_vaapi_decoder_unit_set_parsed_info(unit,
3958 pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3961 result = gst_h264_parser_identify_nalu_avc(priv->parser,
3962 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3964 result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3965 buf, 0, buf_size, &pi->nalu);
3966 status = get_status(result);
3967 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* First pass: parse headers so slice/AU boundaries can be detected below */
3970 switch (pi->nalu.type) {
3971 case GST_H264_NAL_SPS:
3972 status = parse_sps(decoder, unit);
3974 case GST_H264_NAL_SUBSET_SPS:
3975 status = parse_subset_sps(decoder, unit);
3977 case GST_H264_NAL_PPS:
3978 status = parse_pps(decoder, unit);
3980 case GST_H264_NAL_SEI:
3981 status = parse_sei(decoder, unit);
3983 case GST_H264_NAL_SLICE_EXT:
/* Non-MVC slice extensions are accepted but not parsed further */
3984 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3985 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3989 case GST_H264_NAL_SLICE_IDR:
3990 case GST_H264_NAL_SLICE:
3991 status = parse_slice(decoder, unit);
3994 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3997 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Second pass: derive unit flags controlling frame/AU segmentation */
4001 switch (pi->nalu.type) {
4002 case GST_H264_NAL_AU_DELIMITER:
4003 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4004 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4006 case GST_H264_NAL_FILLER_DATA:
4007 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4009 case GST_H264_NAL_STREAM_END:
4010 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4012 case GST_H264_NAL_SEQ_END:
4013 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4014 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4016 case GST_H264_NAL_SPS:
4017 case GST_H264_NAL_SUBSET_SPS:
4018 case GST_H264_NAL_PPS:
4019 case GST_H264_NAL_SEI:
4020 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4021 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4023 case GST_H264_NAL_SLICE_EXT:
4024 if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4025 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4029 case GST_H264_NAL_SLICE_IDR:
4030 case GST_H264_NAL_SLICE:
4031 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4032 if (is_new_picture(pi, priv->prev_slice_pi)) {
4033 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4034 if (is_new_access_unit(pi, priv->prev_slice_pi))
4035 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4037 gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4039 case GST_H264_NAL_SPS_EXT:
4040 case GST_H264_NAL_SLICE_AUX:
4041 /* skip SPS extension and auxiliary slice for now */
4042 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4044 case GST_H264_NAL_PREFIX_UNIT:
4045 /* skip Prefix NAL units for now */
4046 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4047 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4048 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Types 14..18 are reserved/extension NAL units that may only appear
 * at the start of a new access unit */
4051 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4052 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4053 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* An AU boundary here retroactively closes the previous slice's AU */
4056 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4057 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4058 GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
/* The mapped adapter data dies with this call; drop the dangling pointer */
4060 pi->nalu.data = NULL;
4061 pi->state = priv->parser_state;
4063 gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4064 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* GstVaapiDecoder::decode() implementation: ensures the decoder is opened,
 * then dispatches the parsed unit to decode_unit(). */
4067 static GstVaapiDecoderStatus
4068 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4069 GstVaapiDecoderUnit *unit)
4071 GstVaapiDecoderH264 * const decoder =
4072 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4073 GstVaapiDecoderStatus status;
4075 status = ensure_decoder(decoder);
4076 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4078 return decode_unit(decoder, unit);
/* GstVaapiDecoder::start_frame() implementation: begins a new picture from
 * the first slice unit of the frame. */
4081 static GstVaapiDecoderStatus
4082 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4083 GstVaapiDecoderUnit *unit)
4085 GstVaapiDecoderH264 * const decoder =
4086 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4088 return decode_picture(decoder, unit);
/* GstVaapiDecoder::end_frame() implementation: submits the accumulated
 * slices of the current picture for decoding and updates the DPB. */
4091 static GstVaapiDecoderStatus
4092 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4094 GstVaapiDecoderH264 * const decoder =
4095 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4097 return decode_current_picture(decoder);
/* GstVaapiDecoder::flush() implementation: drains and empties the DPB
 * (all views — hence the NULL picture argument to dpb_flush()). */
4100 static GstVaapiDecoderStatus
4101 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4103 GstVaapiDecoderH264 * const decoder =
4104 GST_VAAPI_DECODER_H264_CAST(base_decoder);
4106 dpb_flush(decoder, NULL);
4107 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Initializes the H.264 decoder class: instance size, finalizer, and the
 * virtual method table (create/destroy/parse/decode/frame hooks and
 * codec-data handling). */
4111 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4113 GstVaapiMiniObjectClass * const object_class =
4114 GST_VAAPI_MINI_OBJECT_CLASS(klass);
4115 GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4117 object_class->size = sizeof(GstVaapiDecoderH264);
4118 object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
4120 decoder_class->create = gst_vaapi_decoder_h264_create;
4121 decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
4122 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4123 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4124 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4125 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4126 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4128 decoder_class->decode_codec_data =
4129 gst_vaapi_decoder_h264_decode_codec_data;
/* Returns the singleton H.264 decoder class, initializing it exactly once
 * in a thread-safe manner via g_once_init_enter()/leave(). */
4132 static inline const GstVaapiDecoderClass *
4133 gst_vaapi_decoder_h264_class(void)
4135 static GstVaapiDecoderH264Class g_class;
4136 static gsize g_class_init = FALSE;
4138 if (g_once_init_enter(&g_class_init)) {
4139 gst_vaapi_decoder_h264_class_init(&g_class);
4140 g_once_init_leave(&g_class_init, TRUE);
4142 return GST_VAAPI_DECODER_CLASS(&g_class);
4146 * gst_vaapi_decoder_h264_set_alignment:
4147 * @decoder: a #GstVaapiDecoderH264
4148 * @alignment: the #GstVaapiStreamAlignH264
4150 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4151 * of each buffer that is supplied to the decoder. This could be no
4152 * specific alignment, NAL unit boundaries, or access unit boundaries.
/* Simple setter: stores the alignment hint consumed by the parse() vfunc
 * to choose between fast (contiguous) and full adapter scans. */
4155 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4156 GstVaapiStreamAlignH264 alignment)
4158 g_return_if_fail(decoder != NULL);
4160 decoder->priv.stream_alignment = alignment;
4164 * gst_vaapi_decoder_h264_new:
4165 * @display: a #GstVaapiDisplay
4166 * @caps: a #GstCaps holding codec information
4168 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
4169 * hold extra information like codec-data and picture coded size.
4171 * Return value: the newly allocated #GstVaapiDecoder object
4174 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4176 return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);