2 * gstvaapidecoder_h264.c - H.264 decoder
4 * Copyright (C) 2011-2014 Intel Corporation
5 * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
7 * This library is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public License
9 * as published by the Free Software Foundation; either version 2.1
10 * of the License, or (at your option) any later version.
12 * This library is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with this library; if not, write to the Free
19 * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20 * Boston, MA 02110-1301 USA
24 * SECTION:gstvaapidecoder_h264
25 * @short_description: H.264 decoder
29 #include <gst/base/gstadapter.h>
30 #include <gst/codecparsers/gsth264parser.h>
31 #include "gstvaapidecoder_h264.h"
32 #include "gstvaapidecoder_objects.h"
33 #include "gstvaapidecoder_priv.h"
34 #include "gstvaapidisplay_priv.h"
35 #include "gstvaapiutils_h264_priv.h"
38 #include "gstvaapidebug.h"
40 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
41 #define USE_STRICT_DPB_ORDERING 0
43 typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
44 typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
45 typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
46 typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
47 typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
48 typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
49 typedef struct _GstVaapiStereo3DInfo GstVaapiStereo3DInfo;
51 // Used for field_poc[]
53 #define BOTTOM_FIELD 1
55 /* ------------------------------------------------------------------------- */
56 /* --- H.264 Parser Info --- */
57 /* ------------------------------------------------------------------------- */
60 * Extended decoder unit flags:
62 * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
63 * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
67 /* This flag does not strictly follow the definitions (7.4.1.2.3)
68 for detecting the start of an access unit as we are only
69 interested in knowing if the current slice is the first one or
70 the last one in the current access unit */
71 GST_VAAPI_DECODER_UNIT_FLAG_AU_START =
72 (GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73 GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75 GST_VAAPI_DECODER_UNIT_FLAGS_AU = (GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
76 GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 #define GST_VAAPI_PARSER_INFO_H264(obj) \
80 ((GstVaapiParserInfoH264 *)(obj))
/* Per-NAL-unit parser state, refcounted as a GstVaapiMiniObject.
 * NOTE(review): the nalu field and the SPS/PPS/SEI payload union are not
 * visible in this chunk (lines elided) -- confirm against the full file. */
82 struct _GstVaapiParserInfoH264
84   GstVaapiMiniObject parent_instance;
91   GstH264SliceHdr slice_hdr;
94   guint flags; // Same as decoder unit flags (persistent)
95   guint view_id; // View ID of slice
96   guint voc; // View order index (VOIdx) of slice
100 gst_vaapi_parser_info_h264_finalize (GstVaapiParserInfoH264 * pi)
105 switch (pi->nalu.type) {
106 case GST_H264_NAL_SPS:
107 case GST_H264_NAL_SUBSET_SPS:
108 gst_h264_sps_clear (&pi->data.sps);
110 case GST_H264_NAL_PPS:
111 gst_h264_pps_clear (&pi->data.pps);
113 case GST_H264_NAL_SEI:
115 g_array_unref (pi->data.sei);
/* Returns the singleton mini-object class for parser info objects,
 * wiring in the size and the finalizer above. */
122 static inline const GstVaapiMiniObjectClass *
123 gst_vaapi_parser_info_h264_class (void)
125   static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
126     .size = sizeof (GstVaapiParserInfoH264),
127     .finalize = (GDestroyNotify) gst_vaapi_parser_info_h264_finalize
129   return &GstVaapiParserInfoH264Class;
/* Allocates a new, zero-initialized parser info object (refcount 1).
 * Returns NULL on allocation failure. */
132 static inline GstVaapiParserInfoH264 *
133 gst_vaapi_parser_info_h264_new (void)
135   return (GstVaapiParserInfoH264 *)
136       gst_vaapi_mini_object_new (gst_vaapi_parser_info_h264_class ());
139 #define gst_vaapi_parser_info_h264_ref(pi) \
140 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
142 #define gst_vaapi_parser_info_h264_unref(pi) \
143 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
145 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
146 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
147 (GstVaapiMiniObject *)(new_pi))
149 /* ------------------------------------------------------------------------- */
150 /* --- H.264 Pictures --- */
151 /* ------------------------------------------------------------------------- */
154 * Extended picture flags:
156 * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
157 * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
158 * may be used for inter-view prediction
159 * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
160 * i.e. a picture that is decoded with only inter-view prediction,
161 * and not inter prediction
162 * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
164 * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
166 * @GST_VAAPI_PICTURE_FLAG_GHOST: flag that specifies a "non-existing"
167 * picture, without any viable GstVideoCodecFrame associated to it.
168 * i.e. a dummy picture with some valid contents
169 * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
170 * "used for short-term reference"
171 * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
172 * "used for long-term reference"
173 * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
174 * reference picture (short-term reference or long-term reference)
178 GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
179 GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
180 GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
181 GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
182 GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
183 GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
184 GST_VAAPI_PICTURE_FLAG_GHOST = (GST_VAAPI_PICTURE_FLAG_LAST << 6),
186 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE =
187 (GST_VAAPI_PICTURE_FLAG_REFERENCE),
188 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE =
189 (GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
190 GST_VAAPI_PICTURE_FLAGS_REFERENCE =
191 (GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
192 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
195 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
196 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
198 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
199 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
200 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
201 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
203 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
204 ((GST_VAAPI_PICTURE_FLAGS(picture) & \
205 GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
206 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
208 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
209 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
211 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
212 (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
214 #define GST_VAAPI_PICTURE_H264(picture) \
215 ((GstVaapiPictureH264 *)(picture))
/* H.264-specific picture, extending GstVaapiPicture with the state needed
 * for POC derivation and reference picture marking (8.2.1 / 8.2.5).
 * NOTE(review): several fields (structure, field_poc[], output_flag, pps)
 * are referenced elsewhere in this file but their declarations are not
 * visible in this chunk -- presumably on the elided lines. */
217 struct _GstVaapiPictureH264
219   GstVaapiPicture base;
220   GstH264SliceHdr *last_slice_hdr;
223   gint32 frame_num; // Original frame_num from slice_header()
224   gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
225   gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
226   gint32 pic_num; // Temporary for ref pic marking: PicNum
227   gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
228   GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
230   guint output_needed:1; // TRUE while the picture still awaits output
233 GST_VAAPI_CODEC_DEFINE_TYPE (GstVaapiPictureH264, gst_vaapi_picture_h264);
/* Destructor: delegates to the base picture destroy. */
236 gst_vaapi_picture_h264_destroy (GstVaapiPictureH264 * picture)
238   gst_vaapi_picture_destroy (GST_VAAPI_PICTURE (picture));
/* Constructor: creates the base picture, then initializes the H.264
 * specific state. field_poc[] starts at G_MAXINT32 meaning "not yet
 * computed" (checked by frame-store merging code below). */
242 gst_vaapi_picture_h264_create (GstVaapiPictureH264 * picture,
243     const GstVaapiCodecObjectConstructorArgs * args)
245   if (!gst_vaapi_picture_create (GST_VAAPI_PICTURE (picture), args))
248   picture->structure = picture->base.structure;
249   picture->field_poc[0] = G_MAXINT32;
250   picture->field_poc[1] = G_MAXINT32;
251   picture->output_needed = FALSE;
/* Allocates a new H.264 picture, reserving space for the VA-API
 * picture parameter buffer. Returns NULL on failure. */
255 static inline GstVaapiPictureH264 *
256 gst_vaapi_picture_h264_new (GstVaapiDecoderH264 * decoder)
258   return (GstVaapiPictureH264 *)
259       gst_vaapi_codec_object_new (&GstVaapiPictureH264Class,
260       GST_VAAPI_CODEC_BASE (decoder), NULL,
261       sizeof (VAPictureParameterBufferH264), NULL, 0, 0);
/* Replaces the reference flags on @picture and, when @other_field is TRUE,
 * also on the complementary field of the same frame store (if any).
 * Note the reassignment of @picture in the condition: after it, @picture
 * points at the other field. */
265 gst_vaapi_picture_h264_set_reference (GstVaapiPictureH264 * picture,
266     guint reference_flags, gboolean other_field)
270   GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
271   GST_VAAPI_PICTURE_FLAG_SET (picture, reference_flags);
273   if (!other_field || !(picture = picture->other_field))
275   GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
276   GST_VAAPI_PICTURE_FLAG_SET (picture, reference_flags);
/* Creates the second-field picture derived from @picture (same surface,
 * opposite parity). Returns NULL on failure. */
279 static inline GstVaapiPictureH264 *
280 gst_vaapi_picture_h264_new_field (GstVaapiPictureH264 * picture)
282   g_return_val_if_fail (picture, NULL);
284   return (GstVaapiPictureH264 *) gst_vaapi_picture_new_field (&picture->base);
/* Creates a clone of @picture sharing the same underlying surface.
 * Returns NULL on failure. */
287 static inline GstVaapiPictureH264 *
288 gst_vaapi_picture_h264_new_clone (GstVaapiPictureH264 * picture)
290   g_return_val_if_fail (picture, NULL);
292   return (GstVaapiPictureH264 *) gst_vaapi_picture_new_clone (&picture->base);
295 /* ------------------------------------------------------------------------- */
296 /* --- Frame Buffers (DPB) --- */
297 /* ------------------------------------------------------------------------- */
/* A DPB frame store: holds one frame, or up to two complementary fields.
 * NOTE(review): fields referenced elsewhere (view_id, structure,
 * num_buffers, output_needed, output_called) are declared on lines not
 * visible in this chunk. */
299 struct _GstVaapiFrameStore
302   GstVaapiMiniObject parent_instance;
306   GstVaapiPictureH264 *buffers[2];
/* Finalizer: drops the references held on the stored picture(s). */
313 gst_vaapi_frame_store_finalize (gpointer object)
315   GstVaapiFrameStore *const fs = object;
318   for (i = 0; i < fs->num_buffers; i++)
319     gst_vaapi_picture_replace (&fs->buffers[i], NULL);
/* Creates a frame store wrapping @picture as its first (and so far only)
 * buffer, inheriting view id and structure. The picture is marked as
 * needing output when its output_flag is set. Returns NULL on failure. */
322 static GstVaapiFrameStore *
323 gst_vaapi_frame_store_new (GstVaapiPictureH264 * picture)
325   GstVaapiFrameStore *fs;
327   static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
328     sizeof (GstVaapiFrameStore),
329     gst_vaapi_frame_store_finalize
332   fs = (GstVaapiFrameStore *)
333       gst_vaapi_mini_object_new (&GstVaapiFrameStoreClass);
337   fs->view_id = picture->base.view_id;
338   fs->structure = picture->structure;
339   fs->buffers[0] = gst_vaapi_picture_ref (picture);
340   fs->buffers[1] = NULL;
342   fs->output_needed = 0;
343   fs->output_called = 0;
345   if (picture->output_flag) {
346     picture->output_needed = TRUE;
/* Adds the second field @picture to a frame store that already holds the
 * first field, completing it into a frame. Propagates the known field POC
 * values in both directions so both pictures end with a complete
 * field_poc[] pair (G_MAXINT32 means "unset"). */
353 gst_vaapi_frame_store_add (GstVaapiFrameStore * fs,
354     GstVaapiPictureH264 * picture)
358   g_return_val_if_fail (fs->num_buffers == 1, FALSE);
359   g_return_val_if_fail (!GST_VAAPI_PICTURE_IS_FRAME (picture), FALSE);
360   g_return_val_if_fail (!GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture), FALSE);
362   gst_vaapi_picture_replace (&fs->buffers[fs->num_buffers++], picture);
363   if (picture->output_flag) {
364     picture->output_needed = TRUE;
368   fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
370   field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
371       TOP_FIELD : BOTTOM_FIELD;
373   if (fs->buffers[0]->field_poc[field] != G_MAXINT32)
375   fs->buffers[0]->field_poc[field] = picture->field_poc[field];
377   if (picture->field_poc[!field] != G_MAXINT32)
379   picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
/* Splits a progressive-coded frame stored in @fs into two field pictures
 * for interlaced handling. @tff selects which parity the existing picture
 * becomes; a derived second-field picture is created, inherits frame_num,
 * POCs, output and reference state, and is appended to the store.
 * NOTE(review): second_field is dereferenced after gst_vaapi_picture_unref;
 * this is safe only because gst_vaapi_picture_replace took a reference
 * just above. */
385 gst_vaapi_frame_store_split_fields (GstVaapiFrameStore * fs, gboolean tff)
387   GstVaapiPictureH264 *const first_field = fs->buffers[0];
388   GstVaapiPictureH264 *second_field;
390   g_return_val_if_fail (fs->num_buffers == 1, FALSE);
392   first_field->base.structure = tff ?
393       GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD :
394       GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
395   GST_VAAPI_PICTURE_FLAG_SET (first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
397   second_field = gst_vaapi_picture_h264_new_field (first_field);
400   gst_vaapi_picture_h264_set_reference (second_field,
401       GST_VAAPI_PICTURE_FLAGS (first_field) & GST_VAAPI_PICTURE_FLAGS_REFERENCE,
403   gst_vaapi_picture_replace (&fs->buffers[fs->num_buffers++], second_field);
404   gst_vaapi_picture_unref (second_field);
406   second_field->frame_num = first_field->frame_num;
407   second_field->field_poc[0] = first_field->field_poc[0];
408   second_field->field_poc[1] = first_field->field_poc[1];
409   second_field->output_flag = first_field->output_flag;
410   if (second_field->output_flag) {
411     second_field->output_needed = TRUE;
/* TRUE when the frame store holds a complete frame (both fields merged,
 * or a progressive frame). */
417 static inline gboolean
418 gst_vaapi_frame_store_has_frame (GstVaapiFrameStore * fs)
420   return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
/* TRUE when the store can be output: either a full frame, or a picture
 * explicitly marked as having only one field. */
423 static inline gboolean
424 gst_vaapi_frame_store_is_complete (GstVaapiFrameStore * fs)
426   return gst_vaapi_frame_store_has_frame (fs) ||
427       GST_VAAPI_PICTURE_IS_ONEFIELD (fs->buffers[0]);
/* TRUE if any stored picture is marked "used for reference". */
430 static inline gboolean
431 gst_vaapi_frame_store_has_reference (GstVaapiFrameStore * fs)
435   for (i = 0; i < fs->num_buffers; i++) {
436     if (GST_VAAPI_PICTURE_IS_REFERENCE (fs->buffers[i]))
/* TRUE if any stored picture may be used for MVC inter-view prediction. */
443 gst_vaapi_frame_store_has_inter_view (GstVaapiFrameStore * fs)
447   for (i = 0; i < fs->num_buffers; i++) {
448     if (GST_VAAPI_PICTURE_IS_INTER_VIEW (fs->buffers[i]))
454 #define gst_vaapi_frame_store_ref(fs) \
455 gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
457 #define gst_vaapi_frame_store_unref(fs) \
458 gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
460 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
461 gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
462 (GstVaapiMiniObject *)(new_fs))
464 /* ------------------------------------------------------------------------- */
465 /* --- H.264 3D Info --- */
466 /* ------------------------------------------------------------------------- */
468 * GstVaapiStereo3DInfo:
469 * @mode: the #GstVideoMultiviewMode.
470 * @flags: the #GstVideoMultiviewFlags.
471 * @id: the id number.
472 * @repetition_period: 0 means once, 1 means always, >1 compare with poc.
/* Frame-packing / stereo 3D state parsed from SEI messages.
 * NOTE(review): the @id field documented above is not visible in this
 * chunk -- presumably declared on an elided line. */
474 struct _GstVaapiStereo3DInfo
476   GstVideoMultiviewMode mode;
477   GstVideoMultiviewFlags flags;
479   guint repetition_period;
482 /* ------------------------------------------------------------------------- */
483 /* --- H.264 Decoder --- */
484 /* ------------------------------------------------------------------------- */
486 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
487 ((GstVaapiDecoderH264 *)(decoder))
/* Bitmask tracking which stream headers/slices have been seen so far;
 * used to decide when decoding of a picture may legally start.
 * NOTE(review): the enum opener is on an elided line. */
491   GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
492   GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
493   GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
494   GST_H264_VIDEO_STATE_GOT_I_FRAME = 1 << 3, // persistent across SPS
495   GST_H264_VIDEO_STATE_GOT_P_SLICE = 1 << 4, // predictive (all non-intra)
497   GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (GST_H264_VIDEO_STATE_GOT_SPS |
498       GST_H264_VIDEO_STATE_GOT_PPS),
499   GST_H264_VIDEO_STATE_VALID_PICTURE =
500       (GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
501       GST_H264_VIDEO_STATE_GOT_SLICE)
/* Private decoder state: parser handles, active parameter sets, the DPB,
 * reference picture lists, and the POC / frame_num bookkeeping variables
 * named after the spec (8.2.1, 8.2.4, 8.2.5).
 * NOTE(review): several fields used by the DPB code (dpb_count, dpb_size,
 * max_views, max_frame_num, output_sorted_frames, ...) are declared on
 * lines not visible in this chunk. */
504 struct _GstVaapiDecoderH264Private
506   GstH264NalParser *parser;
509   GstVaapiStreamAlignH264 stream_alignment;
510   GstVaapiPictureH264 *current_picture;
511   GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
512   GstVaapiParserInfoH264 *active_sps;
513   GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
514   GstVaapiParserInfoH264 *active_pps;
515   GstVaapiParserInfoH264 *prev_pi;
516   GstVaapiParserInfoH264 *prev_slice_pi;
517   GstVaapiFrameStore **prev_ref_frames;
518   GstVaapiFrameStore **prev_frames;
519   guint prev_frames_alloc;
520   GstVaapiFrameStore **dpb;
525   GstVaapiProfile profile;
526   GstVaapiEntrypoint entrypoint;
527   GstVaapiChromaType chroma_type;
528   GPtrArray *inter_views;
529   GstVaapiPictureH264 *short_ref[32];
530   guint short_ref_count;
531   GstVaapiPictureH264 *long_ref[32];
532   guint long_ref_count;
533   GstVaapiPictureH264 *RefPicList0[32];
534   guint RefPicList0_count;
535   GstVaapiPictureH264 *RefPicList1[32];
536   guint RefPicList1_count;
537   guint nal_length_size;
540   guint pic_structure; // pic_struct (from SEI pic_timing() or inferred)
541   gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
542   gint32 poc_msb; // PicOrderCntMsb
543   gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
544   gint32 prev_poc_msb; // prevPicOrderCntMsb
545   gint32 prev_poc_lsb; // prevPicOrderCntLsb
546   gint32 frame_num_offset; // FrameNumOffset
547   gint32 frame_num; // frame_num (from slice_header())
548   gint32 prev_frame_num; // prevFrameNum
549   gint32 prev_ref_frame_num; // prevRefFrameNum
550   gboolean prev_pic_has_mmco5; // prevMmco5Pic
551   gboolean prev_pic_reference; // previous picture is a reference
552   guint prev_pic_structure; // previous picture structure
556   guint progressive_sequence:1;
557   guint top_field_first:1;
559   gboolean force_low_latency;
562   GstVaapiStereo3DInfo stereo_info;
566 * GstVaapiDecoderH264:
568 * A decoder based on H264.
/* Public instance struct: base decoder plus the private state above. */
570 struct _GstVaapiDecoderH264
573   GstVaapiDecoder parent_instance;
574   GstVaapiDecoderH264Private priv;
578 * GstVaapiDecoderH264Class:
580 * A decoder class based on H264.
/* Class struct: no H.264-specific class members. */
582 struct _GstVaapiDecoderH264Class
585   GstVaapiDecoderClass parent_class;
588 G_DEFINE_TYPE (GstVaapiDecoderH264, gst_vaapi_decoder_h264,
589 GST_TYPE_VAAPI_DECODER);
592 exec_ref_pic_marking (GstVaapiDecoderH264 * decoder,
593 GstVaapiPictureH264 * picture);
596 exec_ref_pic_marking_sliding_window (GstVaapiDecoderH264 * decoder);
599 is_inter_view_reference_for_next_pictures (GstVaapiDecoderH264 * decoder,
600 GstVaapiPictureH264 * picture);
/* Frame-store variant of the inter-view reference test: checks the first
 * buffer of @fs against upcoming pictures. */
602 static inline gboolean
603 is_inter_view_reference_for_next_frames (GstVaapiDecoderH264 * decoder,
604     GstVaapiFrameStore * fs)
606   return is_inter_view_reference_for_next_pictures (decoder, fs->buffers[0]);
609 /* Determines if the supplied profile is one of the MVC set */
611 is_mvc_profile (GstH264Profile profile)
613 return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
614 profile == GST_H264_PROFILE_STEREO_HIGH;
617 /* Determines the view_id from the supplied NAL unit; non-MVC NAL units
   belong to the base view (view_id 0) */
619 get_view_id (GstH264NalUnit * nalu)
621   return GST_H264_IS_MVC_NALU (nalu) ? nalu->extension.mvc.view_id : 0;
624 /* Determines the view order index (VOIdx) from the supplied view_id */
/* Scans the SPS MVC extension's view list for @view_id. Non-MVC SPS (or
 * NULL) maps to the base view. NOTE(review): the return statements for
 * the match/default/error paths are on elided lines -- presumably
 * returning the loop index, 0 and an error value respectively. */
626 get_view_order_index (GstH264SPS * sps, guint16 view_id)
628   GstH264SPSExtMVC *mvc;
631   if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
634   mvc = &sps->extension.mvc;
635   for (i = 0; i <= mvc->num_views_minus1; i++) {
636     if (mvc->view[i].view_id == view_id)
639   GST_ERROR ("failed to find VOIdx from view_id (%d)", view_id);
643 /* Determines NumViews: 1 for non-MVC streams, otherwise
   num_views_minus1 + 1 from the SPS MVC extension */
645 get_num_views (GstH264SPS * sps)
647   return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
648       sps->extension.mvc.num_views_minus1 : 0);
651 /* Get number of reference frames to use */
653 get_max_dec_frame_buffering (GstH264SPS * sps)
655 guint num_views, max_dpb_frames;
656 guint max_dec_frame_buffering, PicSizeMbs;
657 GstVaapiLevelH264 level;
658 const GstVaapiH264LevelLimits *level_limits;
660 /* Table A-1 - Level limits */
661 if (G_UNLIKELY (sps->level_idc == 11 && sps->constraint_set3_flag))
662 level = GST_VAAPI_LEVEL_H264_L1b;
664 level = gst_vaapi_utils_h264_get_level (sps->level_idc);
665 level_limits = gst_vaapi_utils_h264_get_level_limits (level);
666 if (G_UNLIKELY (!level_limits)) {
667 GST_FIXME ("unsupported level_idc value (%d)", sps->level_idc);
668 max_dec_frame_buffering = 16;
670 PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
671 (sps->pic_height_in_map_units_minus1 + 1) *
672 (sps->frame_mbs_only_flag ? 1 : 2));
673 max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
675 if (is_mvc_profile (sps->profile_idc))
676 max_dec_frame_buffering <<= 1;
679 if (sps->vui_parameters_present_flag) {
680 GstH264VUIParams *const vui_params = &sps->vui_parameters;
681 if (vui_params->bitstream_restriction_flag)
682 max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
684 switch (sps->profile_idc) {
685 case 44: // CAVLC 4:4:4 Intra profile
686 case GST_H264_PROFILE_SCALABLE_HIGH:
687 case GST_H264_PROFILE_HIGH:
688 case GST_H264_PROFILE_HIGH10:
689 case GST_H264_PROFILE_HIGH_422:
690 case GST_H264_PROFILE_HIGH_444:
691 if (sps->constraint_set3_flag)
692 max_dec_frame_buffering = 0;
698 num_views = get_num_views (sps);
699 max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage (num_views - 1) : 1);
700 if (max_dec_frame_buffering > max_dpb_frames)
701 max_dec_frame_buffering = max_dpb_frames;
702 else if (max_dec_frame_buffering < sps->num_ref_frames)
703 max_dec_frame_buffering = sps->num_ref_frames;
704 return MAX (1, max_dec_frame_buffering);
708 array_remove_index_fast (void *array, guint * array_length_ptr, guint index)
710 gpointer *const entries = array;
711 guint num_entries = *array_length_ptr;
713 g_return_if_fail (index < num_entries);
715 if (index != --num_entries)
716 entries[index] = entries[num_entries];
717 entries[num_entries] = NULL;
718 *array_length_ptr = num_entries;
/* Order-insensitive wrapper: used when strict DPB ordering is disabled.
 * NOTE(review): the preprocessor conditionals selecting between the two
 * array_remove_index definitions below are on elided lines -- presumably
 * keyed on USE_STRICT_DPB_ORDERING; confirm against the full file. */
723 array_remove_index (void *array, guint * array_length_ptr, guint index)
725   array_remove_index_fast (array, array_length_ptr, index);
/* Order-preserving variant: shifts the tail down by one slot (O(n)). */
729 array_remove_index (void *array, guint * array_length_ptr, guint index)
731   gpointer *const entries = array;
732   const guint num_entries = *array_length_ptr - 1;
735   g_return_if_fail (index <= num_entries);
737   for (i = index; i < num_entries; i++)
738     entries[i] = entries[i + 1];
739   entries[num_entries] = NULL;
740   *array_length_ptr = num_entries;
/* Convenience: removes from one of the fixed ref arrays whose length
   lives in a sibling <name>_count variable */
744 #define ARRAY_REMOVE_INDEX(array, index) \
745     array_remove_index(array, &array##_count, index)
/* Removes the frame store at @index from the DPB. With strict ordering,
 * the tail is shifted down to preserve order; otherwise the last entry is
 * swapped into the hole (cheaper). The vacated tail slot is released. */
748 dpb_remove_index (GstVaapiDecoderH264 * decoder, guint index)
750   GstVaapiDecoderH264Private *const priv = &decoder->priv;
751   guint i, num_frames = --priv->dpb_count;
753   if (USE_STRICT_DPB_ORDERING) {
754     for (i = index; i < num_frames; i++)
755       gst_vaapi_frame_store_replace (&priv->dpb[i], priv->dpb[i + 1]);
756   } else if (index != num_frames)
757     gst_vaapi_frame_store_replace (&priv->dpb[index], priv->dpb[num_frames]);
758   gst_vaapi_frame_store_replace (&priv->dpb[num_frames], NULL);
/* Pushes the frame held by @fs downstream. Only complete frame stores are
 * output; each buffered picture is marked no-longer-needed, ghost
 * ("non-existing") pictures are skipped when picking the picture to
 * actually submit. */
762 dpb_output (GstVaapiDecoderH264 * decoder, GstVaapiFrameStore * fs)
764   GstVaapiPictureH264 *picture = NULL;
767   g_return_val_if_fail (fs != NULL, FALSE);
770   if (!gst_vaapi_frame_store_is_complete (fs))
773   for (i = 0; i < fs->num_buffers; i++) {
774     GstVaapiPictureH264 *const pic = fs->buffers[i];
777     pic->output_needed = FALSE;
778     if (!GST_VAAPI_PICTURE_FLAG_IS_SET (pic, GST_VAAPI_PICTURE_FLAG_GHOST))
782   fs->output_needed = 0;
783   fs->output_called = 0;
786   return gst_vaapi_picture_output (GST_VAAPI_PICTURE_CAST (picture));
/* Evicts DPB entry @i once it is neither awaiting output nor used for
 * reference (C.4.4 "empty frame buffer" condition). */
790 dpb_evict (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture,
793   GstVaapiDecoderH264Private *const priv = &decoder->priv;
794   GstVaapiFrameStore *const fs = priv->dpb[i];
796   if (!fs->output_needed && !gst_vaapi_frame_store_has_reference (fs))
797     dpb_remove_index (decoder, i);
800 /* Finds the picture with the nearest previous POC and same structure */
/* Searches the DPB (same view only) for the picture with the largest POC
 * strictly below @picture's, matching @picture_structure (defaults to
 * @picture's own structure when 0). Returns the DPB index, or -1, and the
 * picture through @found_picture_ptr. */
802 dpb_find_nearest_prev_poc (GstVaapiDecoderH264 * decoder,
803     GstVaapiPictureH264 * picture, guint picture_structure,
804     GstVaapiPictureH264 ** found_picture_ptr)
806   GstVaapiDecoderH264Private *const priv = &decoder->priv;
807   GstVaapiPictureH264 *found_picture = NULL;
808   guint i, j, found_index = -1;
810   g_return_val_if_fail (picture != NULL, -1);
812   if (!picture_structure)
813     picture_structure = picture->base.structure;
815   for (i = 0; i < priv->dpb_count; i++) {
816     GstVaapiFrameStore *const fs = priv->dpb[i];
817     if (picture->base.view_id != fs->view_id)
819     for (j = 0; j < fs->num_buffers; j++) {
820       GstVaapiPictureH264 *const pic = fs->buffers[j];
821       if (pic->base.structure != picture_structure)
823       if (pic->base.poc >= picture->base.poc)
825       if (!found_picture || found_picture->base.poc < pic->base.poc)
826         found_picture = pic, found_index = i;
830   if (found_picture_ptr)
831     *found_picture_ptr = found_picture;
835 /* Finds the picture with the lowest POC that needs to be output */
/* Returns the DPB index (and picture) with the smallest POC still awaiting
 * output, same view as @picture (ties broken by lowest VOC). When
 * @can_be_output is non-NULL, additionally decides whether that picture
 * may be output now for low-latency mode: yes when it is the first frame
 * in the DPB, or within 2 POC of the highest already-output POC still held;
 * a lower-than-already-output POC is declared out-of-sequence and dropped. */
837 dpb_find_lowest_poc_for_output (GstVaapiDecoderH264 * decoder,
838     GstVaapiPictureH264 * picture, GstVaapiPictureH264 ** found_picture_ptr,
839     gboolean * can_be_output)
841   GstVaapiDecoderH264Private *const priv = &decoder->priv;
842   GstVaapiPictureH264 *found_picture = NULL;
843   guint i, j, found_index = -1, found_poc = -1;
844   gboolean is_first = TRUE;
845   gint last_output_poc = -1;
847   for (i = 0; i < priv->dpb_count; i++) {
848     GstVaapiFrameStore *const fs = priv->dpb[i];
849     if (!fs->output_needed) {
850       /* find the maximum poc of any previously output frames that are
851        * still held in the DPB. */
852       if (can_be_output != NULL) {
853         for (j = 0; j < fs->num_buffers; j++) {
854           if (is_first || fs->buffers[j]->base.poc > last_output_poc) {
856             last_output_poc = fs->buffers[j]->base.poc;
862     if (picture && picture->base.view_id != fs->view_id)
864     for (j = 0; j < fs->num_buffers; j++) {
865       GstVaapiPictureH264 *const pic = fs->buffers[j];
866       if (!pic->output_needed)
868       if (!found_picture || found_picture->base.poc > pic->base.poc ||
869           (found_picture->base.poc == pic->base.poc &&
870               found_picture->base.voc > pic->base.voc))
871         found_picture = pic, found_index = i, found_poc = pic->base.poc;
875   if (can_be_output != NULL) {
876     /* found_picture can be output if it's the first frame in the DPB,
877      * or if there's no gap between it and the most recently output
879     *can_be_output = FALSE;
881         gst_vaapi_frame_store_is_complete (priv->dpb[found_index])) {
883       *can_be_output = TRUE;
884     } else if (((int) (found_poc)) > ((int) (last_output_poc))) {
885       *can_be_output = (found_poc - last_output_poc) <= 2;
887       /* A frame with a higher poc has already been sent. No choice
888        * now but to drop this frame */
889       GST_WARNING ("dropping out-of-sequence frame");
890       priv->dpb[found_index]->output_needed = FALSE;
895   if (found_picture_ptr)
896     *found_picture_ptr = found_picture;
900 /* Finds the picture with the lowest POC that needs to be output */
/* Thin wrapper: same search, without the low-latency output gating. */
902 dpb_find_lowest_poc (GstVaapiDecoderH264 * decoder,
903     GstVaapiPictureH264 * picture, GstVaapiPictureH264 ** found_picture_ptr)
905   return dpb_find_lowest_poc_for_output (decoder, picture, found_picture_ptr,
910 /* Finds the picture with the lowest VOC that needs to be output */
/* MVC helper: among pictures of OTHER views sharing @picture's POC (the
 * same access unit), returns the DPB index of the lowest view order
 * component still awaiting output, or -1. */
912 dpb_find_lowest_voc (GstVaapiDecoderH264 * decoder,
913     GstVaapiPictureH264 * picture, GstVaapiPictureH264 ** found_picture_ptr)
915   GstVaapiDecoderH264Private *const priv = &decoder->priv;
916   GstVaapiPictureH264 *found_picture = NULL;
917   guint i, j, found_index = -1;
919   for (i = 0; i < priv->dpb_count; i++) {
920     GstVaapiFrameStore *const fs = priv->dpb[i];
921     if (!fs->output_needed || fs->view_id == picture->base.view_id)
923     for (j = 0; j < fs->num_buffers; j++) {
924       GstVaapiPictureH264 *const pic = fs->buffers[j];
925       if (!pic->output_needed || pic->base.poc != picture->base.poc)
927       if (!found_picture || found_picture->base.voc > pic->base.voc)
928         found_picture = pic, found_index = i;
932   if (found_picture_ptr)
933     *found_picture_ptr = found_picture;
/* MVC: outputs (in VOC order, up to @voc) the remaining view components
 * that belong to the same access unit as @picture. No-op for
 * single-view streams. */
938 dpb_output_other_views (GstVaapiDecoderH264 * decoder,
939     GstVaapiPictureH264 * picture, guint voc)
941   GstVaapiDecoderH264Private *const priv = &decoder->priv;
942   GstVaapiPictureH264 *found_picture;
946   if (priv->max_views == 1)
949   /* Emit all other view components that were in the same access
950      unit than the picture we have just found */
951   found_picture = picture;
953     found_index = dpb_find_lowest_voc (decoder, found_picture, &found_picture);
954     if (found_index < 0 || found_picture->base.voc >= voc)
956     success = dpb_output (decoder, priv->dpb[found_index]);
957     dpb_evict (decoder, found_picture, found_index);
/* DPB "bumping" (C.4.5.3): outputs the lowest-POC picture awaiting output
 * (plus, for MVC, its access-unit siblings in the other views) and evicts
 * frame stores that become unused, freeing room in the DPB. */
965 dpb_bump (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
967   GstVaapiDecoderH264Private *const priv = &decoder->priv;
968   GstVaapiPictureH264 *found_picture;
972   found_index = dpb_find_lowest_poc (decoder, picture, &found_picture);
976   gst_vaapi_picture_ref (found_picture);
978   if (picture && picture->base.poc != found_picture->base.poc)
979     dpb_output_other_views (decoder, found_picture, found_picture->base.voc);
981   success = dpb_output (decoder, priv->dpb[found_index]);
983   dpb_evict (decoder, found_picture, found_index);
984   if (priv->max_views == 1)
987   if (picture && picture->base.poc != found_picture->base.poc)
988     dpb_output_other_views (decoder, found_picture, G_MAXUINT32);
991   gst_vaapi_picture_unref (found_picture);
/* Low-latency path: repeatedly outputs DPB frames for which
 * dpb_find_lowest_poc_for_output() reports "can be output now",
 * stopping at the first gap. */
996 dpb_output_ready_frames (GstVaapiDecoderH264 * decoder)
998   GstVaapiDecoderH264Private *const priv = &decoder->priv;
999   gboolean can_output = FALSE;
1003     found_index = dpb_find_lowest_poc_for_output (decoder,
1004         priv->current_picture, NULL, &can_output);
1005     if (found_index < 0 || !can_output)
1007     dpb_output (decoder, priv->dpb[found_index]);
/* Clears the DPB. With @picture == NULL this is a flush-all; otherwise only
 * entries of @picture's view are dropped and the DPB is compacted.
 * prev_frames is reset at access-unit start, prev_ref_frames only on a
 * flush-all or an IDR picture. */
1012 dpb_clear (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
1014   GstVaapiDecoderH264Private *const priv = &decoder->priv;
1017   for (i = 0; i < priv->dpb_count; i++) {
1018     if (picture && picture->base.view_id != priv->dpb[i]->view_id)
1020     gst_vaapi_frame_store_replace (&priv->dpb[i], NULL);
1023   /* Compact the resulting DPB, i.e. remove holes */
1024   for (i = 0, n = 0; i < priv->dpb_count; i++) {
1027       priv->dpb[n] = priv->dpb[i];
1028       priv->dpb[i] = NULL;
1033   priv->dpb_count = n;
1035   /* Clear previous frame buffers only if this is a "flush-all" operation,
1036      or if the picture is the first one in the access unit */
1037   if (priv->prev_frames && (!picture ||
1038           GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
1039               GST_VAAPI_PICTURE_FLAG_AU_START))) {
1040     for (i = 0; i < priv->max_views; i++)
1041       gst_vaapi_frame_store_replace (&priv->prev_frames[i], NULL);
1044   /* Clear previous reference frame buffers only if this is a "flush-all"
1045      operation, or if the picture is part of an IDR NAL */
1046   if (priv->prev_ref_frames && (!picture ||
1047           GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
1048               GST_VAAPI_PICTURE_FLAG_IDR))) {
1049     for (i = 0; i < priv->max_views; i++)
1050       gst_vaapi_frame_store_replace (&priv->prev_ref_frames[i], NULL);
/* Flushes the DPB: marks incomplete (single-field) stores as one-field so
 * they become outputtable, bumps every remaining frame out, then clears. */
1055 dpb_flush (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
1057   GstVaapiDecoderH264Private *const priv = &decoder->priv;
1060   /* Detect broken frames and mark them as having a single field if
1062   for (i = 0; i < priv->dpb_count; i++) {
1063     GstVaapiFrameStore *const fs = priv->dpb[i];
1064     if (!fs->output_needed || gst_vaapi_frame_store_is_complete (fs))
1066     GST_VAAPI_PICTURE_FLAG_SET (fs->buffers[0],
1067         GST_VAAPI_PICTURE_FLAG_ONEFIELD);
1070   /* Output any frame remaining in DPB */
1071   while (dpb_bump (decoder, picture));
1072   dpb_clear (decoder, picture);
/* MVC pruning: drops inter-view-only reference components of other views
 * that are no longer needed -- not awaiting output, not a reference, and
 * (unless this is the last picture of the access unit) not an inter-view
 * reference for upcoming frames. */
1076 dpb_prune_mvc (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
1078   GstVaapiDecoderH264Private *const priv = &decoder->priv;
1079   const gboolean is_last_picture = /* in the access unit */
1080       GST_VAAPI_PICTURE_FLAG_IS_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_END);
1083   // Remove all unused inter-view only reference components of the current AU
1085   while (i < priv->dpb_count) {
1086     GstVaapiFrameStore *const fs = priv->dpb[i];
1087     if (fs->view_id != picture->base.view_id &&
1088         !fs->output_needed && !gst_vaapi_frame_store_has_reference (fs) &&
1090             !is_inter_view_reference_for_next_frames (decoder, fs)))
1091       dpb_remove_index (decoder, i);
/* Adds the decoded picture into the DPB, per C.4.5.1 (reference
 * pictures) and C.4.5.2 (non-reference pictures).  Also pairs a second
 * field with its first field, re-outputs a previously delayed frame,
 * and handles MVC inter-view-only storage.
 *
 * NOTE(review): the embedded original line numbers in this listing are
 * not contiguous — several lines (error returns, closing braces) are
 * missing from this excerpt, so the control flow shown is partial.
 */
1098 dpb_add (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
1100 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1101 GstVaapiFrameStore *fs;
/* MVC-only pruning of other views' unused inter-view references */
1104 if (priv->max_views > 1)
1105 dpb_prune_mvc (decoder, picture);
1107 // Remove all unused pictures
/* An IDR picture empties the DPB elsewhere, so skip pruning for IDR */
1108 if (!GST_VAAPI_PICTURE_IS_IDR (picture)) {
1110 while (i < priv->dpb_count) {
1111 GstVaapiFrameStore *const fs = priv->dpb[i];
1112 if (fs->view_id == picture->base.view_id &&
1113 !fs->output_needed && !gst_vaapi_frame_store_has_reference (fs))
1114 dpb_remove_index (decoder, i);
1119 // Check if picture is the second field and the first field is still in DPB
1120 if (GST_VAAPI_PICTURE_IS_INTERLACED (picture) &&
1121 !GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture)) {
1122 fs = priv->prev_frames[picture->base.voc];
/* Second field must complete the frame store of its own first field */
1123 if (!fs || &fs->buffers[0]->base != picture->base.parent_picture)
1125 if (!gst_vaapi_frame_store_add (fs, picture))
1128 if (fs->output_called)
1129 return dpb_output (decoder, fs);
1132 // Try to output the previous frame again if it was not submitted yet
1133 // e.g. delayed while waiting for the next field, or a field gap was closed
1134 fs = priv->prev_frames[picture->base.voc];
1135 if (fs && fs->output_called)
1136 dpb_output (decoder, fs);
1138 // Create new frame store, and split fields if necessary
1139 fs = gst_vaapi_frame_store_new (picture);
/* prev_frames[] takes its own reference; drop the creation reference */
1142 gst_vaapi_frame_store_replace (&priv->prev_frames[picture->base.voc], fs);
1143 gst_vaapi_frame_store_unref (fs);
1145 if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame (fs)) {
1146 if (!gst_vaapi_frame_store_split_fields (fs, priv->top_field_first))
1149 // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
/* Reference pictures always enter the DPB; bump out frames until room */
1150 if (GST_VAAPI_PICTURE_IS_REFERENCE (picture)) {
1151 while (priv->dpb_count == priv->dpb_size) {
1152 if (!dpb_bump (decoder, picture))
1155 gst_vaapi_frame_store_replace (&priv->prev_ref_frames[picture->base.voc],
1158 // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
/* Inter-view-only components are kept until the access unit ends */
1160 const gboolean StoreInterViewOnlyRefFlag =
1161 !GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
1162 GST_VAAPI_PICTURE_FLAG_AU_END) &&
1163 GST_VAAPI_PICTURE_FLAG_IS_SET (picture,
1164 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1165 if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
1167 while (priv->dpb_count == priv->dpb_size) {
1168 GstVaapiPictureH264 *found_picture;
/* If this picture would be output before anything in the DPB,
   output it immediately instead of storing it */
1169 if (!StoreInterViewOnlyRefFlag) {
1170 if (dpb_find_lowest_poc (decoder, picture, &found_picture) < 0 ||
1171 found_picture->base.poc > picture->base.poc)
1172 return dpb_output (decoder, fs);
1174 if (!dpb_bump (decoder, picture))
1178 gst_vaapi_frame_store_replace (&priv->dpb[priv->dpb_count++], fs);
1183 dpb_reset (GstVaapiDecoderH264 * decoder, guint dpb_size)
1185 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1187 if (dpb_size > priv->dpb_size_max) {
1188 priv->dpb = g_try_realloc_n (priv->dpb, dpb_size, sizeof (*priv->dpb));
1191 memset (&priv->dpb[priv->dpb_size_max], 0,
1192 (dpb_size - priv->dpb_size_max) * sizeof (*priv->dpb));
1193 priv->dpb_size_max = dpb_size;
1195 priv->dpb_size = dpb_size;
1197 GST_DEBUG ("DPB size %u", priv->dpb_size);
1202 unref_inter_view (GstVaapiPictureH264 * picture)
1206 GST_VAAPI_PICTURE_FLAG_UNSET (picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1207 gst_vaapi_picture_unref (picture);
1210 /* Resets MVC resources */
1212 mvc_reset (GstVaapiDecoderH264 * decoder)
1214 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1217 // Resize array of inter-view references
1218 if (!priv->inter_views) {
1219 priv->inter_views = g_ptr_array_new_full (priv->max_views,
1220 (GDestroyNotify) unref_inter_view);
1221 if (!priv->inter_views)
1224 // Resize array of previous frame buffers
1225 for (i = priv->max_views; i < priv->prev_frames_alloc; i++) {
1226 gst_vaapi_frame_store_replace (&priv->prev_ref_frames[i], NULL);
1227 gst_vaapi_frame_store_replace (&priv->prev_frames[i], NULL);
1230 priv->prev_ref_frames = g_try_realloc_n (priv->prev_ref_frames,
1231 priv->max_views, sizeof (*priv->prev_ref_frames));
1232 if (!priv->prev_ref_frames)
1233 goto error_allocate;
1235 priv->prev_frames = g_try_realloc_n (priv->prev_frames, priv->max_views,
1236 sizeof (*priv->prev_frames));
1237 if (!priv->prev_frames)
1238 goto error_allocate;
1240 for (i = priv->prev_frames_alloc; i < priv->max_views; i++) {
1241 priv->prev_ref_frames[i] = NULL;
1242 priv->prev_frames[i] = NULL;
1244 priv->prev_frames_alloc = priv->max_views;
1250 g_free (priv->prev_ref_frames);
1251 priv->prev_ref_frames = NULL;
1252 g_free (priv->prev_frames);
1253 priv->prev_frames = NULL;
1254 priv->prev_frames_alloc = 0;
1259 static GstVaapiDecoderStatus
1260 get_status (GstH264ParserResult result)
1262 GstVaapiDecoderStatus status;
1265 case GST_H264_PARSER_OK:
1266 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1268 case GST_H264_PARSER_NO_NAL_END:
1269 status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1271 case GST_H264_PARSER_ERROR:
1272 status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1275 status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1282 gst_vaapi_decoder_h264_close (GstVaapiDecoderH264 * decoder)
1284 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1286 gst_vaapi_picture_replace (&priv->current_picture, NULL);
1287 gst_vaapi_parser_info_h264_replace (&priv->prev_slice_pi, NULL);
1288 gst_vaapi_parser_info_h264_replace (&priv->prev_pi, NULL);
1290 dpb_clear (decoder, NULL);
1292 if (priv->inter_views) {
1293 g_ptr_array_unref (priv->inter_views);
1294 priv->inter_views = NULL;
1298 gst_h264_nal_parser_free (priv->parser);
1299 priv->parser = NULL;
1304 gst_vaapi_decoder_h264_open (GstVaapiDecoderH264 * decoder)
1306 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1308 gst_vaapi_decoder_h264_close (decoder);
1310 priv->parser = gst_h264_nal_parser_new ();
1317 gst_vaapi_decoder_h264_destroy (GstVaapiDecoder * base_decoder)
1319 GstVaapiDecoderH264 *const decoder =
1320 GST_VAAPI_DECODER_H264_CAST (base_decoder);
1321 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1324 gst_vaapi_decoder_h264_close (decoder);
1325 priv->is_opened = FALSE;
1327 g_clear_pointer (&priv->dpb, g_free);
1328 priv->dpb_size_max = priv->dpb_size = 0;
1330 g_clear_pointer (&priv->prev_ref_frames, g_free);
1331 g_clear_pointer (&priv->prev_frames, g_free);
1332 priv->prev_frames_alloc = 0;
1334 for (i = 0; i < G_N_ELEMENTS (priv->pps); i++)
1335 gst_vaapi_parser_info_h264_replace (&priv->pps[i], NULL);
1336 gst_vaapi_parser_info_h264_replace (&priv->active_pps, NULL);
1338 for (i = 0; i < G_N_ELEMENTS (priv->sps); i++)
1339 gst_vaapi_parser_info_h264_replace (&priv->sps[i], NULL);
1340 gst_vaapi_parser_info_h264_replace (&priv->active_sps, NULL);
/* Initializes the private state to its defaults when the decoder
 * object is created.
 *
 * NOTE(review): original line numbers in this listing are not
 * contiguous; further field initializations (and the return statement)
 * may be missing from this excerpt — verify against the full file.
 */
1344 gst_vaapi_decoder_h264_create (GstVaapiDecoder * base_decoder)
1346 GstVaapiDecoderH264 *const decoder =
1347 GST_VAAPI_DECODER_H264_CAST (base_decoder);
1348 GstVaapiDecoderH264Private *const priv = &decoder->priv;
/* Stream properties are unknown until the first SPS is decoded */
1350 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1351 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1352 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1353 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1354 priv->progressive_sequence = TRUE;
1355 priv->top_field_first = FALSE;
/* No frame-packing SEI seen yet: mono, no flags */
1356 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_MONO;
1357 priv->stereo_info.flags = GST_VIDEO_MULTIVIEW_FLAGS_NONE;
/* Limited reset: gets the decoder ready to process fresh data after a
 * flush.  Preserves the existing DPB allocation and cached SPS/PPS,
 * only the active SPS/PPS and per-view frame arrays are dropped.
 *
 * NOTE(review): original line numbers in this listing are not
 * contiguous; some statements between close() and the g_clear_pointer
 * calls may be missing from this excerpt.
 */
1364 static GstVaapiDecoderStatus
1365 gst_vaapi_decoder_h264_reset (GstVaapiDecoder * base_decoder)
1367 GstVaapiDecoderH264 *const decoder =
1368 GST_VAAPI_DECODER_H264_CAST (base_decoder);
1369 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1371 gst_vaapi_decoder_h264_close (decoder);
1372 priv->is_opened = FALSE;
/* Per-view previous-frame arrays are rebuilt by mvc_reset() on demand */
1376 g_clear_pointer (&priv->prev_ref_frames, g_free);
1377 g_clear_pointer (&priv->prev_frames, g_free);
1378 priv->prev_frames_alloc = 0;
/* Drop only the *active* parameter sets; the per-id caches survive */
1379 gst_vaapi_parser_info_h264_replace (&priv->active_pps, NULL);
1380 gst_vaapi_parser_info_h264_replace (&priv->active_sps, NULL);
/* Restore creation-time defaults for the next sequence */
1382 priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
1383 priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
1384 priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
1385 priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1386 priv->progressive_sequence = TRUE;
1387 priv->top_field_first = FALSE;
1389 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1392 /* Activates the supplied PPS */
1394 ensure_pps (GstVaapiDecoderH264 * decoder, GstH264PPS * pps)
1396 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1397 GstVaapiParserInfoH264 *const pi = priv->pps[pps->id];
1399 gst_vaapi_parser_info_h264_replace (&priv->active_pps, pi);
1400 return pi ? &pi->data.pps : NULL;
1403 /* Returns the active PPS */
1404 static inline GstH264PPS *
1405 get_pps (GstVaapiDecoderH264 * decoder)
1407 GstVaapiParserInfoH264 *const pi = decoder->priv.active_pps;
1409 return pi ? &pi->data.pps : NULL;
1412 /* Activate the supplied SPS */
1414 ensure_sps (GstVaapiDecoderH264 * decoder, GstH264SPS * sps)
1416 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1417 GstVaapiParserInfoH264 *const pi = priv->sps[sps->id];
1419 /* Propagate "got I-frame" state to the next SPS unit if the
1420 current sequence was not ended */
1421 if (pi && priv->active_sps)
1422 pi->state |= (priv->active_sps->state & GST_H264_VIDEO_STATE_GOT_I_FRAME);
1424 gst_vaapi_parser_info_h264_replace (&priv->active_sps, pi);
1425 return pi ? &pi->data.sps : NULL;
1428 /* Returns the active SPS */
1429 static inline GstH264SPS *
1430 get_sps (GstVaapiDecoderH264 * decoder)
1432 GstVaapiParserInfoH264 *const pi = decoder->priv.active_sps;
1434 return pi ? &pi->data.sps : NULL;
1438 fill_profiles (GstVaapiProfile profiles[], guint * n_profiles_ptr,
1439 GstVaapiProfile profile)
1441 guint n_profiles = *n_profiles_ptr;
1443 profiles[n_profiles++] = profile;
1445 case GST_VAAPI_PROFILE_H264_MAIN:
1446 profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1451 *n_profiles_ptr = n_profiles;
1454 /* Fills in compatible profiles for MVC decoding */
1456 fill_profiles_mvc (GstVaapiDecoderH264 * decoder, GstVaapiProfile profiles[],
1457 guint * n_profiles_ptr, guint dpb_size)
1459 const gchar *const vendor_string =
1460 gst_vaapi_display_get_vendor_string (GST_VAAPI_DECODER_DISPLAY (decoder));
1462 gboolean add_high_profile = FALSE;
1468 const struct map *m;
1470 // Drivers that support slice level decoding
1471 if (vendor_string && dpb_size <= 16) {
1472 static const struct map drv_names[] = {
1473 {"Intel i965 driver", 17},
1476 for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1477 if (g_ascii_strncasecmp (vendor_string, m->str, m->str_len) == 0)
1478 add_high_profile = TRUE;
1482 if (add_high_profile)
1483 fill_profiles (profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1486 static GstVaapiProfile
1487 get_profile (GstVaapiDecoderH264 * decoder, GstH264SPS * sps, guint dpb_size)
1489 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1490 GstVaapiDisplay *const display = GST_VAAPI_DECODER_DISPLAY (decoder);
1491 GstVaapiProfile profile, profiles[4];
1492 guint i, n_profiles = 0;
1494 profile = gst_vaapi_utils_h264_get_profile (sps->profile_idc);
1496 return GST_VAAPI_PROFILE_UNKNOWN;
1498 fill_profiles (profiles, &n_profiles, profile);
1500 case GST_VAAPI_PROFILE_H264_BASELINE:
1501 GST_INFO ("Baseline stream to be processed as Constrained-Baseline or "
1503 fill_profiles (profiles, &n_profiles,
1504 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1505 fill_profiles (profiles, &n_profiles, GST_VAAPI_PROFILE_H264_MAIN);
1507 case GST_VAAPI_PROFILE_H264_EXTENDED:
1508 if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1509 fill_profiles (profiles, &n_profiles, GST_VAAPI_PROFILE_H264_MAIN);
1512 case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1513 if (priv->max_views == 2) {
1514 fill_profiles (profiles, &n_profiles,
1515 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1517 fill_profiles_mvc (decoder, profiles, &n_profiles, dpb_size);
1519 case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1520 if (sps->frame_mbs_only_flag) {
1521 fill_profiles (profiles, &n_profiles,
1522 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1524 fill_profiles_mvc (decoder, profiles, &n_profiles, dpb_size);
1530 /* If the preferred profile (profiles[0]) matches one that we already
1531 found, then just return it now instead of searching for it again */
1532 if (profiles[0] == priv->profile)
1533 return priv->profile;
1535 for (i = 0; i < n_profiles; i++) {
1536 if (gst_vaapi_display_has_decoder (display, profiles[i], priv->entrypoint))
1539 return GST_VAAPI_PROFILE_UNKNOWN;
/* Ensures the VA decoding context matches the active SPS: tracks view
 * count, DPB size, profile, chroma format, picture size and interlace
 * mode, and (re)creates the VA context plus DPB/MVC state when any of
 * them changed.
 *
 * NOTE(review): the embedded original line numbers are not contiguous;
 * case labels, break statements and closing braces are missing from
 * this excerpt — the visible switch structure is partial.
 */
1543 ensure_context (GstVaapiDecoderH264 * decoder, GstH264SPS * sps)
1545 GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER_CAST (decoder);
1546 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1547 GstVaapiContextInfo info;
1548 GstVaapiProfile profile;
1549 GstVaapiChromaType chroma_type;
1550 gboolean reset_context = FALSE;
1551 guint mb_width, mb_height, dpb_size, num_views;
/* A growing view count forces a context reset */
1553 num_views = get_num_views (sps);
1554 if (priv->max_views < num_views) {
1555 priv->max_views = num_views;
1556 reset_context = TRUE;
1557 GST_DEBUG ("maximum number of views changed to %u", num_views);
/* Likewise for a growing max_dec_frame_buffering requirement */
1560 dpb_size = get_max_dec_frame_buffering (sps);
1561 if (priv->dpb_size < dpb_size) {
1562 GST_DEBUG ("DPB size increased");
1563 reset_context = TRUE;
1566 profile = get_profile (decoder, sps, dpb_size);
1568 GST_ERROR ("unsupported profile_idc %u", sps->profile_idc);
1569 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
/* A profile change only resets the context for single-view streams */
1572 if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1573 GST_DEBUG ("profile changed to %x", profile);
1574 reset_context = TRUE;
1575 priv->profile = profile;
/* Propagate the multiview output mode downstream, per view count */
1578 if (reset_context) {
1579 switch (num_views) {
1581 /* Frame-packed mode details should be used if we got */
1582 if (priv->stereo_info.mode != GST_VIDEO_MULTIVIEW_MODE_NONE) {
1583 gst_vaapi_decoder_set_multiview_mode (base_decoder,
1584 2, priv->stereo_info.mode, priv->stereo_info.flags);
1586 gst_vaapi_decoder_set_multiview_mode (base_decoder,
1587 num_views, GST_VIDEO_MULTIVIEW_MODE_NONE,
1588 GST_VIDEO_MULTIVIEW_FLAGS_NONE);
1591 case 2: /* Assume stereo */
1592 if (profile == GST_VAAPI_PROFILE_H264_STEREO_HIGH) {
1593 GST_DEBUG ("Stereo profile - frame-by-frame output, %d views",
1595 gst_vaapi_decoder_set_multiview_mode (base_decoder, num_views,
1596 GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME,
1597 GST_VIDEO_MULTIVIEW_FLAGS_NONE);
1600 /* non-stereo 2 views. Fall through */
1602 GST_DEBUG ("Multiview profile - frame-by-frame output, %d views",
1604 gst_vaapi_decoder_set_multiview_mode (base_decoder, num_views,
1605 GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME,
1606 GST_VIDEO_MULTIVIEW_FLAGS_NONE);
1611 chroma_type = gst_vaapi_utils_h264_get_chroma_type (sps->chroma_format_idc);
1613 GST_ERROR ("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1614 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1617 if (priv->chroma_type != chroma_type) {
1618 GST_DEBUG ("chroma format changed");
1619 reset_context = TRUE;
1620 priv->chroma_type = chroma_type;
/* Height in map units doubles for field-coded (interlaced) content */
1623 mb_width = sps->pic_width_in_mbs_minus1 + 1;
1625 (sps->pic_height_in_map_units_minus1 + 1) << !sps->frame_mbs_only_flag;
1626 if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1627 GST_DEBUG ("size changed");
1628 reset_context = TRUE;
1629 priv->mb_width = mb_width;
1630 priv->mb_height = mb_height;
1633 if (priv->progressive_sequence != sps->frame_mbs_only_flag) {
1634 GST_DEBUG ("interlacing-mode changed");
1635 priv->progressive_sequence = sps->frame_mbs_only_flag;
1636 gst_vaapi_decoder_set_interlaced (base_decoder,
1637 !priv->progressive_sequence);
1638 priv->top_field_first = FALSE;
1641 gst_vaapi_decoder_set_pixel_aspect_ratio (base_decoder,
1642 sps->vui_parameters.par_n, sps->vui_parameters.par_d);
/* Nothing changed and a context exists: done */
1644 if (!reset_context && priv->has_context)
1645 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1647 /* XXX: fix surface size when cropping is implemented */
1648 info.profile = priv->profile;
1649 info.entrypoint = priv->entrypoint;
1650 info.chroma_type = priv->chroma_type;
1651 info.width = sps->width;
1652 info.height = sps->height;
1653 info.ref_frames = dpb_size;
1655 if (!gst_vaapi_decoder_ensure_context (GST_VAAPI_DECODER (decoder), &info))
1656 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1657 priv->has_context = TRUE;
/* Reset DPB */
1660 if (!dpb_reset (decoder, dpb_size))
1661 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1663 /* Reset MVC data */
1664 if (!mvc_reset (decoder))
1665 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1666 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1670 fill_iq_matrix_4x4 (VAIQMatrixBufferH264 * iq_matrix, const GstH264PPS * pps,
1671 const GstH264SPS * sps)
1675 /* There are always 6 4x4 scaling lists */
1676 g_assert (G_N_ELEMENTS (iq_matrix->ScalingList4x4) == 6);
1677 g_assert (G_N_ELEMENTS (iq_matrix->ScalingList4x4[0]) == 16);
1679 for (i = 0; i < G_N_ELEMENTS (iq_matrix->ScalingList4x4); i++)
1680 gst_h264_quant_matrix_4x4_get_raster_from_zigzag (iq_matrix->ScalingList4x4
1681 [i], pps->scaling_lists_4x4[i]);
1685 fill_iq_matrix_8x8 (VAIQMatrixBufferH264 * iq_matrix, const GstH264PPS * pps,
1686 const GstH264SPS * sps)
1690 /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1691 if (!pps->transform_8x8_mode_flag)
1694 g_assert (G_N_ELEMENTS (iq_matrix->ScalingList8x8) >= 2);
1695 g_assert (G_N_ELEMENTS (iq_matrix->ScalingList8x8[0]) == 64);
1697 n = (sps->chroma_format_idc != 3) ? 2 : 6;
1698 for (i = 0; i < n; i++) {
1699 gst_h264_quant_matrix_8x8_get_raster_from_zigzag (iq_matrix->ScalingList8x8
1700 [i], pps->scaling_lists_8x8[i]);
1704 static GstVaapiDecoderStatus
1705 ensure_quant_matrix (GstVaapiDecoderH264 * decoder,
1706 GstVaapiPictureH264 * picture)
1708 GstVaapiPicture *const base_picture = &picture->base;
1709 GstH264PPS *const pps = get_pps (decoder);
1710 GstH264SPS *const sps = get_sps (decoder);
1711 VAIQMatrixBufferH264 *iq_matrix;
1713 base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW (H264, decoder);
1714 if (!base_picture->iq_matrix) {
1715 GST_ERROR ("failed to allocate IQ matrix");
1716 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1718 iq_matrix = base_picture->iq_matrix->param;
1720 /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1721 is not large enough to hold lists for 4:4:4 */
1722 if (sps->chroma_format_idc == 3)
1723 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1725 fill_iq_matrix_4x4 (iq_matrix, pps, sps);
1726 fill_iq_matrix_8x8 (iq_matrix, pps, sps);
1728 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1731 static inline gboolean
1732 is_valid_state (guint state, guint ref_state)
1734 return (state & ref_state) == ref_state;
/* Submits the current picture for decoding, performs reference picture
 * marking, and stores it into the DPB.  Pictures arriving before the
 * first I-frame of the sequence are dropped.
 *
 * NOTE(review): the embedded original line numbers are not contiguous;
 * the goto labels (error/drop path entries) and several intermediate
 * lines are missing from this excerpt, so the error control flow shown
 * is partial.
 */
1738 decode_current_picture (GstVaapiDecoderH264 * decoder)
1740 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1741 GstVaapiParserInfoH264 *const sps_pi = decoder->priv.active_sps;
1742 GstVaapiPictureH264 *const picture = priv->current_picture;
/* Nothing to do until a full valid picture was accumulated */
1744 if (!is_valid_state (priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1747 priv->decoder_state |= sps_pi->state;
/* Drop P/B pictures seen before the sequence's first I frame */
1748 if (!(priv->decoder_state & GST_H264_VIDEO_STATE_GOT_I_FRAME)) {
1749 if (priv->decoder_state & GST_H264_VIDEO_STATE_GOT_P_SLICE)
1751 sps_pi->state |= GST_H264_VIDEO_STATE_GOT_I_FRAME;
/* Per-picture state is reset once the picture was handled */
1754 priv->decoder_state = 0;
1755 priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
1758 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Submit to the hardware, mark references, then store in the DPB */
1760 if (!gst_vaapi_picture_decode (GST_VAAPI_PICTURE_CAST (picture)))
1762 if (!exec_ref_pic_marking (decoder, picture))
1764 if (!dpb_add (decoder, picture))
/* Low-latency mode pushes ready frames out immediately */
1767 if (priv->force_low_latency)
1768 dpb_output_ready_frames (decoder);
1769 gst_vaapi_picture_replace (&priv->current_picture, NULL);
1770 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Error path */
1775 /* XXX: fix for cases where first field failed to be decoded */
1776 gst_vaapi_picture_replace (&priv->current_picture, NULL);
1777 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
/* Drop-frame path */
1782 priv->decoder_state = 0;
1783 priv->pic_structure = GST_H264_SEI_PIC_STRUCT_FRAME;
1784 return (GstVaapiDecoderStatus) GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1788 static GstVaapiDecoderStatus
1789 parse_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1791 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1792 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1793 GstH264SPS *const sps = &pi->data.sps;
1794 GstH264ParserResult result;
1796 GST_DEBUG ("parse SPS");
1798 priv->parser_state = 0;
1800 /* Variables that don't have inferred values per the H.264
1801 standard but that should get a default value anyway */
1802 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1804 result = gst_h264_parser_parse_sps (priv->parser, &pi->nalu, sps);
1805 if (result != GST_H264_PARSER_OK)
1806 return get_status (result);
1808 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1809 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1812 static GstVaapiDecoderStatus
1813 parse_subset_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1815 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1816 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1817 GstH264SPS *const sps = &pi->data.sps;
1818 GstH264ParserResult result;
1820 GST_DEBUG ("parse subset SPS");
1822 /* Variables that don't have inferred values per the H.264
1823 standard but that should get a default value anyway */
1824 sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1826 result = gst_h264_parser_parse_subset_sps (priv->parser, &pi->nalu, sps);
1827 if (result != GST_H264_PARSER_OK)
1828 return get_status (result);
1830 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1831 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1834 static GstVaapiDecoderStatus
1835 parse_pps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1837 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1838 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1839 GstH264PPS *const pps = &pi->data.pps;
1840 GstH264ParserResult result;
1842 GST_DEBUG ("parse PPS");
1844 /* Variables that don't have inferred values per the H.264
1845 standard but that should get a default value anyway */
1846 pps->slice_group_map_type = 0;
1847 pps->slice_group_change_rate_minus1 = 0;
1848 pps->slice_group_id = NULL;
1850 result = gst_h264_parser_parse_pps (priv->parser, &pi->nalu, pps);
1852 /* PPS's sps id might be an ignored subset sps in SVC streams */
1853 if (priv->base_only && result == GST_H264_PARSER_BROKEN_LINK) {
1854 pi->nalu.valid = FALSE;
1855 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1858 priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1860 if (result != GST_H264_PARSER_OK)
1861 return get_status (result);
1863 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1865 if (pps->num_slice_groups_minus1 > 0) {
1866 GST_FIXME ("FMO is not supported");
1867 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1870 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1873 static GstVaapiDecoderStatus
1874 parse_sei (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1876 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1877 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1878 GArray **const sei_ptr = &pi->data.sei;
1879 GstH264ParserResult result;
1881 GST_DEBUG ("parse SEI");
1883 result = gst_h264_parser_parse_sei (priv->parser, &pi->nalu, sei_ptr);
1884 if (result != GST_H264_PARSER_OK) {
1885 GST_WARNING ("failed to parse SEI messages");
1886 return get_status (result);
1888 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parses a slice header NAL unit: propagates (or infers, per
 * H.7.4.1.1) the MVC Prefix NAL unit extension, parses the header, and
 * derives the view id / view order index.
 *
 * NOTE(review): the embedded original line numbers are not contiguous;
 * at least one MVC default assignment (between priority_id and
 * temporal_id, likely the inferred view_id) and the closing braces are
 * missing from this excerpt.
 */
1892 parse_slice (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1894 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1895 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1896 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
1897 GstH264NalUnit *const nalu = &pi->nalu;
1899 GstH264ParserResult result;
1901 GST_DEBUG ("parse slice");
/* Keep only SPS/PPS bits; per-slice state is recomputed below */
1903 priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS |
1904 GST_H264_VIDEO_STATE_GOT_PPS);
1906 /* Propagate Prefix NAL unit info, if necessary */
1907 switch (nalu->type) {
1908 case GST_H264_NAL_SLICE:
1909 case GST_H264_NAL_SLICE_IDR:{
1910 GstVaapiParserInfoH264 *const prev_pi = priv->prev_pi;
1911 if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1912 /* MVC sequences shall have a Prefix NAL unit immediately
1913 preceding this NAL unit */
1914 pi->nalu.extension_type = prev_pi->nalu.extension_type;
1915 pi->nalu.extension = prev_pi->nalu.extension;
1917 /* In the very unlikely case there is no Prefix NAL unit
1918 immediately preceding this NAL unit, try to infer some
1919 defaults (H.7.4.1.1) */
1920 GstH264NalUnitExtensionMVC *const mvc = &pi->nalu.extension.mvc;
1921 mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1922 nalu->idr_pic_flag = !mvc->non_idr_flag;
1923 mvc->priority_id = 0;
1925 mvc->temporal_id = 0;
1926 mvc->anchor_pic_flag = 0;
1927 mvc->inter_view_flag = 1;
1933 /* Variables that don't have inferred values per the H.264
1934 standard but that should get a default value anyway */
1935 slice_hdr->cabac_init_idc = 0;
1936 slice_hdr->direct_spatial_mv_pred_flag = 0;
/* TRUE, TRUE: also parse prediction weights and dec_ref_pic_marking */
1938 result = gst_h264_parser_parse_slice_hdr (priv->parser, &pi->nalu,
1939 slice_hdr, TRUE, TRUE);
1940 if (result != GST_H264_PARSER_OK)
1941 return get_status (result);
1943 sps = slice_hdr->pps->sequence;
1945 /* Update MVC data */
1946 pi->view_id = get_view_id (&pi->nalu);
1947 pi->voc = get_view_order_index (sps, pi->view_id);
1949 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
/* Any non-I slice means the stream carries P/B content */
1950 if (!GST_H264_IS_I_SLICE (slice_hdr))
1951 priv->parser_state |= GST_H264_VIDEO_STATE_GOT_P_SLICE;
1952 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1955 static GstVaapiDecoderStatus
1956 decode_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1958 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1959 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1960 GstH264SPS *const sps = &pi->data.sps;
1962 GST_DEBUG ("decode SPS");
1964 gst_vaapi_parser_info_h264_replace (&priv->sps[sps->id], pi);
1965 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1968 static GstVaapiDecoderStatus
1969 decode_subset_sps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1971 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1972 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1973 GstH264SPS *const sps = &pi->data.sps;
1975 GST_DEBUG ("decode subset SPS");
1977 gst_vaapi_parser_info_h264_replace (&priv->sps[sps->id], pi);
1978 return GST_VAAPI_DECODER_STATUS_SUCCESS;
1981 static GstVaapiDecoderStatus
1982 decode_pps (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
1984 GstVaapiDecoderH264Private *const priv = &decoder->priv;
1985 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
1986 GstH264PPS *const pps = &pi->data.pps;
1988 GST_DEBUG ("decode PPS");
1990 gst_vaapi_parser_info_h264_replace (&priv->pps[pps->id], pi);
1991 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Interprets a frame-packing arrangement SEI message and updates the
 * stereo 3D state (priv->stereo_info), notifying the base decoder when
 * the multiview mode or flags change.
 *
 * NOTE(review): the embedded original line numbers are not contiguous;
 * several return statements, closing braces and the temporal-
 * interleaving frame0/frame1 handling (lines 2078-2087) are missing
 * from this excerpt.
 */
1995 decode_sei_frame_packing (GstVaapiDecoderH264 * decoder,
1996 const GstH264FramePacking * frame_packing)
1998 GstVaapiDecoderH264Private *const priv = &decoder->priv;
/* Saved to detect an effective change at the end of the function */
1999 GstVideoMultiviewMode saved_mode = priv->stereo_info.mode;
2000 GstVideoMultiviewFlags saved_flags = priv->stereo_info.flags;
2001 gboolean left = TRUE;
2002 gboolean frame_revert = FALSE;
2004 /* Only IDs from 0->255 and 512->2^31-1 are valid. Ignore others */
2005 if ((frame_packing->frame_packing_id >= 256 &&
2006 frame_packing->frame_packing_id < 512) ||
2007 (frame_packing->frame_packing_id >= (1U << 31)))
/* Cancellation resets to mono only if it targets the tracked id */
2010 if (frame_packing->frame_packing_cancel_flag) {
2011 if (priv->stereo_info.id == frame_packing->frame_packing_id)
2012 priv->stereo_info = (GstVaapiStereo3DInfo) {
2013 GST_VIDEO_MULTIVIEW_MODE_MONO, GST_VIDEO_MULTIVIEW_FLAGS_NONE, 256, 0};
2017 if (frame_packing->frame_packing_repetition_period != 1) {
/* NOTE(review): double negative in this message — presumably meant
   "is not supported"; verify against upstream before changing */
2018 GST_WARNING ("SEI: repetition_period != 1 is not unsupported");
2022 if (frame_packing->frame_packing_type > GST_H264_FRAME_PACKING_NONE) {
2023 GST_WARNING ("SEI: unsupported frame_packing_type %d",
2024 frame_packing->frame_packing_type);
2028 if (frame_packing->content_interpretation_type >= 3) {
/* NOTE(review): message names content_interpretation_type but prints
   frame_packing_type — looks like a copy/paste bug; confirm upstream */
2029 GST_WARNING ("SEI: unsupported content_interpretation_type %d",
2030 frame_packing->frame_packing_type);
2034 /* TODO: frame frame0/1_grid_position_x/y are ignored now. */
/* Start from a clean mono state, then fill in from the SEI fields */
2036 priv->stereo_info = (GstVaapiStereo3DInfo) {
2037 GST_VIDEO_MULTIVIEW_MODE_MONO, GST_VIDEO_MULTIVIEW_FLAGS_NONE, 256, 0};
2039 switch (frame_packing->frame_packing_type) {
2040 case GST_H264_FRAME_PACKING_CHECKERBOARD_INTERLEAVING:
2041 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD;
2043 case GST_H264_FRAME_PACKING_COLUMN_INTERLEAVING:
2044 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED;
2046 case GST_H264_FRAME_PACKING_ROW_INTERLEAVING:
2047 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED;
2049 case GST_H264_FRAME_PACKING_SIDE_BY_SIDE:
2050 if (frame_packing->quincunx_sampling_flag) {
2051 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX;
2053 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE;
2056 case GST_H264_FRAME_PACKING_TOP_BOTTOM:
2057 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM;
2059 case GST_H264_FRAME_PACKING_TEMPORAL_INTERLEAVING:
2060 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME;
2063 priv->stereo_info.mode = GST_VIDEO_MULTIVIEW_MODE_MONO;
2067 /* Spec does not describe multi-IDs case, we just keep one valid */
2068 priv->stereo_info.id = frame_packing->frame_packing_id;
2069 priv->stereo_info.repetition_period =
2070 frame_packing->frame_packing_repetition_period;
/* content_interpretation_type == 2: frame 0 is the right view */
2072 if (frame_packing->content_interpretation_type == 2)
2073 frame_revert = TRUE;
2075 if (frame_packing->frame_packing_type ==
2076 GST_H264_FRAME_PACKING_TEMPORAL_INTERLEAVING) {
2077 if (frame_packing->current_frame_is_frame0_flag) {
2088 priv->stereo_info.flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST;
/* Spatial flipping handling: side-by-side uses the FLOPPED (horizontal)
   flags, top-bottom uses the FLIPPED (vertical) flags */
2090 if (frame_packing->frame_packing_type == GST_H264_FRAME_PACKING_SIDE_BY_SIDE
2091 && frame_packing->spatial_flipping_flag) {
2092 if (frame_packing->frame0_flipped_flag !=
2093 ((priv->stereo_info.flags &
2094 GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST) != 0)) {
2095 priv->stereo_info.flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED;
2097 priv->stereo_info.flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED;
2100 if (frame_packing->frame_packing_type == GST_H264_FRAME_PACKING_TOP_BOTTOM
2101 && frame_packing->spatial_flipping_flag !=
2102 ((priv->stereo_info.flags &
2103 GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST) != 0)) {
2104 if (frame_packing->frame0_flipped_flag) {
2105 priv->stereo_info.flags |= GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED;
2107 priv->stereo_info.flags |= GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED;
/* Notify the base decoder only on an effective change */
2111 if (saved_mode != priv->stereo_info.mode
2112 || saved_flags != priv->stereo_info.flags) {
2113 gst_vaapi_decoder_set_multiview_mode (GST_VAAPI_DECODER_CAST (decoder),
2114 2, priv->stereo_info.mode, priv->stereo_info.flags);
2120 static GstVaapiDecoderStatus
2121 decode_sei (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
2123 GstVaapiDecoderH264Private *const priv = &decoder->priv;
2124 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
2127 GST_DEBUG ("decode SEI messages");
2129 for (i = 0; i < pi->data.sei->len; i++) {
2130 const GstH264SEIMessage *const sei =
2131 &g_array_index (pi->data.sei, GstH264SEIMessage, i);
2133 switch (sei->payloadType) {
2134 case GST_H264_SEI_PIC_TIMING:{
2135 const GstH264PicTiming *const pic_timing = &sei->payload.pic_timing;
2136 if (pic_timing->pic_struct_present_flag)
2137 priv->pic_structure = pic_timing->pic_struct;
2140 case GST_H264_SEI_FRAME_PACKING:
2141 decode_sei_frame_packing (decoder, &sei->payload.frame_packing);
2147 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Handles an end-of-sequence NAL: clears the "got I-frame" bit on the
 * active SPS parser info, flushes the whole DPB, and resets max_views
 * for a possible new sequence.  Always returns SUCCESS.
 * NOTE(review): a guard around the sps_pi dereference (checking
 * active_sps for NULL) appears to be elided from this listing — confirm
 * against the full source. */
2150 static GstVaapiDecoderStatus
2151 decode_sequence_end (GstVaapiDecoderH264 * decoder)
2153   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2154   GstVaapiParserInfoH264 *const sps_pi = decoder->priv.active_sps;
2156   GST_DEBUG ("decode sequence-end");
2158   /* Sequence ended, don't try to propagate "got I-frame" state
2159      beyond this point */
2161     sps_pi->state &= ~GST_H264_VIDEO_STATE_GOT_I_FRAME;
2163   dpb_flush (decoder, NULL);
2165   /* Reset defaults, should there be a new sequence available next */
2166   priv->max_views = 1;
2167   return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* 8.2.1.1 - Decoding process for picture order count type 0 */
/* Computes PicOrderCntMsb/Lsb per H.264 spec 8.2.1.1 and stores the
 * resulting top/bottom field POCs into priv->field_poc[].  IDR pictures
 * and pictures following an MMCO-5 reset the msb/lsb predictors; the
 * lsb wrap-around test uses MaxPicOrderCntLsb/2 as in equation (8-3). */
2170 /* 8.2.1.1 - Decoding process for picture order count type 0 */
2172 init_picture_poc_0 (GstVaapiDecoderH264 * decoder,
2173     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2175   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2176   GstH264SPS *const sps = get_sps (decoder);
2177   const gint32 MaxPicOrderCntLsb =
2178       1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
2181   GST_DEBUG ("decode picture order count type 0");
2183   if (GST_VAAPI_PICTURE_IS_IDR (picture)) {
2184     priv->prev_poc_msb = 0;
2185     priv->prev_poc_lsb = 0;
2186   } else if (priv->prev_pic_has_mmco5) {
2187     priv->prev_poc_msb = 0;
2188     priv->prev_poc_lsb =
2189         (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
2190         0 : priv->field_poc[TOP_FIELD]);
2192     priv->prev_poc_msb = priv->poc_msb;
2193     priv->prev_poc_lsb = priv->poc_lsb;
2197   priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
2198   if (priv->poc_lsb < priv->prev_poc_lsb &&
2199       (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
2200     priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
2201   else if (priv->poc_lsb > priv->prev_poc_lsb &&
2202       (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
2203     priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
2205     priv->poc_msb = priv->prev_poc_msb;
2207   temp_poc = priv->poc_msb + priv->poc_lsb;
2208   switch (picture->structure) {
2209     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
      /* Frames carry both field POCs; bottom adds the slice-header delta. */
2211       priv->field_poc[TOP_FIELD] = temp_poc;
2212       priv->field_poc[BOTTOM_FIELD] = temp_poc +
2213           slice_hdr->delta_pic_order_cnt_bottom;
2215     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
2217       priv->field_poc[TOP_FIELD] = temp_poc;
2219     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
2221       priv->field_poc[BOTTOM_FIELD] = temp_poc;
/* Computes POC for pic_order_cnt_type == 1 (H.264 spec 8.2.1.2):
 * derives FrameNumOffset (reset on IDR, bumped by MaxFrameNum on
 * frame_num wrap), then expectedPicOrderCnt from the SPS ref-frame
 * offset cycle, and finally the per-field POCs from the slice-header
 * delta_pic_order_cnt[] values. */
2226 /* 8.2.1.2 - Decoding process for picture order count type 1 */
2228 init_picture_poc_1 (GstVaapiDecoderH264 * decoder,
2229     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2231   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2232   GstH264SPS *const sps = get_sps (decoder);
2233   const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
2234   gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
2237   GST_DEBUG ("decode picture order count type 1");
2239   if (priv->prev_pic_has_mmco5)
2240     prev_frame_num_offset = 0;
2242     prev_frame_num_offset = priv->frame_num_offset;
  /* (8-6 / 8-7): FrameNumOffset derivation. */
2245   if (GST_VAAPI_PICTURE_IS_IDR (picture))
2246     priv->frame_num_offset = 0;
2247   else if (priv->prev_frame_num > priv->frame_num)
2248     priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
2250     priv->frame_num_offset = prev_frame_num_offset;
2253   if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
2254     abs_frame_num = priv->frame_num_offset + priv->frame_num;
  /* Non-reference pictures count one position earlier in the cycle. */
2257   if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture) && abs_frame_num > 0)
2258     abs_frame_num = abs_frame_num - 1;
2260   if (abs_frame_num > 0) {
2261     gint32 expected_delta_per_poc_cycle;
2262     gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
2264     expected_delta_per_poc_cycle = 0;
2265     for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
2266       expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
2269     poc_cycle_cnt = (abs_frame_num - 1) /
2270         sps->num_ref_frames_in_pic_order_cnt_cycle;
2271     frame_num_in_poc_cycle = (abs_frame_num - 1) %
2272         sps->num_ref_frames_in_pic_order_cnt_cycle;
2275     expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
2276     for (i = 0; i <= frame_num_in_poc_cycle; i++)
2277       expected_poc += sps->offset_for_ref_frame[i];
2280   if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
2281     expected_poc += sps->offset_for_non_ref_pic;
2284   switch (picture->structure) {
2285     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
2286       priv->field_poc[TOP_FIELD] = expected_poc +
2287           slice_hdr->delta_pic_order_cnt[0];
2288       priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
2289           sps->offset_for_top_to_bottom_field +
2290           slice_hdr->delta_pic_order_cnt[1];
2292     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
2293       priv->field_poc[TOP_FIELD] = expected_poc +
2294           slice_hdr->delta_pic_order_cnt[0];
2296     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
2297       priv->field_poc[BOTTOM_FIELD] = expected_poc +
2298           sps->offset_for_top_to_bottom_field +
2299           slice_hdr->delta_pic_order_cnt[0];
/* Computes POC for pic_order_cnt_type == 2 (H.264 spec 8.2.1.3): POC is
 * 2*(FrameNumOffset+frame_num), minus one for non-reference pictures,
 * 0 for IDR; the result is copied to whichever field(s) the picture
 * carries. */
2304 /* 8.2.1.3 - Decoding process for picture order count type 2 */
2306 init_picture_poc_2 (GstVaapiDecoderH264 * decoder,
2307     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2309   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2310   GstH264SPS *const sps = get_sps (decoder);
2311   const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
2312   gint32 prev_frame_num_offset, temp_poc;
2314   GST_DEBUG ("decode picture order count type 2");
2316   if (priv->prev_pic_has_mmco5)
2317     prev_frame_num_offset = 0;
2319     prev_frame_num_offset = priv->frame_num_offset;
  /* (8-11 / 8-12): FrameNumOffset derivation, same shape as type 1. */
2322   if (GST_VAAPI_PICTURE_IS_IDR (picture))
2323     priv->frame_num_offset = 0;
2324   else if (priv->prev_frame_num > priv->frame_num)
2325     priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
2327     priv->frame_num_offset = prev_frame_num_offset;
  /* NOTE(review): the IDR branch body (temp_poc = 0) is elided from
   * this listing — confirm against the full source. */
2330   if (GST_VAAPI_PICTURE_IS_IDR (picture))
2332   else if (!GST_VAAPI_PICTURE_IS_REFERENCE (picture))
2333     temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
2335     temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
2338   if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2339     priv->field_poc[TOP_FIELD] = temp_poc;
2340   if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2341     priv->field_poc[BOTTOM_FIELD] = temp_poc;
/* Dispatches to the per-pic_order_cnt_type POC derivation, then copies
 * priv->field_poc[] into the picture and sets base.poc to the minimum
 * of the two field POCs (spec 8.2.1). */
2344 /* 8.2.1 - Decoding process for picture order count */
2346 init_picture_poc (GstVaapiDecoderH264 * decoder,
2347     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2349   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2350   GstH264SPS *const sps = get_sps (decoder);
2352   switch (sps->pic_order_cnt_type) {
2354       init_picture_poc_0 (decoder, picture, slice_hdr);
2357       init_picture_poc_1 (decoder, picture, slice_hdr);
2360       init_picture_poc_2 (decoder, picture, slice_hdr);
2364   if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2365     picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
2366   if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2367     picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
2368   picture->base.poc = MIN (picture->field_poc[0], picture->field_poc[1]);
/* qsort comparator: orders pictures by decreasing pic_num. */
2372 compare_picture_pic_num_dec (const void *a, const void *b)
2374   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2375   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2377   return picB->pic_num - picA->pic_num;
/* qsort comparator: orders pictures by increasing long_term_pic_num. */
2381 compare_picture_long_term_pic_num_inc (const void *a, const void *b)
2383   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2384   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2386   return picA->long_term_pic_num - picB->long_term_pic_num;
/* qsort comparator: orders pictures by decreasing POC. */
2390 compare_picture_poc_dec (const void *a, const void *b)
2392   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2393   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2395   return picB->base.poc - picA->base.poc;
/* qsort comparator: orders pictures by increasing POC. */
2399 compare_picture_poc_inc (const void *a, const void *b)
2401   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2402   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2404   return picA->base.poc - picB->base.poc;
/* qsort comparator: orders pictures by decreasing FrameNumWrap. */
2408 compare_picture_frame_num_wrap_dec (const void *a, const void *b)
2410   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2411   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2413   return picB->frame_num_wrap - picA->frame_num_wrap;
/* qsort comparator: orders pictures by increasing long_term_frame_idx. */
2417 compare_picture_long_term_frame_idx_inc (const void *a, const void *b)
2419   const GstVaapiPictureH264 *const picA = *(GstVaapiPictureH264 **) a;
2420   const GstVaapiPictureH264 *const picB = *(GstVaapiPictureH264 **) b;
2422   return picA->long_term_frame_idx - picB->long_term_frame_idx;
/* Derives FrameNumWrap / PicNum for short-term references and
 * LongTermPicNum for long-term references (spec 8.2.4.1), skipping
 * references that belong to a different MVC view.  Field pictures get
 * 2x+1 / 2x numbering depending on parity match with the current
 * picture (equations 8-28..8-33). */
2425 /* 8.2.4.1 - Decoding process for picture numbers */
2427 init_picture_refs_pic_num (GstVaapiDecoderH264 * decoder,
2428     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2430   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2431   GstH264SPS *const sps = get_sps (decoder);
2432   const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
2435   GST_DEBUG ("decode picture numbers");
2437   for (i = 0; i < priv->short_ref_count; i++) {
2438     GstVaapiPictureH264 *const pic = priv->short_ref[i];
    /* Only references of the same view participate. */
2441     if (pic->base.view_id != picture->base.view_id)
    /* (8-27): FrameNumWrap accounts for frame_num wrap-around. */
2445     if (pic->frame_num > priv->frame_num)
2446       pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
2448       pic->frame_num_wrap = pic->frame_num;
2450     // (8-28, 8-30, 8-31)
2451     if (GST_VAAPI_PICTURE_IS_FRAME (picture))
2452       pic->pic_num = pic->frame_num_wrap;
2454     if (pic->structure == picture->structure)
2455       pic->pic_num = 2 * pic->frame_num_wrap + 1;
2457       pic->pic_num = 2 * pic->frame_num_wrap;
2461   for (i = 0; i < priv->long_ref_count; i++) {
2462     GstVaapiPictureH264 *const pic = priv->long_ref[i];
2465     if (pic->base.view_id != picture->base.view_id)
2468     // (8-29, 8-32, 8-33)
2469     if (GST_VAAPI_PICTURE_IS_FRAME (picture))
2470       pic->long_term_pic_num = pic->long_term_frame_idx;
2472     if (pic->structure == picture->structure)
2473       pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2475       pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
/* Sorts the first n entries of a reference-picture list using the
 * compare_picture_<compare_func> comparator defined above. */
2480 #define SORT_REF_LIST(list, n, compare_func) \
2481     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
/* Appends entries from ref_list to RefPicList, alternating fields of
 * the requested parity (picture_structure) with fields of the opposite
 * parity, as required by spec 8.2.4.2.5; updates *RefPicList_count.
 * NOTE(review): the declarations/initialization of i, j, n and the
 * do/break scaffolding are elided from this listing. */
2484 init_picture_refs_fields_1 (guint picture_structure,
2485     GstVaapiPictureH264 * RefPicList[32],
2486     guint * RefPicList_count,
2487     GstVaapiPictureH264 * ref_list[32], guint ref_list_count)
2493   n = *RefPicList_count;
2496     for (; i < ref_list_count; i++) {
2497       if (ref_list[i]->structure == picture_structure) {
2498         RefPicList[n++] = ref_list[i++];
2502     for (; j < ref_list_count; j++) {
2503       if (ref_list[j]->structure != picture_structure) {
2504         RefPicList[n++] = ref_list[j++];
2508   } while (i < ref_list_count || j < ref_list_count);
2509   *RefPicList_count = n;
/* Builds a field reference picture list: first the short-term
 * candidates, then the long-term ones, each interleaved by field
 * parity via init_picture_refs_fields_1() (spec 8.2.4.2.5). */
2513 init_picture_refs_fields (GstVaapiPictureH264 * picture,
2514     GstVaapiPictureH264 * RefPicList[32],
2515     guint * RefPicList_count,
2516     GstVaapiPictureH264 * short_ref[32],
2517     guint short_ref_count,
2518     GstVaapiPictureH264 * long_ref[32], guint long_ref_count)
2522   /* 8.2.4.2.5 - reference picture lists in fields */
2523   init_picture_refs_fields_1 (picture->structure, RefPicList, &n,
2524       short_ref, short_ref_count);
2525   init_picture_refs_fields_1 (picture->structure, RefPicList, &n,
2526       long_ref, long_ref_count);
2527   *RefPicList_count = n;
/* Linear search of priv->inter_views for a picture matching view_id;
 * logs a warning when none is found (the elided lines presumably
 * return the match / NULL — confirm against the full source). */
2530 /* Finds the inter-view reference picture with the supplied view id */
2531 static GstVaapiPictureH264 *
2532 find_inter_view_reference (GstVaapiDecoderH264 * decoder, guint16 view_id)
2534   GPtrArray *const inter_views = decoder->priv.inter_views;
2537   for (i = 0; i < inter_views->len; i++) {
2538     GstVaapiPictureH264 *const picture = g_ptr_array_index (inter_views, i);
2539     if (picture->base.view_id == view_id)
2543   GST_WARNING ("failed to find inter-view reference picture for view_id: %d",
/* Membership test: linear scan of view_ids[0..num_view_ids) for
 * view_id (return statements elided from this listing). */
2548 /* Checks whether the view id exists in the supplied list of view ids */
2550 find_view_id (guint16 view_id, const guint16 * view_ids, guint num_view_ids)
2554   for (i = 0; i < num_view_ids; i++) {
2555     if (view_ids[i] == view_id)
/* Checks whether view_id appears in this SPS MVC view's L0 or L1
 * dependency lists — the anchor lists or the non-anchor lists,
 * selected by an is_anchor parameter whose declaration and branch are
 * elided from this listing. */
2562 find_view_id_in_view (guint16 view_id, const GstH264SPSExtMVCView * view,
2566     return (find_view_id (view_id, view->anchor_ref_l0,
2567             view->num_anchor_refs_l0) ||
2568         find_view_id (view_id, view->anchor_ref_l1, view->num_anchor_refs_l1));
2570   return (find_view_id (view_id, view->non_anchor_ref_l0,
2571           view->num_non_anchor_refs_l0) ||
2572       find_view_id (view_id, view->non_anchor_ref_l1,
2573           view->num_non_anchor_refs_l1));
/* Returns whether the picture with the supplied view_id is listed as
 * an inter-view dependency of the current picture's view in the active
 * MVC SPS extension; non-MVC pictures/SPS short-circuit (the elided
 * line presumably returns FALSE). */
2576 /* Checks whether the inter-view reference picture with the supplied
2577    view id is used for decoding the current view component picture */
2579 is_inter_view_reference_for_picture (GstVaapiDecoderH264 * decoder,
2580     guint16 view_id, GstVaapiPictureH264 * picture)
2582   const GstH264SPS *const sps = get_sps (decoder);
2585   if (!GST_VAAPI_PICTURE_IS_MVC (picture) ||
2586       sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2589   is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR (picture);
2590   return find_view_id_in_view (view_id,
2591       &sps->extension.mvc.view[picture->base.voc], is_anchor);
/* Returns whether any later view (voc+1 .. num_views-1) in the MVC SPS
 * lists this picture's view_id as an inter-view dependency, i.e.
 * whether the picture must be kept for decoding subsequent view
 * components of the same access unit. */
2594 /* Checks whether the supplied inter-view reference picture is used
2595    for decoding the next view component pictures */
2597 is_inter_view_reference_for_next_pictures (GstVaapiDecoderH264 * decoder,
2598     GstVaapiPictureH264 * picture)
2600   const GstH264SPS *const sps = get_sps (decoder);
2604   if (!GST_VAAPI_PICTURE_IS_MVC (picture) ||
2605       sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2608   is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR (picture);
2609   num_views = sps->extension.mvc.num_views_minus1 + 1;
2610   for (i = picture->base.voc + 1; i < num_views; i++) {
2611     const GstH264SPSExtMVCView *const view = &sps->extension.mvc.view[i];
2612     if (find_view_id_in_view (picture->base.view_id, view, is_anchor))
/* Appends inter-view reference pictures (looked up by view id) to
 * ref_list until either the view-id list or the num_refs budget is
 * exhausted; updates *ref_list_count_ptr.  A missing inter-view
 * reference breaks out of the loop (H.8.2.1). */
2618 /* H.8.2.1 - Initialization process for inter-view prediction references */
2620 init_picture_refs_mvc_1 (GstVaapiDecoderH264 * decoder,
2621     GstVaapiPictureH264 ** ref_list, guint * ref_list_count_ptr, guint num_refs,
2622     const guint16 * view_ids, guint num_view_ids)
2626   n = *ref_list_count_ptr;
2627   for (j = 0; j < num_view_ids && n < num_refs; j++) {
2628     GstVaapiPictureH264 *pic;
2630     if (!(pic = find_inter_view_reference (decoder, view_ids[j])))
2633     ref_list[n++] = pic;
2636   *ref_list_count_ptr = n;
/* Extends RefPicList0/RefPicList1 with inter-view references for MVC
 * pictures, using the anchor or non-anchor dependency lists of the
 * picture's view from the active SPS MVC extension.  `list` selects
 * which RefPicList the (elided) dispatch updates; returns the result
 * of the last init_picture_refs_mvc_1() call. */
2641 static inline gboolean
2642 init_picture_refs_mvc (GstVaapiDecoderH264 * decoder,
2643     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr, guint list)
2645   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2646   const GstH264SPS *const sps = get_sps (decoder);
2647   const GstH264SPSExtMVCView *view;
2648   gboolean ret = TRUE;
2650   GST_DEBUG ("initialize reference picture list for inter-view prediction");
2652   if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2654   view = &sps->extension.mvc.view[picture->base.voc];
/* Token-pasting helper: expands to the RefPicListN fields, the matching
 * num_ref_idx_lN budget and the view's (non_)anchor refs for list N. */
2656 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {  \
2657     ret = init_picture_refs_mvc_1(decoder,                      \
2658         priv->RefPicList##ref_list,                             \
2659         &priv->RefPicList##ref_list##_count,                    \
2660         slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
2661         view->view_list##_l##ref_list,                          \
2662         view->num_##view_list##s_l##ref_list);                  \
2666       if (GST_VAAPI_PICTURE_IS_ANCHOR (picture))
2667         INVOKE_INIT_PICTURE_REFS_MVC (0, anchor_ref);
2669         INVOKE_INIT_PICTURE_REFS_MVC (0, non_anchor_ref);
2671       if (GST_VAAPI_PICTURE_IS_ANCHOR (picture))
2672         INVOKE_INIT_PICTURE_REFS_MVC (1, anchor_ref);
2674         INVOKE_INIT_PICTURE_REFS_MVC (1, non_anchor_ref);
2679 #undef INVOKE_INIT_PICTURE_REFS_MVC
/* Builds RefPicList0 for P/SP slices (spec 8.2.4.2.1/8.2.4.2.2):
 * frames take short-term refs sorted by decreasing PicNum followed by
 * long-term refs sorted by increasing LongTermPicNum; fields sort the
 * candidate pools (FrameNumWrap desc / LongTermFrameIdx asc) and then
 * interleave by field parity.  MVC pictures get inter-view refs
 * appended to list 0 afterwards. */
2683 init_picture_refs_p_slice (GstVaapiDecoderH264 * decoder,
2684     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2686   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2687   GstVaapiPictureH264 **ref_list;
2689   gboolean ret = TRUE;
2691   GST_DEBUG ("decode reference picture list for P and SP slices");
2693   if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
2694     /* 8.2.4.2.1 - P and SP slices in frames */
2695     if (priv->short_ref_count > 0) {
2696       ref_list = priv->RefPicList0;
2697       for (i = 0; i < priv->short_ref_count; i++)
2698         ref_list[i] = priv->short_ref[i];
2699       SORT_REF_LIST (ref_list, i, pic_num_dec);
2700       priv->RefPicList0_count += i;
2703     if (priv->long_ref_count > 0) {
2704       ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2705       for (i = 0; i < priv->long_ref_count; i++)
2706         ref_list[i] = priv->long_ref[i];
2707       SORT_REF_LIST (ref_list, i, long_term_pic_num_inc);
2708       priv->RefPicList0_count += i;
2711     /* 8.2.4.2.2 - P and SP slices in fields */
2712     GstVaapiPictureH264 *short_ref[32];
2713     guint short_ref_count = 0;
2714     GstVaapiPictureH264 *long_ref[32];
2715     guint long_ref_count = 0;
2717     if (priv->short_ref_count > 0) {
2718       for (i = 0; i < priv->short_ref_count; i++)
2719         short_ref[i] = priv->short_ref[i];
2720       SORT_REF_LIST (short_ref, i, frame_num_wrap_dec);
2721       short_ref_count = i;
2724     if (priv->long_ref_count > 0) {
2725       for (i = 0; i < priv->long_ref_count; i++)
2726         long_ref[i] = priv->long_ref[i];
2727       SORT_REF_LIST (long_ref, i, long_term_frame_idx_inc);
2731     init_picture_refs_fields (picture,
2732         priv->RefPicList0, &priv->RefPicList0_count,
2733         short_ref, short_ref_count, long_ref, long_ref_count);
2736   if (GST_VAAPI_PICTURE_IS_MVC (picture)) {
2738     ret = init_picture_refs_mvc (decoder, picture, slice_hdr, 0);
/* Builds RefPicList0 and RefPicList1 for B slices (spec 8.2.4.2.3 for
 * frames, 8.2.4.2.4 for fields).  List0: short-term refs with POC <
 * current (POC desc), then POC >= current (POC asc), then long-term
 * refs (LongTermPicNum asc).  List1 mirrors this with the POC
 * partitions swapped.  Field pictures build refFrameList0/1ShortTerm
 * and refFrameListLongTerm pools first and interleave by parity.  If
 * both lists come out identical, the first two List1 entries are
 * swapped per the spec.  MVC pictures get inter-view refs appended. */
2745 init_picture_refs_b_slice (GstVaapiDecoderH264 * decoder,
2746     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
2748   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2749   GstVaapiPictureH264 **ref_list;
2751   gboolean ret = TRUE;
2753   GST_DEBUG ("decode reference picture list for B slices");
2755   if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
2756     /* 8.2.4.2.3 - B slices in frames */
2759     if (priv->short_ref_count > 0) {
2760       // 1. Short-term references
2761       ref_list = priv->RefPicList0;
2762       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2763         if (priv->short_ref[i]->base.poc < picture->base.poc)
2764           ref_list[n++] = priv->short_ref[i];
2766       SORT_REF_LIST (ref_list, n, poc_dec);
2767       priv->RefPicList0_count += n;
2769       ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2770       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2771         if (priv->short_ref[i]->base.poc >= picture->base.poc)
2772           ref_list[n++] = priv->short_ref[i];
2774       SORT_REF_LIST (ref_list, n, poc_inc);
2775       priv->RefPicList0_count += n;
2778     if (priv->long_ref_count > 0) {
2779       // 2. Long-term references
2780       ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2781       for (n = 0, i = 0; i < priv->long_ref_count; i++)
2782         ref_list[n++] = priv->long_ref[i];
2783       SORT_REF_LIST (ref_list, n, long_term_pic_num_inc);
2784       priv->RefPicList0_count += n;
2788     if (priv->short_ref_count > 0) {
2789       // 1. Short-term references
2790       ref_list = priv->RefPicList1;
2791       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2792         if (priv->short_ref[i]->base.poc > picture->base.poc)
2793           ref_list[n++] = priv->short_ref[i];
2795       SORT_REF_LIST (ref_list, n, poc_inc);
2796       priv->RefPicList1_count += n;
2798       ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2799       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2800         if (priv->short_ref[i]->base.poc <= picture->base.poc)
2801           ref_list[n++] = priv->short_ref[i];
2803       SORT_REF_LIST (ref_list, n, poc_dec);
2804       priv->RefPicList1_count += n;
2807     if (priv->long_ref_count > 0) {
2808       // 2. Long-term references
2809       ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2810       for (n = 0, i = 0; i < priv->long_ref_count; i++)
2811         ref_list[n++] = priv->long_ref[i];
2812       SORT_REF_LIST (ref_list, n, long_term_pic_num_inc);
2813       priv->RefPicList1_count += n;
2816     /* 8.2.4.2.4 - B slices in fields */
2817     GstVaapiPictureH264 *short_ref0[32];
2818     guint short_ref0_count = 0;
2819     GstVaapiPictureH264 *short_ref1[32];
2820     guint short_ref1_count = 0;
2821     GstVaapiPictureH264 *long_ref[32];
2822     guint long_ref_count = 0;
2824     /* refFrameList0ShortTerm */
2825     if (priv->short_ref_count > 0) {
2826       ref_list = short_ref0;
2827       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2828         if (priv->short_ref[i]->base.poc <= picture->base.poc)
2829           ref_list[n++] = priv->short_ref[i];
2831       SORT_REF_LIST (ref_list, n, poc_dec);
2832       short_ref0_count += n;
2834       ref_list = &short_ref0[short_ref0_count];
2835       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2836         if (priv->short_ref[i]->base.poc > picture->base.poc)
2837           ref_list[n++] = priv->short_ref[i];
2839       SORT_REF_LIST (ref_list, n, poc_inc);
2840       short_ref0_count += n;
2843     /* refFrameList1ShortTerm */
2844     if (priv->short_ref_count > 0) {
2845       ref_list = short_ref1;
2846       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2847         if (priv->short_ref[i]->base.poc > picture->base.poc)
2848           ref_list[n++] = priv->short_ref[i];
2850       SORT_REF_LIST (ref_list, n, poc_inc);
2851       short_ref1_count += n;
2853       ref_list = &short_ref1[short_ref1_count];
2854       for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2855         if (priv->short_ref[i]->base.poc <= picture->base.poc)
2856           ref_list[n++] = priv->short_ref[i];
2858       SORT_REF_LIST (ref_list, n, poc_dec);
2859       short_ref1_count += n;
2862     /* refFrameListLongTerm */
2863     if (priv->long_ref_count > 0) {
2864       for (i = 0; i < priv->long_ref_count; i++)
2865         long_ref[i] = priv->long_ref[i];
2866       SORT_REF_LIST (long_ref, i, long_term_frame_idx_inc);
2870     init_picture_refs_fields (picture,
2871         priv->RefPicList0, &priv->RefPicList0_count,
2872         short_ref0, short_ref0_count, long_ref, long_ref_count);
2874     init_picture_refs_fields (picture,
2875         priv->RefPicList1, &priv->RefPicList1_count,
2876         short_ref1, short_ref1_count, long_ref, long_ref_count);
2879   /* Check whether RefPicList1 is identical to RefPicList0, then
2880      swap if necessary */
2881   if (priv->RefPicList1_count > 1 &&
2882       priv->RefPicList1_count == priv->RefPicList0_count &&
2883       memcmp (priv->RefPicList0, priv->RefPicList1,
2884           priv->RefPicList0_count * sizeof (priv->RefPicList0[0])) == 0) {
2885     GstVaapiPictureH264 *const tmp = priv->RefPicList1[0];
2886     priv->RefPicList1[0] = priv->RefPicList1[1];
2887     priv->RefPicList1[1] = tmp;
2890   if (GST_VAAPI_PICTURE_IS_MVC (picture)) {
2892     ret = init_picture_refs_mvc (decoder, picture, slice_hdr, 0);
2895     ret = init_picture_refs_mvc (decoder, picture, slice_hdr, 1);
2901 #undef SORT_REF_LIST
/* Finds the index into priv->short_ref[] of the short-term reference
 * whose PicNum equals pic_num; logs an error on failure (the elided
 * lines presumably return the index / -1 — used by the callers that
 * test `found_ref_idx >= 0`). */
2904 find_short_term_reference (GstVaapiDecoderH264 * decoder, gint32 pic_num)
2906   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2909   for (i = 0; i < priv->short_ref_count; i++) {
2910     if (priv->short_ref[i]->pic_num == pic_num)
2913   GST_ERROR ("found no short-term reference picture with PicNum = %d", pic_num);
/* Finds the index into priv->long_ref[] of the long-term reference
 * whose LongTermPicNum matches; logs an error on failure (return
 * statements elided, same index/-1 convention as the short-term
 * variant). */
2918 find_long_term_reference (GstVaapiDecoderH264 * decoder,
2919     gint32 long_term_pic_num)
2921   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2924   for (i = 0; i < priv->long_ref_count; i++) {
2925     if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2928   GST_ERROR ("found no long-term reference picture with LongTermPicNum = %d",
/* Applies the ref_pic_list_modification() syntax to one list (spec
 * 8.2.4.3; H.8.2.2.3 for MVC inter-view modifications).  For each
 * modification opcode: idc 0/1 remap a short-term PicNum, idc 2 a
 * LongTermPicNum, idc 4/5 (MVC only) an inter-view picture by view
 * index delta; each inserts the target at ref_list_idx, shifts the
 * remainder down, and compacts duplicates out of the tail.  Opcode 3
 * terminates.  Finally the list is truncated/padded to num_refs and
 * empty entries are reported as errors. */
2934 exec_picture_refs_modification_1 (GstVaapiDecoderH264 * decoder,
2935     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr, guint list)
2937   GstVaapiDecoderH264Private *const priv = &decoder->priv;
2938   GstH264SPS *const sps = get_sps (decoder);
2939   GstH264RefPicListModification *ref_pic_list_modification;
2940   guint num_ref_pic_list_modifications;
2941   GstVaapiPictureH264 **ref_list;
2942   guint *ref_list_count_ptr, ref_list_idx = 0;
2943   const guint16 *view_ids = NULL;
2944   guint i, j, n, num_refs, num_view_ids = 0;
2946   gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2947   gboolean ret = TRUE;
2949   GST_DEBUG ("modification process of reference picture list %u", list);
  /* Select the L0 or L1 modification syntax, target list and, for MVC,
   * the view-id dependency arrays of the picture's view. */
2952     ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l0;
2953     num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2954     ref_list = priv->RefPicList0;
2955     ref_list_count_ptr = &priv->RefPicList0_count;
2956     num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2958     if (GST_VAAPI_PICTURE_IS_MVC (picture) &&
2959         sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2960       const GstH264SPSExtMVCView *const view =
2961           &sps->extension.mvc.view[picture->base.voc];
2962       if (GST_VAAPI_PICTURE_IS_ANCHOR (picture)) {
2963         view_ids = view->anchor_ref_l0;
2964         num_view_ids = view->num_anchor_refs_l0;
2966         view_ids = view->non_anchor_ref_l0;
2967         num_view_ids = view->num_non_anchor_refs_l0;
2971     ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
2972     num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2973     ref_list = priv->RefPicList1;
2974     ref_list_count_ptr = &priv->RefPicList1_count;
2975     num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2977     if (GST_VAAPI_PICTURE_IS_MVC (picture) &&
2978         sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2979       const GstH264SPSExtMVCView *const view =
2980           &sps->extension.mvc.view[picture->base.voc];
2981       if (GST_VAAPI_PICTURE_IS_ANCHOR (picture)) {
2982         view_ids = view->anchor_ref_l1;
2983         num_view_ids = view->num_anchor_refs_l1;
2985         view_ids = view->non_anchor_ref_l1;
2986         num_view_ids = view->num_non_anchor_refs_l1;
  /* Field pictures use doubled picture numbers (spec 8.2.4.3.1). */
2991   if (!GST_VAAPI_PICTURE_IS_FRAME (picture)) {
2992     MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5);      // 2 * MaxFrameNum
2993     CurrPicNum = 2 * slice_hdr->frame_num + 1;  // 2 * frame_num + 1
2995     MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 4);      // MaxFrameNum
2996     CurrPicNum = slice_hdr->frame_num;  // frame_num
2999   picNumPred = CurrPicNum;
3000   picViewIdxPred = -1;
3002   for (i = 0; i < num_ref_pic_list_modifications; i++) {
3003     GstH264RefPicListModification *const l = &ref_pic_list_modification[i];
3004     if (l->modification_of_pic_nums_idc == 3)
3007     /* 8.2.4.3.1 - Short-term reference pictures */
3008     if (l->modification_of_pic_nums_idc == 0
3009         || l->modification_of_pic_nums_idc == 1) {
3010       gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
3011       gint32 picNum, picNumNoWrap;
      /* (8-34/8-35): predict picNumNoWrap from the previous value. */
3014       if (l->modification_of_pic_nums_idc == 0) {
3015         picNumNoWrap = picNumPred - abs_diff_pic_num;
3016         if (picNumNoWrap < 0)
3017           picNumNoWrap += MaxPicNum;
3021         picNumNoWrap = picNumPred + abs_diff_pic_num;
3022         if (picNumNoWrap >= MaxPicNum)
3023           picNumNoWrap -= MaxPicNum;
3025       picNumPred = picNumNoWrap;
3028       picNum = picNumNoWrap;
3029       if (picNum > CurrPicNum)
3030         picNum -= MaxPicNum;
      /* Insert the remapped reference and drop any later duplicate. */
3033       for (j = num_refs; j > ref_list_idx; j--)
3034         ref_list[j] = ref_list[j - 1];
3035       found_ref_idx = find_short_term_reference (decoder, picNum);
3036       ref_list[ref_list_idx++] =
3037           found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
3039       for (j = ref_list_idx; j <= num_refs; j++) {
3044             GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (ref_list[j]) ?
3045             ref_list[j]->pic_num : MaxPicNum;
3046         if (PicNumF != picNum ||
3047             ref_list[j]->base.view_id != picture->base.view_id)
3048           ref_list[n++] = ref_list[j];
3052     /* 8.2.4.3.2 - Long-term reference pictures */
3053     else if (l->modification_of_pic_nums_idc == 2) {
3055       for (j = num_refs; j > ref_list_idx; j--)
3056         ref_list[j] = ref_list[j - 1];
3058           find_long_term_reference (decoder, l->value.long_term_pic_num);
3059       ref_list[ref_list_idx++] =
3060           found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
3062       for (j = ref_list_idx; j <= num_refs; j++) {
3063         gint32 LongTermPicNumF;
3067             GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (ref_list[j]) ?
3068             ref_list[j]->long_term_pic_num : INT_MAX;
3069         if (LongTermPicNumF != l->value.long_term_pic_num ||
3070             ref_list[j]->base.view_id != picture->base.view_id)
3071           ref_list[n++] = ref_list[j];
3075     /* H.8.2.2.3 - Inter-view prediction reference pictures */
3076     else if ((GST_VAAPI_PICTURE_IS_MVC (picture) &&
3077             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
3078         (l->modification_of_pic_nums_idc == 4 ||
3079             l->modification_of_pic_nums_idc == 5)) {
3080       gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
3081       gint32 picViewIdx, targetViewId;
      /* (H-6/H-7): predict picViewIdx modulo the dependency count. */
3084       if (l->modification_of_pic_nums_idc == 4) {
3085         picViewIdx = picViewIdxPred - abs_diff_view_idx;
3087           picViewIdx += num_view_ids;
3091         picViewIdx = picViewIdxPred + abs_diff_view_idx;
3092         if (picViewIdx >= num_view_ids)
3093           picViewIdx -= num_view_ids;
3095       picViewIdxPred = picViewIdx;
3098       targetViewId = view_ids[picViewIdx];
3101       for (j = num_refs; j > ref_list_idx; j--)
3102         ref_list[j] = ref_list[j - 1];
3103       ref_list[ref_list_idx++] =
3104           find_inter_view_reference (decoder, targetViewId);
3106       for (j = ref_list_idx; j <= num_refs; j++) {
3109         if (ref_list[j]->base.view_id != targetViewId ||
3110             ref_list[j]->base.poc != picture->base.poc)
3111           ref_list[n++] = ref_list[j];
  /* Any NULL slot left in the final list is a stream/lookup error. */
3116   for (i = 0; i < num_refs; i++) {
3119       GST_ERROR ("list %u entry %u is empty", list, i);
3123   *ref_list_count_ptr = num_refs;
/* Runs the reference-list modification process on list 0 (for P/SP/B
 * slices with the L0 flag set) and list 1 (for B slices with the L1
 * flag set); returns the combined success (result accumulation of the
 * second call is partly elided from this listing). */
3128 /* 8.2.4.3 - Modification process for reference picture lists */
3130 exec_picture_refs_modification (GstVaapiDecoderH264 * decoder,
3131     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
3133   gboolean ret = TRUE;
3135   GST_DEBUG ("execute ref_pic_list_modification()");
3138   if (!GST_H264_IS_I_SLICE (slice_hdr) && !GST_H264_IS_SI_SLICE (slice_hdr) &&
3139       slice_hdr->ref_pic_list_modification_flag_l0)
3140     ret = exec_picture_refs_modification_1 (decoder, picture, slice_hdr, 0);
3143   if (GST_H264_IS_B_SLICE (slice_hdr) &&
3144       slice_hdr->ref_pic_list_modification_flag_l1)
3145     ret = exec_picture_refs_modification_1 (decoder, picture, slice_hdr, 1);
/* Scans a reference list for any picture flagged CORRUPTED or GHOST;
 * the (elided) return statements presumably yield TRUE on the first
 * hit and FALSE otherwise — see callers in mark_picture_refs(). */
3151 check_picture_ref_corruption (GstVaapiDecoderH264 * decoder,
3152     GstVaapiPictureH264 * RefPicList[32], guint RefPicList_count)
3154   const guint corrupted_flags =
3155       GST_VAAPI_PICTURE_FLAG_CORRUPTED | GST_VAAPI_PICTURE_FLAG_GHOST;
3158   for (i = 0; i < RefPicList_count; i++) {
3159     GstVaapiPictureH264 *const picture = RefPicList[i];
3160     if (picture && (GST_VAAPI_PICTURE_FLAGS (picture) & corrupted_flags))
/* Propagates corruption: if any entry of RefPicList0/1 is corrupted or
 * a ghost, the current picture is flagged CORRUPTED too.  Pictures
 * already marked corrupted return early. */
3167 mark_picture_refs (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
3169   GstVaapiDecoderH264Private *const priv = &decoder->priv;
3171   if (GST_VAAPI_PICTURE_IS_CORRUPTED (picture))
3174   if (check_picture_ref_corruption (decoder,
3175           priv->RefPicList0, priv->RefPicList0_count) ||
3176       check_picture_ref_corruption (decoder,
3177           priv->RefPicList1, priv->RefPicList1_count))
3178     GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_CORRUPTED);
/* Rebuilds priv->short_ref[] / priv->long_ref[] from the DPB for the
 * current picture's view: frame decoding collects complete frames
 * (buffers[0], structure forced to FRAME); field decoding collects
 * every buffered field with its own structure and links other_field to
 * the complementary buffer.  Stale tail entries from the previous
 * counts are NULLed out before the counts are updated. */
3182 init_picture_ref_lists (GstVaapiDecoderH264 * decoder,
3183     GstVaapiPictureH264 * picture)
3185   GstVaapiDecoderH264Private *const priv = &decoder->priv;
3186   guint i, j, short_ref_count, long_ref_count;
3188   short_ref_count = 0;
3190   if (GST_VAAPI_PICTURE_IS_FRAME (picture)) {
3191     for (i = 0; i < priv->dpb_count; i++) {
3192       GstVaapiFrameStore *const fs = priv->dpb[i];
3193       GstVaapiPictureH264 *pic;
3194       if (!gst_vaapi_frame_store_has_frame (fs))
3196       pic = fs->buffers[0];
3197       if (pic->base.view_id != picture->base.view_id)
3199       if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (pic))
3200         priv->short_ref[short_ref_count++] = pic;
3201       else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (pic))
3202         priv->long_ref[long_ref_count++] = pic;
3203       pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
3204       pic->other_field = fs->buffers[1];
3207     for (i = 0; i < priv->dpb_count; i++) {
3208       GstVaapiFrameStore *const fs = priv->dpb[i];
3209       for (j = 0; j < fs->num_buffers; j++) {
3210         GstVaapiPictureH264 *const pic = fs->buffers[j];
3211         if (pic->base.view_id != picture->base.view_id)
3213         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (pic))
3214           priv->short_ref[short_ref_count++] = pic;
3215         else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (pic))
3216           priv->long_ref[long_ref_count++] = pic;
3217         pic->structure = pic->base.structure;
        /* j ^ 1 picks the sibling field within the frame store. */
3218         pic->other_field = fs->buffers[j ^ 1];
  /* Clear entries left over from larger previous counts. */
3223   for (i = short_ref_count; i < priv->short_ref_count; i++)
3224     priv->short_ref[i] = NULL;
3225   priv->short_ref_count = short_ref_count;
3227   for (i = long_ref_count; i < priv->long_ref_count; i++)
3228     priv->long_ref[i] = NULL;
3229   priv->long_ref_count = long_ref_count;
/* Top-level reference-list construction for one slice: rebuilds the
 * short/long reference pools from the DPB, derives picture numbers,
 * builds the initial RefPicList0/1 per slice type, pads each list with
 * NULL up to num_ref_idx_lX_active, applies the stream's
 * ref_pic_list_modification, and finally propagates reference
 * corruption onto the current picture. */
3233 init_picture_refs (GstVaapiDecoderH264 * decoder,
3234     GstVaapiPictureH264 * picture, GstH264SliceHdr * slice_hdr)
3236   GstVaapiDecoderH264Private *const priv = &decoder->priv;
3238   gboolean ret = TRUE;
3240   init_picture_ref_lists (decoder, picture);
3241   init_picture_refs_pic_num (decoder, picture, slice_hdr);
3243   priv->RefPicList0_count = 0;
3244   priv->RefPicList1_count = 0;
3246   switch (slice_hdr->type % 5) {
3247     case GST_H264_P_SLICE:
3248     case GST_H264_SP_SLICE:
3249       ret = init_picture_refs_p_slice (decoder, picture, slice_hdr);
3251     case GST_H264_B_SLICE:
3252       ret = init_picture_refs_b_slice (decoder, picture, slice_hdr);
  /* Pad the active list(s) to the slice-header active count; B slices
   * fall through to also pad list 0 (intermediate lines elided). */
3258   switch (slice_hdr->type % 5) {
3259     case GST_H264_B_SLICE:
3260       num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
3261       for (i = priv->RefPicList1_count; i < num_refs; i++)
3262         priv->RefPicList1[i] = NULL;
3263       priv->RefPicList1_count = num_refs;
3266     case GST_H264_P_SLICE:
3267     case GST_H264_SP_SLICE:
3268       num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
3269       for (i = priv->RefPicList0_count; i < num_refs; i++)
3270         priv->RefPicList0[i] = NULL;
3271       priv->RefPicList0_count = num_refs;
3277   ret = ret && exec_picture_refs_modification (decoder, picture, slice_hdr);
3279   mark_picture_refs (decoder, picture);
3284 static GstVaapiPictureH264 *
3285 fill_picture_other_field_gap (GstVaapiDecoderH264 * decoder,
3286 GstVaapiPictureH264 * f0)
3288 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3289 GstVaapiPictureH264 *prev_picture, *f1;
3290 gint prev_frame_index;
3291 guint picture_structure;
3293 picture_structure = f0->base.structure;
3294 switch (picture_structure) {
3295 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3296 picture_structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
3298 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3299 picture_structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
3302 g_assert (0 && "unexpected picture structure");
3305 GST_VAAPI_PICTURE_FLAG_SET (f0, GST_VAAPI_PICTURE_FLAG_ONEFIELD);
3307 prev_frame_index = dpb_find_nearest_prev_poc (decoder, f0,
3308 picture_structure, &prev_picture);
3309 if (prev_frame_index < 0)
3310 goto error_find_field;
3312 f1 = gst_vaapi_picture_h264_new_field (f0);
3314 goto error_allocate_field;
3316 gst_vaapi_surface_proxy_replace (&f1->base.proxy, prev_picture->base.proxy);
3317 f1->base.surface = GST_VAAPI_SURFACE_PROXY_SURFACE (f1->base.proxy);
3318 f1->base.surface_id = GST_VAAPI_SURFACE_PROXY_SURFACE_ID (f1->base.proxy);
3320 f1->structure = f1->base.structure;
3322 /* XXX: clone other H.264 picture specific flags */
3323 GST_VAAPI_PICTURE_FLAG_SET (f1,
3324 (GST_VAAPI_PICTURE_FLAG_SKIPPED | GST_VAAPI_PICTURE_FLAG_GHOST));
3326 gst_vaapi_picture_h264_set_reference (f1, 0, FALSE);
3327 gst_vaapi_picture_replace (&priv->current_picture, f1);
3328 gst_vaapi_picture_unref (f1);
3330 init_picture_ref_lists (decoder, f1);
3331 init_picture_refs_pic_num (decoder, f1, NULL);
3332 if (!exec_ref_pic_marking_sliding_window (decoder))
3333 goto error_exec_ref_pic_marking;
3334 if (!dpb_add (decoder, f1))
3335 goto error_append_field;
3341 GST_ERROR ("failed to find field with POC nearest to %d", f0->base.poc);
3344 error_allocate_field:
3346 GST_ERROR ("failed to allocate missing field for previous frame store");
3349 error_exec_ref_pic_marking:
3351 GST_ERROR ("failed to execute reference picture marking process");
3356 GST_ERROR ("failed to add missing field into previous frame store");
3362 fill_picture_gaps (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture,
3363 GstH264SliceHdr * slice_hdr)
3365 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3366 GstH264SPS *const sps = get_sps (decoder);
3367 const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
3368 gint32 prev_frame_num;
3369 GstVaapiFrameStore *prev_frame;
3370 GstVaapiPicture *base_picture;
3371 GstVaapiPictureH264 *lost_picture, *prev_picture;
3372 GstH264SliceHdr lost_slice_hdr;
3373 gboolean success = FALSE;
3375 if (priv->prev_ref_frame_num == priv->frame_num)
3377 if ((priv->prev_ref_frame_num + 1) % MaxFrameNum == priv->frame_num)
3379 if (priv->dpb_count == 0)
3382 prev_frame = priv->prev_ref_frames[picture->base.voc];
3383 g_assert (prev_frame != NULL && prev_frame->buffers[0] != NULL);
3384 prev_picture = gst_vaapi_picture_ref (prev_frame->buffers[0]);
3385 gst_vaapi_picture_ref (picture);
3387 lost_slice_hdr = *slice_hdr;
3388 lost_slice_hdr.field_pic_flag = 0;
3389 if (sps->pic_order_cnt_type == 1) {
3390 lost_slice_hdr.delta_pic_order_cnt[0] = 0;
3391 lost_slice_hdr.delta_pic_order_cnt[1] = 0;
3393 lost_slice_hdr.dec_ref_pic_marking.adaptive_ref_pic_marking_mode_flag = 0;
3395 /* XXX: this process is incorrect for MVC */
3396 /* Reduce frame num gaps so we don't have to create unnecessary
3398 prev_frame_num = priv->prev_ref_frame_num;
3399 if (prev_frame_num > slice_hdr->frame_num)
3400 prev_frame_num -= MaxFrameNum;
3402 if ((slice_hdr->frame_num - prev_frame_num) - 1 > sps->num_ref_frames) {
3403 prev_frame_num = (slice_hdr->frame_num - sps->num_ref_frames) - 1;
3405 if (prev_frame_num < 0)
3406 prev_frame_num += MaxFrameNum;
3408 priv->frame_num = prev_frame_num;
3411 priv->prev_ref_frame_num = priv->frame_num;
3412 priv->frame_num = (priv->prev_ref_frame_num + 1) % MaxFrameNum;
3413 if (priv->frame_num == slice_hdr->frame_num)
3416 /* Create new picture */
3417 lost_picture = gst_vaapi_picture_h264_new_clone (prev_picture);
3419 goto error_allocate_picture;
3421 base_picture = &lost_picture->base;
3422 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
3423 base_picture->pts = GST_CLOCK_TIME_NONE;
3424 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
3425 lost_picture->frame_num = priv->frame_num;
3426 lost_picture->frame_num_wrap = priv->frame_num;
3427 lost_picture->structure = base_picture->structure;
3429 GST_VAAPI_PICTURE_FLAG_SET (lost_picture,
3430 (GST_VAAPI_PICTURE_FLAG_SKIPPED |
3431 GST_VAAPI_PICTURE_FLAG_GHOST |
3432 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE));
3434 if (sps->pic_order_cnt_type != 0)
3435 init_picture_poc (decoder, lost_picture, &lost_slice_hdr);
3437 base_picture->poc = prev_picture->base.poc + 2;
3438 if (prev_picture->field_poc[0] != G_MAXINT32)
3439 lost_picture->field_poc[0] = prev_picture->field_poc[0] + 2;
3440 if (prev_picture->field_poc[1] != G_MAXINT32)
3441 lost_picture->field_poc[1] = prev_picture->field_poc[1] + 2;
3444 gst_vaapi_picture_replace (&prev_picture, lost_picture);
3445 gst_vaapi_picture_replace (&priv->current_picture, lost_picture);
3446 gst_vaapi_picture_unref (lost_picture);
3448 init_picture_ref_lists (decoder, lost_picture);
3449 init_picture_refs_pic_num (decoder, lost_picture, &lost_slice_hdr);
3450 if (!exec_ref_pic_marking_sliding_window (decoder))
3451 goto error_exec_ref_pic_marking;
3452 if (!dpb_add (decoder, lost_picture))
3454 gst_vaapi_picture_replace (&priv->current_picture, NULL);
3459 priv->frame_num = slice_hdr->frame_num;
3460 priv->prev_ref_frame_num = (priv->frame_num + MaxFrameNum - 1) % MaxFrameNum;
3461 gst_vaapi_picture_replace (&prev_picture, NULL);
3462 gst_vaapi_picture_replace (&priv->current_picture, picture);
3463 gst_vaapi_picture_unref (picture);
3467 error_allocate_picture:
3469 GST_ERROR ("failed to allocate lost picture");
3472 error_exec_ref_pic_marking:
3474 GST_ERROR ("failed to execute reference picture marking process");
3479 GST_ERROR ("failed to store lost picture into the DPB");
3485 init_picture (GstVaapiDecoderH264 * decoder,
3486 GstVaapiPictureH264 * picture, GstVaapiParserInfoH264 * pi)
3488 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3489 GstVaapiPicture *const base_picture = &picture->base;
3490 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
3492 if (priv->prev_pic_reference)
3493 priv->prev_ref_frame_num = priv->frame_num;
3494 priv->prev_frame_num = priv->frame_num;
3495 priv->frame_num = slice_hdr->frame_num;
3496 picture->frame_num = priv->frame_num;
3497 picture->frame_num_wrap = priv->frame_num;
3498 picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
3500 /* If it's a cloned picture, it has some assignments from parent
3501 * picture already. In addition, base decoder doesn't set valid pts
3502 * to the frame corresponding to cloned picture.
3504 if (G_LIKELY (!base_picture->parent_picture)) {
3505 base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME (decoder)->pts;
3506 base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
3507 base_picture->view_id = pi->view_id;
3508 base_picture->voc = pi->voc;
3511 /* Initialize extensions */
3512 switch (pi->nalu.extension_type) {
3513 case GST_H264_NAL_EXTENSION_MVC:{
3514 GstH264NalUnitExtensionMVC *const mvc = &pi->nalu.extension.mvc;
3516 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_MVC);
3517 if (mvc->inter_view_flag)
3518 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
3519 if (mvc->anchor_pic_flag)
3520 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_ANCHOR);
3525 /* Reset decoder state for IDR pictures */
3526 if (pi->nalu.idr_pic_flag) {
3527 GST_DEBUG ("<IDR>");
3528 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_IDR);
3529 dpb_flush (decoder, picture);
3530 } else if (!fill_picture_gaps (decoder, picture, slice_hdr))
3533 /* Initialize picture structure */
3534 if (slice_hdr->field_pic_flag) {
3535 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
3536 priv->pic_structure = slice_hdr->bottom_field_flag ?
3537 GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD :
3538 GST_H264_SEI_PIC_STRUCT_TOP_FIELD;
3541 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
3542 switch (priv->pic_structure) {
3543 case GST_H264_SEI_PIC_STRUCT_TOP_FIELD:
3544 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
3545 if (GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture))
3546 priv->top_field_first = TRUE;
3548 case GST_H264_SEI_PIC_STRUCT_BOTTOM_FIELD:
3549 base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
3551 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM_TOP:
3552 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_RFF);
3554 case GST_H264_SEI_PIC_STRUCT_TOP_BOTTOM:
3555 if (GST_VAAPI_PICTURE_IS_FIRST_FIELD (picture))
3556 priv->top_field_first = TRUE;
3558 case GST_H264_SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM:
3559 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_RFF);
3561 case GST_H264_SEI_PIC_STRUCT_FRAME:
3562 if (!priv->progressive_sequence && priv->dpb_count == 0)
3563 priv->top_field_first = TRUE;
3566 picture->structure = base_picture->structure;
3567 if (priv->top_field_first)
3568 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_TFF);
3570 /* Initialize reference flags */
3571 if (pi->nalu.ref_idc) {
3572 GstH264DecRefPicMarking *const dec_ref_pic_marking =
3573 &slice_hdr->dec_ref_pic_marking;
3575 if (GST_VAAPI_PICTURE_IS_IDR (picture) &&
3576 dec_ref_pic_marking->long_term_reference_flag)
3577 GST_VAAPI_PICTURE_FLAG_SET (picture,
3578 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
3580 GST_VAAPI_PICTURE_FLAG_SET (picture,
3581 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
3584 init_picture_poc (decoder, picture, slice_hdr);
3588 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
3590 exec_ref_pic_marking_sliding_window (GstVaapiDecoderH264 * decoder)
3592 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3593 GstH264SPS *const sps = get_sps (decoder);
3594 GstVaapiPictureH264 *ref_picture;
3595 guint i, m, max_num_ref_frames;
3597 GST_DEBUG ("reference picture marking process (sliding window)");
3599 if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD (priv->current_picture))
3602 max_num_ref_frames = sps->num_ref_frames;
3603 if (max_num_ref_frames == 0)
3604 max_num_ref_frames = 1;
3605 if (!GST_VAAPI_PICTURE_IS_FRAME (priv->current_picture))
3606 max_num_ref_frames <<= 1;
3608 if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
3610 if (priv->short_ref_count < 1)
3613 for (m = 0, i = 1; i < priv->short_ref_count; i++) {
3614 GstVaapiPictureH264 *const picture = priv->short_ref[i];
3615 if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
3619 ref_picture = priv->short_ref[m];
3620 gst_vaapi_picture_h264_set_reference (ref_picture, 0, TRUE);
3621 ARRAY_REMOVE_INDEX (priv->short_ref, m);
3623 /* Both fields need to be marked as "unused for reference", so
3624 remove the other field from the short_ref[] list as well */
3625 if (!GST_VAAPI_PICTURE_IS_FRAME (priv->current_picture)
3626 && ref_picture->other_field) {
3627 for (i = 0; i < priv->short_ref_count; i++) {
3628 if (priv->short_ref[i] == ref_picture->other_field) {
3629 ARRAY_REMOVE_INDEX (priv->short_ref, i);
3637 static inline gint32
3638 get_picNumX (GstVaapiPictureH264 * picture,
3639 GstH264RefPicMarking * ref_pic_marking)
3643 if (GST_VAAPI_PICTURE_IS_FRAME (picture))
3644 pic_num = picture->frame_num_wrap;
3646 pic_num = 2 * picture->frame_num_wrap + 1;
3647 pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
3651 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
3653 exec_ref_pic_marking_adaptive_mmco_1 (GstVaapiDecoderH264 * decoder,
3654 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3656 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3659 picNumX = get_picNumX (picture, ref_pic_marking);
3660 i = find_short_term_reference (decoder, picNumX);
3664 gst_vaapi_picture_h264_set_reference (priv->short_ref[i], 0,
3665 GST_VAAPI_PICTURE_IS_FRAME (picture));
3666 ARRAY_REMOVE_INDEX (priv->short_ref, i);
3669 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
3671 exec_ref_pic_marking_adaptive_mmco_2 (GstVaapiDecoderH264 * decoder,
3672 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3674 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3677 i = find_long_term_reference (decoder, ref_pic_marking->long_term_pic_num);
3681 gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0,
3682 GST_VAAPI_PICTURE_IS_FRAME (picture));
3683 ARRAY_REMOVE_INDEX (priv->long_ref, i);
3686 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
3688 exec_ref_pic_marking_adaptive_mmco_3 (GstVaapiDecoderH264 * decoder,
3689 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3691 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3692 GstVaapiPictureH264 *ref_picture, *other_field;
3695 for (i = 0; i < priv->long_ref_count; i++) {
3696 if (priv->long_ref[i]->long_term_frame_idx ==
3697 ref_pic_marking->long_term_frame_idx)
3700 if (i != priv->long_ref_count) {
3701 gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, TRUE);
3702 ARRAY_REMOVE_INDEX (priv->long_ref, i);
3705 picNumX = get_picNumX (picture, ref_pic_marking);
3706 i = find_short_term_reference (decoder, picNumX);
3710 ref_picture = priv->short_ref[i];
3711 ARRAY_REMOVE_INDEX (priv->short_ref, i);
3712 priv->long_ref[priv->long_ref_count++] = ref_picture;
3714 ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3715 gst_vaapi_picture_h264_set_reference (ref_picture,
3716 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3717 GST_VAAPI_PICTURE_IS_COMPLETE (picture));
3719 /* Assign LongTermFrameIdx to the other field if it was also
3720 marked as "used for long-term reference */
3721 other_field = ref_picture->other_field;
3722 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (other_field))
3723 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3726 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3727 * as "unused for reference" */
3729 exec_ref_pic_marking_adaptive_mmco_4 (GstVaapiDecoderH264 * decoder,
3730 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3732 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3733 gint32 i, long_term_frame_idx;
3735 long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3737 for (i = 0; i < priv->long_ref_count; i++) {
3738 if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3740 gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, FALSE);
3741 ARRAY_REMOVE_INDEX (priv->long_ref, i);
3746 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3748 exec_ref_pic_marking_adaptive_mmco_5 (GstVaapiDecoderH264 * decoder,
3749 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3751 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3753 dpb_flush (decoder, picture);
3755 priv->prev_pic_has_mmco5 = TRUE;
3757 /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3758 priv->frame_num = 0;
3759 priv->frame_num_offset = 0;
3760 picture->frame_num = 0;
3762 /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3763 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3764 picture->field_poc[TOP_FIELD] -= picture->base.poc;
3765 if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3766 picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3767 picture->base.poc = 0;
3770 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3772 exec_ref_pic_marking_adaptive_mmco_6 (GstVaapiDecoderH264 * decoder,
3773 GstVaapiPictureH264 * picture, GstH264RefPicMarking * ref_pic_marking)
3775 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3776 GstVaapiPictureH264 *other_field;
3779 for (i = 0; i < priv->long_ref_count; i++) {
3780 if (priv->long_ref[i]->long_term_frame_idx ==
3781 ref_pic_marking->long_term_frame_idx)
3784 if (i != priv->long_ref_count) {
3785 gst_vaapi_picture_h264_set_reference (priv->long_ref[i], 0, TRUE);
3786 ARRAY_REMOVE_INDEX (priv->long_ref, i);
3789 picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3790 gst_vaapi_picture_h264_set_reference (picture,
3791 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3792 GST_VAAPI_PICTURE_IS_COMPLETE (picture));
3794 /* Assign LongTermFrameIdx to the other field if it was also
3795 marked as "used for long-term reference */
3796 other_field = GST_VAAPI_PICTURE_H264 (picture->base.parent_picture);
3797 if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (other_field))
3798 other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3801 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3803 exec_ref_pic_marking_adaptive (GstVaapiDecoderH264 * decoder,
3804 GstVaapiPictureH264 * picture,
3805 GstH264DecRefPicMarking * dec_ref_pic_marking)
3809 typedef void (*exec_ref_pic_marking_adaptive_mmco_func) (GstVaapiDecoderH264 *
3810 decoder, GstVaapiPictureH264 * picture,
3811 GstH264RefPicMarking * ref_pic_marking);
3813 static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3815 exec_ref_pic_marking_adaptive_mmco_1,
3816 exec_ref_pic_marking_adaptive_mmco_2,
3817 exec_ref_pic_marking_adaptive_mmco_3,
3818 exec_ref_pic_marking_adaptive_mmco_4,
3819 exec_ref_pic_marking_adaptive_mmco_5,
3820 exec_ref_pic_marking_adaptive_mmco_6,
3823 GST_DEBUG ("reference picture marking process (adaptive memory control)");
3825 for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3826 GstH264RefPicMarking *const ref_pic_marking =
3827 &dec_ref_pic_marking->ref_pic_marking[i];
3829 const guint mmco = ref_pic_marking->memory_management_control_operation;
3830 if (mmco < G_N_ELEMENTS (mmco_funcs) && mmco_funcs[mmco])
3831 mmco_funcs[mmco] (decoder, picture, ref_pic_marking);
3833 GST_ERROR ("unhandled MMCO %u", mmco);
3840 /* 8.2.5 - Execute reference picture marking process */
3842 exec_ref_pic_marking (GstVaapiDecoderH264 * decoder,
3843 GstVaapiPictureH264 * picture)
3845 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3847 priv->prev_pic_reference = GST_VAAPI_PICTURE_IS_REFERENCE (picture);
3848 priv->prev_pic_has_mmco5 = FALSE;
3849 priv->prev_pic_structure = picture->structure;
3851 if (GST_VAAPI_PICTURE_IS_INTER_VIEW (picture))
3852 g_ptr_array_add (priv->inter_views, gst_vaapi_picture_ref (picture));
3854 if (!priv->prev_pic_reference)
3857 if (!GST_VAAPI_PICTURE_IS_IDR (picture)) {
3858 GstH264DecRefPicMarking *const dec_ref_pic_marking =
3859 &picture->last_slice_hdr->dec_ref_pic_marking;
3860 if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3861 if (!exec_ref_pic_marking_adaptive (decoder, picture,
3862 dec_ref_pic_marking))
3865 if (!exec_ref_pic_marking_sliding_window (decoder))
3873 vaapi_init_picture (VAPictureH264 * pic)
3875 pic->picture_id = VA_INVALID_ID;
3877 pic->flags = VA_PICTURE_H264_INVALID;
3878 pic->TopFieldOrderCnt = 0;
3879 pic->BottomFieldOrderCnt = 0;
3883 vaapi_fill_picture (VAPictureH264 * pic, GstVaapiPictureH264 * picture,
3884 guint picture_structure)
3886 if (!picture_structure)
3887 picture_structure = picture->structure;
3889 pic->picture_id = picture->base.surface_id;
3892 if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE (picture)) {
3893 pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3894 pic->frame_idx = picture->long_term_frame_idx;
3896 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE (picture))
3897 pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3898 pic->frame_idx = picture->frame_num;
3901 switch (picture_structure) {
3902 case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3903 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3904 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3906 case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3907 pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3908 pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3909 pic->BottomFieldOrderCnt = 0;
3911 case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3912 pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3913 pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3914 pic->TopFieldOrderCnt = 0;
3920 vaapi_fill_picture_for_RefPicListX (VAPictureH264 * pic,
3921 GstVaapiPictureH264 * picture)
3923 vaapi_fill_picture (pic, picture, 0);
3925 /* H.8.4 - MVC inter prediction and inter-view prediction process */
3926 if (GST_VAAPI_PICTURE_IS_INTER_VIEW (picture)) {
3927 /* The inter-view reference components and inter-view only
3928 reference components that are included in the reference
3929 picture lists are considered as not being marked as "used for
3930 short-term reference" or "used for long-term reference" */
3931 pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE |
3932 VA_PICTURE_H264_LONG_TERM_REFERENCE);
3937 fill_picture (GstVaapiDecoderH264 * decoder, GstVaapiPictureH264 * picture)
3939 GstVaapiDecoderH264Private *const priv = &decoder->priv;
3940 GstVaapiPicture *const base_picture = &picture->base;
3941 GstH264PPS *const pps = get_pps (decoder);
3942 GstH264SPS *const sps = get_sps (decoder);
3943 VAPictureParameterBufferH264 *const pic_param = base_picture->param;
3946 /* Fill in VAPictureParameterBufferH264 */
3947 vaapi_fill_picture (&pic_param->CurrPic, picture, 0);
3949 for (i = 0, n = 0; i < priv->dpb_count; i++) {
3950 GstVaapiFrameStore *const fs = priv->dpb[i];
3951 if ((gst_vaapi_frame_store_has_reference (fs) &&
3952 fs->view_id == picture->base.view_id) ||
3953 (gst_vaapi_frame_store_has_inter_view (fs) &&
3954 is_inter_view_reference_for_picture (decoder, fs->view_id,
3956 vaapi_fill_picture (&pic_param->ReferenceFrames[n++], fs->buffers[0],
3958 if (n >= G_N_ELEMENTS (pic_param->ReferenceFrames))
3961 for (; n < G_N_ELEMENTS (pic_param->ReferenceFrames); n++)
3962 vaapi_init_picture (&pic_param->ReferenceFrames[n]);
3964 #define COPY_FIELD(s, f) \
3965 pic_param->f = (s)->f
3967 #define COPY_BFM(a, s, f) \
3968 pic_param->a.bits.f = (s)->f
3970 pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
3971 pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3972 pic_param->frame_num = priv->frame_num;
3974 COPY_FIELD (sps, bit_depth_luma_minus8);
3975 COPY_FIELD (sps, bit_depth_chroma_minus8);
3976 COPY_FIELD (sps, num_ref_frames);
3977 if (pic_param->num_ref_frames == 0)
3978 pic_param->num_ref_frames = priv->dpb_size;
3980 #if !VA_CHECK_VERSION(1,0,0)
3981 /* Deprecate H.264 baseline profile and FMO support */
3982 COPY_FIELD (pps, num_slice_groups_minus1);
3983 COPY_FIELD (pps, slice_group_map_type);
3984 COPY_FIELD (pps, slice_group_change_rate_minus1);
3986 COPY_FIELD (pps, pic_init_qp_minus26);
3987 COPY_FIELD (pps, pic_init_qs_minus26);
3988 COPY_FIELD (pps, chroma_qp_index_offset);
3989 COPY_FIELD (pps, second_chroma_qp_index_offset);
3991 pic_param->seq_fields.value = 0; /* reset all bits */
3992 pic_param->seq_fields.bits.residual_colour_transform_flag =
3993 sps->separate_colour_plane_flag;
3994 pic_param->seq_fields.bits.MinLumaBiPredSize8x8 = sps->level_idc >= 31; /* A.3.3.2 */
3996 COPY_BFM (seq_fields, sps, chroma_format_idc);
3997 COPY_BFM (seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3998 COPY_BFM (seq_fields, sps, frame_mbs_only_flag);
3999 COPY_BFM (seq_fields, sps, mb_adaptive_frame_field_flag);
4000 COPY_BFM (seq_fields, sps, direct_8x8_inference_flag);
4001 COPY_BFM (seq_fields, sps, log2_max_frame_num_minus4);
4002 COPY_BFM (seq_fields, sps, pic_order_cnt_type);
4003 COPY_BFM (seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
4004 COPY_BFM (seq_fields, sps, delta_pic_order_always_zero_flag);
4006 pic_param->pic_fields.value = 0; /* reset all bits */
4007 pic_param->pic_fields.bits.field_pic_flag =
4008 GST_VAAPI_PICTURE_IS_INTERLACED (picture);
4009 pic_param->pic_fields.bits.reference_pic_flag =
4010 GST_VAAPI_PICTURE_IS_REFERENCE (picture);
4012 COPY_BFM (pic_fields, pps, entropy_coding_mode_flag);
4013 COPY_BFM (pic_fields, pps, weighted_pred_flag);
4014 COPY_BFM (pic_fields, pps, weighted_bipred_idc);
4015 COPY_BFM (pic_fields, pps, transform_8x8_mode_flag);
4016 COPY_BFM (pic_fields, pps, constrained_intra_pred_flag);
4017 COPY_BFM (pic_fields, pps, pic_order_present_flag);
4018 COPY_BFM (pic_fields, pps, deblocking_filter_control_present_flag);
4019 COPY_BFM (pic_fields, pps, redundant_pic_cnt_present_flag);
4023 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
4025 is_new_picture (GstVaapiParserInfoH264 * pi, GstVaapiParserInfoH264 * prev_pi)
4027 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
4028 GstH264PPS *const pps = slice_hdr->pps;
4029 GstH264SPS *const sps = pps->sequence;
4030 GstH264SliceHdr *prev_slice_hdr;
4034 prev_slice_hdr = &prev_pi->data.slice_hdr;
4036 #define CHECK_EXPR(expr, field_name) do { \
4038 GST_DEBUG(field_name " differs in value"); \
4043 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
4044 CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
4046 /* view_id differs in value and VOIdx of current slice_hdr is less
4047 than the VOIdx of the prev_slice_hdr */
4048 CHECK_VALUE (pi, prev_pi, view_id);
4050 /* frame_num differs in value, regardless of inferred values to 0 */
4051 CHECK_VALUE (slice_hdr, prev_slice_hdr, frame_num);
4053 /* pic_parameter_set_id differs in value */
4054 CHECK_VALUE (slice_hdr, prev_slice_hdr, pps);
4056 /* field_pic_flag differs in value */
4057 CHECK_VALUE (slice_hdr, prev_slice_hdr, field_pic_flag);
4059 /* bottom_field_flag is present in both and differs in value */
4060 if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
4061 CHECK_VALUE (slice_hdr, prev_slice_hdr, bottom_field_flag);
4063 /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
4064 CHECK_EXPR ((pi->nalu.ref_idc != 0) ==
4065 (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
4067 /* POC type is 0 for both and either pic_order_cnt_lsb differs in
4068 value or delta_pic_order_cnt_bottom differs in value */
4069 if (sps->pic_order_cnt_type == 0) {
4070 CHECK_VALUE (slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
4071 if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
4072 CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
4075 /* POC type is 1 for both and either delta_pic_order_cnt[0]
4076 differs in value or delta_pic_order_cnt[1] differs in value */
4077 else if (sps->pic_order_cnt_type == 1) {
4078 CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
4079 CHECK_VALUE (slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
4082 /* IdrPicFlag differs in value */
4083 CHECK_VALUE (&pi->nalu, &prev_pi->nalu, idr_pic_flag);
4085 /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
4086 if (pi->nalu.idr_pic_flag)
4087 CHECK_VALUE (slice_hdr, prev_slice_hdr, idr_pic_id);
4094 /* Detection of a new access unit, assuming we are already in presence
4096 static inline gboolean
4097 is_new_access_unit (GstVaapiParserInfoH264 * pi,
4098 GstVaapiParserInfoH264 * prev_pi)
4100 if (!prev_pi || prev_pi->view_id == pi->view_id)
4102 return pi->voc < prev_pi->voc;
4105 /* Determines whether the supplied picture has the same field parity
4106 than a picture specified through the other slice header */
4107 static inline gboolean
4108 same_field_parity (GstVaapiPictureH264 * field, GstH264SliceHdr * slice_hdr)
4110 g_return_val_if_fail (GST_VAAPI_PICTURE_IS_INTERLACED (field), FALSE);
4112 return ((field->base.structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD) ^
4113 slice_hdr->bottom_field_flag) == 0;
4116 /* Finds the first field picture corresponding to the supplied picture */
4117 static GstVaapiPictureH264 *
4118 find_first_field (GstVaapiDecoderH264 * decoder, GstVaapiParserInfoH264 * pi)
4120 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4121 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
4122 GstVaapiFrameStore *fs;
4123 GstVaapiPictureH264 *f0, *f1;
4125 fs = priv->prev_frames[pi->voc];
4129 f0 = fs->buffers[0];
4130 if (!slice_hdr->field_pic_flag) {
4131 if (!gst_vaapi_frame_store_has_frame (fs))
4132 fill_picture_other_field_gap (decoder, f0);
4136 /* At this point, the current frame is known to be interlaced */
4137 if (gst_vaapi_frame_store_has_frame (fs)) {
4141 /* At this point, the previous frame is interlaced and contains a
4143 if (f0->frame_num == slice_hdr->frame_num) {
4145 if (same_field_parity (f0, slice_hdr)) {
4146 fill_picture_other_field_gap (decoder, f0);
4152 fill_picture_other_field_gap (decoder, f0);
4156 static GstVaapiDecoderStatus
4157 decode_picture (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
4159 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4160 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
4161 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
4162 GstH264PPS *const pps = ensure_pps (decoder, slice_hdr->pps);
4163 GstH264SPS *const sps = ensure_sps (decoder, slice_hdr->pps->sequence);
4164 GstVaapiPictureH264 *picture, *first_field;
4165 GstVaapiDecoderStatus status;
4168 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4170 status = ensure_context (decoder, sps);
4171 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4174 priv->decoder_state = 0;
4176 first_field = find_first_field (decoder, pi);
4178 /* Re-use current picture where the first field was decoded */
4179 picture = gst_vaapi_picture_h264_new_field (first_field);
4181 GST_ERROR ("failed to allocate field picture");
4182 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
4185 /* Create new picture */
4186 picture = gst_vaapi_picture_h264_new (decoder);
4188 GST_ERROR ("failed to allocate picture");
4189 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
4192 gst_vaapi_picture_replace (&priv->current_picture, picture);
4193 gst_vaapi_picture_unref (picture);
4195 /* Clear inter-view references list if this is the primary coded
4196 picture of the current access unit */
4197 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
4198 g_ptr_array_set_size (priv->inter_views, 0);
4200 /* Update cropping rectangle */
4201 if (sps->frame_cropping_flag) {
4202 GstVaapiRectangle crop_rect;
4203 crop_rect.x = sps->crop_rect_x;
4204 crop_rect.y = sps->crop_rect_y;
4205 crop_rect.width = sps->crop_rect_width;
4206 crop_rect.height = sps->crop_rect_height;
4207 gst_vaapi_picture_set_crop_rect (&picture->base, &crop_rect);
4210 status = ensure_quant_matrix (decoder, picture);
4211 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
4212 GST_ERROR ("failed to reset quantizer matrix");
4216 if (!init_picture (decoder, picture, pi))
4217 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4218 if (!fill_picture (decoder, picture))
4219 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4221 priv->decoder_state = pi->state;
4222 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4226 get_slice_data_bit_offset (GstH264SliceHdr * slice_hdr, guint nal_header_bytes)
4230 epb_count = slice_hdr->n_emulation_prevention_bytes;
4231 return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
/* fill_pred_weight_table:
 * Copies the explicit weighted-prediction parameters parsed from the
 * slice header (pred_weight_table) into the VASliceParameterBufferH264
 * bound to @slice.
 * NOTE(review): some lines (braces, loop-index declarations, early
 * returns) are elided from this excerpt; code kept byte-identical. */
4235 fill_pred_weight_table (GstVaapiDecoderH264 * decoder,
4236 GstVaapiSlice * slice, GstH264SliceHdr * slice_hdr)
4238 VASliceParameterBufferH264 *const slice_param = slice->param;
4239 GstH264PPS *const pps = get_pps (decoder);
4240 GstH264SPS *const sps = get_sps (decoder);
4241 GstH264PredWeightTable *const w = &slice_hdr->pred_weight_table;
4242 guint num_weight_tables = 0;
/* How many weight tables apply: 1 (list 0) for weighted P/SP slices,
 * 2 (lists 0 and 1) for B slices when weighted_bipred_idc == 1,
 * otherwise none. */
4245 if (pps->weighted_pred_flag &&
4246 (GST_H264_IS_P_SLICE (slice_hdr) || GST_H264_IS_SP_SLICE (slice_hdr)))
4247 num_weight_tables = 1;
4248 else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE (slice_hdr))
4249 num_weight_tables = 2;
4251 num_weight_tables = 0;
/* Reset all weight-related fields before optionally filling them in. */
4253 slice_param->luma_log2_weight_denom = 0;
4254 slice_param->chroma_log2_weight_denom = 0;
4255 slice_param->luma_weight_l0_flag = 0;
4256 slice_param->chroma_weight_l0_flag = 0;
4257 slice_param->luma_weight_l1_flag = 0;
4258 slice_param->chroma_weight_l1_flag = 0;
/* Nothing more to do unless at least list 0 uses explicit weights. */
4260 if (num_weight_tables < 1)
4263 slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
4264 slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
/* Reference list 0 luma weights/offsets. */
4266 slice_param->luma_weight_l0_flag = 1;
4267 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
4268 slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
4269 slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
/* Chroma weights are only copied when chroma_array_type is non-zero. */
4272 slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
4273 if (slice_param->chroma_weight_l0_flag) {
4274 for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
4275 for (j = 0; j < 2; j++) {
4276 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
4277 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
/* Reference list 1 weights/offsets (only when two tables apply). */
4282 if (num_weight_tables < 2)
4285 slice_param->luma_weight_l1_flag = 1;
4286 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
4287 slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
4288 slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
4291 slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
4292 if (slice_param->chroma_weight_l1_flag) {
4293 for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
4294 for (j = 0; j < 2; j++) {
4295 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
4296 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
/* fill_RefPicList:
 * Copies the decoder's active reference picture lists
 * (priv->RefPicList0/1) into the RefPicList0/1 arrays of the
 * VASliceParameterBufferH264 bound to @slice.
 * NOTE(review): several lines (braces, list-count assignments, early
 * returns) are elided from this excerpt; code kept byte-identical. */
4304 fill_RefPicList (GstVaapiDecoderH264 * decoder,
4305 GstVaapiSlice * slice, GstH264SliceHdr * slice_hdr)
4307 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4308 VASliceParameterBufferH264 *const slice_param = slice->param;
4309 guint i, num_ref_lists = 0;
4311 slice_param->num_ref_idx_l0_active_minus1 = 0;
4312 slice_param->num_ref_idx_l1_active_minus1 = 0;
4314 /* ensure empty list by default */
4315 vaapi_init_picture (&slice_param->RefPicList0[0]);
4316 vaapi_init_picture (&slice_param->RefPicList1[0]);
/* Number of lists in use per slice type — presumably 2 for B, 0 for I,
 * 1 otherwise; the assignment lines are elided from this excerpt. */
4318 if (GST_H264_IS_B_SLICE (slice_hdr))
4320 else if (GST_H264_IS_I_SLICE (slice_hdr))
4325 if (num_ref_lists < 1)
/* List 0: copy populated entries, then invalidate the next slot as a
 * terminator. */
4328 slice_param->num_ref_idx_l0_active_minus1 =
4329 slice_hdr->num_ref_idx_l0_active_minus1;
4331 for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
4332 vaapi_fill_picture_for_RefPicListX (&slice_param->RefPicList0[i],
4333 priv->RefPicList0[i]);
4335 vaapi_init_picture (&slice_param->RefPicList0[i]);
4337 if (num_ref_lists < 2)
/* List 1: same treatment, used by B slices. */
4340 slice_param->num_ref_idx_l1_active_minus1 =
4341 slice_hdr->num_ref_idx_l1_active_minus1;
4343 for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
4344 vaapi_fill_picture_for_RefPicListX (&slice_param->RefPicList1[i],
4345 priv->RefPicList1[i]);
4347 vaapi_init_picture (&slice_param->RefPicList1[i]);
/* fill_slice:
 * Populates the VASliceParameterBufferH264 for one slice from the
 * parsed slice header, then fills in the reference picture lists and
 * the prediction weight table. */
4353 fill_slice (GstVaapiDecoderH264 * decoder,
4354 GstVaapiSlice * slice, GstVaapiParserInfoH264 * pi)
4356 VASliceParameterBufferH264 *const slice_param = slice->param;
4357 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
4359 /* Fill in VASliceParameterBufferH264 */
4360 slice_param->slice_data_bit_offset =
4361 get_slice_data_bit_offset (slice_hdr, pi->nalu.header_bytes);
4362 slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
/* slice_type is taken modulo 5 (H.264 slice types 5..9 duplicate 0..4). */
4363 slice_param->slice_type = slice_hdr->type % 5;
4364 slice_param->direct_spatial_mv_pred_flag =
4365 slice_hdr->direct_spatial_mv_pred_flag;
4366 slice_param->cabac_init_idc = slice_hdr->cabac_init_idc;
4367 slice_param->slice_qp_delta = slice_hdr->slice_qp_delta;
4368 slice_param->disable_deblocking_filter_idc =
4369 slice_hdr->disable_deblocking_filter_idc;
4370 slice_param->slice_alpha_c0_offset_div2 =
4371 slice_hdr->slice_alpha_c0_offset_div2;
4372 slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
/* Propagate helper failures (the failure-return lines are elided from
 * this excerpt). */
4374 if (!fill_RefPicList (decoder, slice, slice_hdr))
4376 if (!fill_pred_weight_table (decoder, slice, slice_hdr))
/* decode_slice:
 * Decodes one slice NAL unit: checks that enough headers were seen,
 * activates the slice's PPS/SPS, maps the input buffer, allocates a
 * GstVaapiSlice, initializes reference lists and slice parameters, and
 * appends the slice to the current picture. */
4381 static GstVaapiDecoderStatus
4382 decode_slice (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
4384 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4385 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
4386 GstVaapiPictureH264 *const picture = priv->current_picture;
4387 GstH264SliceHdr *const slice_hdr = &pi->data.slice_hdr;
4388 GstVaapiSlice *slice;
4389 GstBuffer *const buffer =
4390 GST_VAAPI_DECODER_CODEC_FRAME (decoder)->input_buffer;
4391 GstMapInfo map_info;
4393 GST_DEBUG ("slice (%u bytes)", pi->nalu.size);
/* Without valid picture headers the slice is skipped, not treated as a
 * hard error. */
4395 if (!is_valid_state (pi->state, GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
4396 GST_WARNING ("failed to receive enough headers to decode slice");
4397 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4400 if (!ensure_pps (decoder, slice_hdr->pps)) {
4401 GST_ERROR ("failed to activate PPS");
4402 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4405 if (!ensure_sps (decoder, slice_hdr->pps->sequence)) {
4406 GST_ERROR ("failed to activate SPS");
4407 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4410 if (!gst_buffer_map (buffer, &map_info, GST_MAP_READ)) {
4411 GST_ERROR ("failed to map buffer");
4412 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4415 /* Check whether this is the first/last slice in the current access unit */
4416 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
4417 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_START);
4418 if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
4419 GST_VAAPI_PICTURE_FLAG_SET (picture, GST_VAAPI_PICTURE_FLAG_AU_END);
/* Create the VA slice from the mapped NAL payload; the mapping is
 * released immediately afterwards. NOTE(review): the NULL check for the
 * allocation appears elided from this excerpt. */
4421 slice = GST_VAAPI_SLICE_NEW (H264, decoder,
4422 (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
4423 gst_buffer_unmap (buffer, &map_info);
4425 GST_ERROR ("failed to allocate slice");
4426 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* On failure past this point the slice must be unreffed explicitly. */
4429 if (!init_picture_refs (decoder, picture, slice_hdr)) {
4430 gst_vaapi_mini_object_unref (GST_VAAPI_MINI_OBJECT (slice));
4431 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4434 if (!fill_slice (decoder, slice, pi)) {
4435 gst_vaapi_mini_object_unref (GST_VAAPI_MINI_OBJECT (slice));
4436 return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
4439 gst_vaapi_picture_add_slice (GST_VAAPI_PICTURE_CAST (picture), slice);
/* Remember the last slice header for end-of-picture processing. */
4440 picture->last_slice_hdr = slice_hdr;
4441 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4445 scan_for_start_code (GstAdapter * adapter, guint ofs, guint size, guint32 * scp)
4450 return (gint) gst_adapter_masked_scan_uint32_peek (adapter,
4451 0xffffff00, 0x00000100, ofs, size, scp);
/* decode_unit:
 * Dispatches a single parsed NAL unit to the matching decode_*()
 * handler and folds the unit's parser state into the decoder state.
 * NOTE(review): the "break" statements of this switch are elided from
 * this excerpt; code kept byte-identical. */
4454 static GstVaapiDecoderStatus
4455 decode_unit (GstVaapiDecoderH264 * decoder, GstVaapiDecoderUnit * unit)
4457 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4458 GstVaapiParserInfoH264 *const pi = unit->parsed_info;
4459 GstVaapiDecoderStatus status;
4461 priv->decoder_state |= pi->state;
4462 switch (pi->nalu.type) {
4463 case GST_H264_NAL_SPS:
4464 status = decode_sps (decoder, unit);
4466 case GST_H264_NAL_SUBSET_SPS:
4467 status = decode_subset_sps (decoder, unit);
4469 case GST_H264_NAL_PPS:
4470 status = decode_pps (decoder, unit);
4472 case GST_H264_NAL_SLICE_EXT:
4473 case GST_H264_NAL_SLICE_IDR:
4474 /* fall-through. IDR specifics are handled in init_picture() */
4475 case GST_H264_NAL_SLICE:
4476 status = decode_slice (decoder, unit);
4478 case GST_H264_NAL_SEQ_END:
4479 case GST_H264_NAL_STREAM_END:
4480 status = decode_sequence_end (decoder);
4482 case GST_H264_NAL_SEI:
4483 status = decode_sei (decoder, unit);
/* Known-but-unsupported NAL unit types are skipped without error. */
4485 case GST_H264_NAL_SLICE_DPA:
4486 case GST_H264_NAL_SLICE_DPB:
4487 case GST_H264_NAL_SLICE_DPC:
4488 case GST_H264_NAL_AU_DELIMITER:
4489 case GST_H264_NAL_FILLER_DATA:
4490 case GST_H264_NAL_SPS_EXT:
4491 case GST_H264_NAL_PREFIX_UNIT:
4492 case GST_H264_NAL_DEPTH_SPS:
4493 case GST_H264_NAL_SLICE_AUX:
4494 case GST_H264_NAL_SLICE_DEPTH:
4495 GST_DEBUG ("unsupported NAL unit type %d, just skip", pi->nalu.type);
4496 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Unknown NAL types are also skipped, with a warning. */
4499 GST_WARNING ("unknown NAL unit type id %d, skip", pi->nalu.type);
4500 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_decode_codec_data:
 * Parses avcC-formatted codec-data: reads the NAL length prefix size,
 * then parses and decodes each SPS and PPS NAL unit it contains.
 * NOTE(review): several lines (size/format validation, loop braces,
 * the cleanup label) are elided from this excerpt; code kept
 * byte-identical. */
4506 static GstVaapiDecoderStatus
4507 gst_vaapi_decoder_h264_decode_codec_data (GstVaapiDecoder * base_decoder,
4508 const guchar * buf, guint buf_size)
4510 GstVaapiDecoderH264 *const decoder =
4511 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4512 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4513 GstVaapiDecoderStatus status;
4514 GstVaapiDecoderUnit unit;
4515 GstVaapiParserInfoH264 *pi = NULL;
4516 GstH264ParserResult result;
4517 guint i, ofs, num_sps, num_pps;
/* Codec-data is only processed once the decoder is opened. */
4519 if (!priv->is_opened)
4520 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4522 unit.parsed_info = NULL;
4525 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4528 GST_ERROR ("failed to decode codec-data, not in avcC format");
4529 return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
/* lengthSizeMinusOne is stored in the two low bits of byte 4. */
4532 priv->nal_length_size = (buf[4] & 0x03) + 1;
4534 num_sps = buf[5] & 0x1f;
/* Parse, then decode, each SPS NAL unit. */
4537 for (i = 0; i < num_sps; i++) {
4538 pi = gst_vaapi_parser_info_h264_new ();
4540 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
4541 unit.parsed_info = pi;
4543 result = gst_h264_parser_identify_nalu_avc (priv->parser,
4544 buf, ofs, buf_size, 2, &pi->nalu);
4545 if (result != GST_H264_PARSER_OK) {
4546 status = get_status (result);
4550 status = parse_sps (decoder, &unit);
4551 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4553 ofs = pi->nalu.offset + pi->nalu.size;
4555 pi->state = priv->parser_state;
4558 status = decode_sps (decoder, &unit);
4559 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4561 gst_vaapi_parser_info_h264_replace (&pi, NULL);
/* Parse, then decode, each PPS NAL unit. */
4567 for (i = 0; i < num_pps; i++) {
4568 pi = gst_vaapi_parser_info_h264_new ();
4570 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
4571 unit.parsed_info = pi;
4573 result = gst_h264_parser_identify_nalu_avc (priv->parser,
4574 buf, ofs, buf_size, 2, &pi->nalu);
4575 if (result != GST_H264_PARSER_OK) {
4576 status = get_status (result);
4580 status = parse_pps (decoder, &unit);
4581 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4583 ofs = pi->nalu.offset + pi->nalu.size;
4585 pi->state = priv->parser_state;
4588 status = decode_pps (decoder, &unit);
4589 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4591 gst_vaapi_parser_info_h264_replace (&pi, NULL);
4594 priv->is_avcC = TRUE;
4595 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Cleanup path: release any pending parser info. */
4599 gst_vaapi_parser_info_h264_replace (&pi, NULL);
/* ensure_decoder:
 * Lazily opens the decoder on first use and, once opened, lets the base
 * class process any codec-data attached to the stream.
 * NOTE(review): the "status =" assignment that pairs with the call
 * below is elided from this excerpt; code kept byte-identical. */
4604 static GstVaapiDecoderStatus
4605 ensure_decoder (GstVaapiDecoderH264 * decoder)
4607 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4608 GstVaapiDecoderStatus status;
4610 if (!priv->is_opened) {
4611 priv->is_opened = gst_vaapi_decoder_h264_open (decoder);
4612 if (!priv->is_opened)
4613 return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
4616 gst_vaapi_decoder_decode_codec_data (GST_VAAPI_DECODER_CAST (decoder));
4617 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4620 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* gst_vaapi_decoder_h264_parse:
 * GstVaapiDecoderClass::parse() implementation. Extracts exactly one
 * NAL unit from @adapter (avcC length-prefixed or Annex-B start-code
 * delimited), parses its headers, and flags @unit with frame /
 * access-unit boundary information.
 * NOTE(review): numerous lines (braces, "else" branches, labels and
 * gotos) are elided from this excerpt; code kept byte-identical. */
4623 static GstVaapiDecoderStatus
4624 gst_vaapi_decoder_h264_parse (GstVaapiDecoder * base_decoder,
4625 GstAdapter * adapter, gboolean at_eos, GstVaapiDecoderUnit * unit)
4627 GstVaapiDecoderH264 *const decoder =
4628 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4629 GstVaapiDecoderH264Private *const priv = &decoder->priv;
4630 GstVaapiParserState *const ps = GST_VAAPI_PARSER_STATE (base_decoder);
4631 GstVaapiParserInfoH264 *pi;
4632 GstVaapiDecoderStatus status;
4633 GstH264ParserResult result;
4635 guint i, size, buf_size, nalu_size, flags;
4638 gboolean at_au_end = FALSE;
4640 status = ensure_decoder (decoder);
4641 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* With NALU/AU alignment, each upstream buffer is self-contained, so
 * only the contiguous head of the adapter needs to be considered. */
4644 switch (priv->stream_alignment) {
4645 case GST_VAAPI_STREAM_ALIGN_H264_NALU:
4646 case GST_VAAPI_STREAM_ALIGN_H264_AU:
4647 size = gst_adapter_available_fast (adapter);
4650 size = gst_adapter_available (adapter);
/* avcC mode: NAL units are length-prefixed with nal_length_size bytes. */
4654 if (priv->is_avcC) {
4655 if (size < priv->nal_length_size)
4656 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4658 buf = (guchar *) & start_code;
4659 g_assert (priv->nal_length_size <= sizeof (start_code));
4660 gst_adapter_copy (adapter, buf, 0, priv->nal_length_size);
/* Accumulate the big-endian NAL unit size from the length prefix. */
4663 for (i = 0; i < priv->nal_length_size; i++)
4664 nalu_size = (nalu_size << 8) | buf[i];
4666 buf_size = priv->nal_length_size + nalu_size;
4667 if (size < buf_size)
4668 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4669 else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
4670 at_au_end = (buf_size == size);
/* Annex-B mode: delimit this NAL unit between two start codes. */
4673 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4675 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU) {
4677 ofs = scan_for_start_code (adapter, 4, size - 4, NULL);
4681 ofs = scan_for_start_code (adapter, 0, size, NULL);
4683 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
/* Drop any leading bytes before the first start code. */
4686 gst_adapter_flush (adapter, ofs);
/* ps->input_offset2 caches the scan position across calls so already
 * examined bytes are not re-scanned. */
4690 ofs2 = ps->input_offset2 - ofs - 4;
4694 ofs = G_UNLIKELY (size < ofs2 + 4) ? -1 :
4695 scan_for_start_code (adapter, ofs2, size - ofs2, NULL);
4697 // Assume the whole NAL unit is present if end-of-stream
4698 // or stream buffers aligned on access unit boundaries
4699 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
4702 ps->input_offset2 = size;
4703 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4710 ps->input_offset2 = 0;
4712 buf = (guchar *) gst_adapter_map (adapter, buf_size);
4714 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
4716 unit->size = buf_size;
4718 pi = gst_vaapi_parser_info_h264_new ();
4720 return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
/* Ownership of the parser info is transferred to the unit here. */
4722 gst_vaapi_decoder_unit_set_parsed_info (unit,
4723 pi, (GDestroyNotify) gst_vaapi_mini_object_unref);
4726 result = gst_h264_parser_identify_nalu_avc (priv->parser,
4727 buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
4729 result = gst_h264_parser_identify_nalu_unchecked (priv->parser,
4730 buf, 0, buf_size, &pi->nalu);
4731 status = get_status (result);
4732 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* In base-only mode, drop MVC-specific NAL units up front. */
4735 if (priv->base_only && (pi->nalu.type == GST_H264_NAL_PREFIX_UNIT
4736 || pi->nalu.type == GST_H264_NAL_SUBSET_SPS
4737 || pi->nalu.type == GST_H264_NAL_SLICE_EXT)) {
4738 GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, GST_VAAPI_DECODER_UNIT_FLAG_SKIP);
4739 pi->nalu.valid = FALSE;
4740 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Parse the headers relevant to this NAL unit type. */
4742 switch (pi->nalu.type) {
4743 case GST_H264_NAL_SPS:
4744 status = parse_sps (decoder, unit);
4746 case GST_H264_NAL_SUBSET_SPS:
4747 status = parse_subset_sps (decoder, unit);
4749 case GST_H264_NAL_PPS:
4750 status = parse_pps (decoder, unit);
4752 case GST_H264_NAL_SEI:
4753 status = parse_sei (decoder, unit);
4755 case GST_H264_NAL_SLICE_EXT:
4756 if (!GST_H264_IS_MVC_NALU (&pi->nalu)) {
4757 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4761 case GST_H264_NAL_SLICE_IDR:
4762 case GST_H264_NAL_SLICE:
4763 status = parse_slice (decoder, unit);
4766 status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4769 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
/* Derive frame / access-unit boundary flags for this unit. */
4774 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
4775 GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4777 switch (pi->nalu.type) {
4778 case GST_H264_NAL_AU_DELIMITER:
4779 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4780 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4782 case GST_H264_NAL_FILLER_DATA:
4783 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4785 case GST_H264_NAL_STREAM_END:
4786 flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4788 case GST_H264_NAL_SEQ_END:
4789 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4790 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4792 case GST_H264_NAL_SPS:
4793 case GST_H264_NAL_SUBSET_SPS:
4794 case GST_H264_NAL_PPS:
4795 case GST_H264_NAL_SEI:
4796 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4797 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4799 case GST_H264_NAL_SLICE_EXT:
4800 if (!GST_H264_IS_MVC_NALU (&pi->nalu)) {
4801 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4805 case GST_H264_NAL_SLICE_IDR:
4806 case GST_H264_NAL_SLICE:
4807 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4808 if (priv->prev_pi &&
4809 (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
4810 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4811 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4812 } else if (is_new_picture (pi, priv->prev_slice_pi)) {
4813 flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4814 if (is_new_access_unit (pi, priv->prev_slice_pi))
4815 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4817 gst_vaapi_parser_info_h264_replace (&priv->prev_slice_pi, pi);
4819 case GST_H264_NAL_SPS_EXT:
4820 case GST_H264_NAL_SLICE_AUX:
4821 /* skip SPS extension and auxiliary slice for now */
4822 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4824 case GST_H264_NAL_PREFIX_UNIT:
4825 /* skip Prefix NAL units for now */
4826 flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4827 GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4828 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* NAL types 14..18 also begin a new access unit / frame. */
4831 if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4832 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4833 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* Starting a new AU retroactively marks the previous slice as AU end. */
4836 if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4837 priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4838 GST_VAAPI_DECODER_UNIT_FLAG_SET (unit, flags);
/* Clear the raw data pointer — NOTE(review): presumably it points into
 * adapter storage that is no longer valid once unmapped; only the
 * parsed fields are kept. */
4840 pi->nalu.data = NULL;
4841 pi->state = priv->parser_state;
4843 gst_vaapi_parser_info_h264_replace (&priv->prev_pi, pi);
4844 return GST_VAAPI_DECODER_STATUS_SUCCESS;
/* Error path: discard this unit's bytes and the parser info. */
4848 gst_adapter_flush (adapter, unit->size);
4849 gst_vaapi_parser_info_h264_unref (pi);
4854 static GstVaapiDecoderStatus
4855 gst_vaapi_decoder_h264_decode (GstVaapiDecoder * base_decoder,
4856 GstVaapiDecoderUnit * unit)
4858 GstVaapiDecoderH264 *const decoder =
4859 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4860 GstVaapiDecoderStatus status;
4862 status = ensure_decoder (decoder);
4863 if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4865 return decode_unit (decoder, unit);
4868 static GstVaapiDecoderStatus
4869 gst_vaapi_decoder_h264_start_frame (GstVaapiDecoder * base_decoder,
4870 GstVaapiDecoderUnit * unit)
4872 GstVaapiDecoderH264 *const decoder =
4873 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4875 return decode_picture (decoder, unit);
4878 static GstVaapiDecoderStatus
4879 gst_vaapi_decoder_h264_end_frame (GstVaapiDecoder * base_decoder)
4881 GstVaapiDecoderH264 *const decoder =
4882 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4884 return decode_current_picture (decoder);
4887 static GstVaapiDecoderStatus
4888 gst_vaapi_decoder_h264_flush (GstVaapiDecoder * base_decoder)
4890 GstVaapiDecoderH264 *const decoder =
4891 GST_VAAPI_DECODER_H264_CAST (base_decoder);
4893 dpb_flush (decoder, NULL);
4894 return GST_VAAPI_DECODER_STATUS_SUCCESS;
4898 gst_vaapi_decoder_h264_finalize (GObject * object)
4900 GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (object);
4902 gst_vaapi_decoder_h264_destroy (base_decoder);
4903 G_OBJECT_CLASS (gst_vaapi_decoder_h264_parent_class)->finalize (object);
4907 gst_vaapi_decoder_h264_class_init (GstVaapiDecoderH264Class * klass)
4909 GObjectClass *const object_class = G_OBJECT_CLASS (klass);
4910 GstVaapiDecoderClass *const decoder_class = GST_VAAPI_DECODER_CLASS (klass);
4912 decoder_class->reset = gst_vaapi_decoder_h264_reset;
4913 decoder_class->parse = gst_vaapi_decoder_h264_parse;
4914 decoder_class->decode = gst_vaapi_decoder_h264_decode;
4915 decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
4916 decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
4917 decoder_class->flush = gst_vaapi_decoder_h264_flush;
4918 decoder_class->decode_codec_data = gst_vaapi_decoder_h264_decode_codec_data;
4920 object_class->finalize = gst_vaapi_decoder_h264_finalize;
4924 gst_vaapi_decoder_h264_init (GstVaapiDecoderH264 * decoder)
4926 GstVaapiDecoder *const base_decoder = GST_VAAPI_DECODER (decoder);
4928 gst_vaapi_decoder_h264_create (base_decoder);
4932 * gst_vaapi_decoder_h264_set_alignment:
4933 * @decoder: a #GstVaapiDecoderH264
4934 * @alignment: the #GstVaapiStreamAlignH264
4936 * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4937 * of each buffer that is supplied to the decoder. This could be no
4938 * specific alignment, NAL unit boundaries, or access unit boundaries.
4941 gst_vaapi_decoder_h264_set_alignment (GstVaapiDecoderH264 * decoder,
4942 GstVaapiStreamAlignH264 alignment)
4944 g_return_if_fail (decoder != NULL);
4946 decoder->priv.stream_alignment = alignment;
4950 * gst_vaapi_decoder_h264_set_base_only:
4951 * @decoder: a #GstVaapiDecoderH264
4952 * @base_only: %TRUE to force decoding the base view only
4954 * if @base_only is %TRUE only the base view of MVC or SVC encoded streams
4959 gst_vaapi_decoder_h264_set_base_only (GstVaapiDecoderH264 * decoder,
4962 g_return_if_fail (decoder != NULL);
4964 decoder->priv.base_only = base_only;
4968 * gst_vaapi_decoder_h264_set_low_latency:
4969 * @decoder: a #GstVaapiDecoderH264
4970 * @force_low_latency: %TRUE if force low latency
4972 * if @force_low_latency is %TRUE the decoded frames are pushed soon
4973 * as possible, instead of to wait until decoded picture buffer (DPB)
4976 * This violate the H.264 specification but it is useful for some live
4980 gst_vaapi_decoder_h264_set_low_latency (GstVaapiDecoderH264 * decoder,
4981 gboolean force_low_latency)
4983 g_return_if_fail (decoder != NULL);
4985 decoder->priv.force_low_latency = force_low_latency;
4989 * gst_vaapi_decoder_h264_get_low_latency:
4990 * @decoder: a #GstVaapiDecoderH264
4992 * Returns: %TRUE if the low latency mode is enabled; otherwise
4996 gst_vaapi_decoder_h264_get_low_latency (GstVaapiDecoderH264 * decoder)
4998 g_return_val_if_fail (decoder != NULL, FALSE);
5000 return decoder->priv.force_low_latency;
5004 * gst_vaapi_decoder_h264_new:
5005 * @display: a #GstVaapiDisplay
5006 * @caps: a #GstCaps holding codec information
5008 * Creates a new #GstVaapiDecoder for H.264 decoding. The @caps can
5009 * hold extra information like codec-data and picture coded size.
5011 * Return value: the newly allocated #GstVaapiDecoder object
5014 gst_vaapi_decoder_h264_new (GstVaapiDisplay * display, GstCaps * caps)
5016 return g_object_new (GST_TYPE_VAAPI_DECODER_H264, "display", display,
5017 "caps", caps, NULL);