h264: improve robustness when packets are missing.
gst-libs/gst/vaapi/gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2013 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 #define GST_VAAPI_PARSER_INFO_H264(obj) \
61     ((GstVaapiParserInfoH264 *)(obj))
62
63 struct _GstVaapiParserInfoH264 {
64     GstVaapiMiniObject  parent_instance;
65     GstH264NalUnit      nalu;
66     union {
67         GstH264SPS      sps;
68         GstH264PPS      pps;
69         GstH264SliceHdr slice_hdr;
70     }                   data;
71 };
72
73 static inline const GstVaapiMiniObjectClass *
74 gst_vaapi_parser_info_h264_class(void)
75 {
76     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
77         sizeof(GstVaapiParserInfoH264),
78         NULL
79     };
80     return &GstVaapiParserInfoH264Class;
81 }
82
83 static inline GstVaapiParserInfoH264 *
84 gst_vaapi_parser_info_h264_new(void)
85 {
86     return (GstVaapiParserInfoH264 *)
87         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
88 }
89
90 #define gst_vaapi_parser_info_h264_ref(pi) \
91     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
92
93 #define gst_vaapi_parser_info_h264_unref(pi) \
94     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
95
96 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
97     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
98         (GstVaapiMiniObject *)(new_pi))
99
100 /* ------------------------------------------------------------------------- */
101 /* --- H.264 Pictures                                                    --- */
102 /* ------------------------------------------------------------------------- */
103
104 /*
105  * Extended picture flags:
106  *
107  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
108  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
109  *     "used for short-term reference"
110  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
111  *     "used for long-term reference"
112  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
113  *     reference picture (short-term reference or long-term reference)
114  */
115 enum {
116     GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
117
118     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
119         GST_VAAPI_PICTURE_FLAG_REFERENCE),
120     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
121         GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
122     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
123         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
124         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
125 };
126
127 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
128     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
129
130 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
131     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
132       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
133      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
134
135 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
136     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
137       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
138      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
139
140 struct _GstVaapiPictureH264 {
141     GstVaapiPicture             base;
142     GstH264PPS                 *pps;
143     GstH264SliceHdr            *last_slice_hdr;
144     guint                       structure;
145     gint32                      field_poc[2];
146     gint32                      frame_num;              // Original frame_num from slice_header()
147     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
148     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
149     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
150     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
151     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
152     guint                       output_flag             : 1;
153     guint                       output_needed           : 1;
154 };
155
156 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
157
158 void
159 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
160 {
161     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
162 }
163
164 gboolean
165 gst_vaapi_picture_h264_create(
166     GstVaapiPictureH264                      *picture,
167     const GstVaapiCodecObjectConstructorArgs *args
168 )
169 {
170     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
171         return FALSE;
172
173     picture->field_poc[0]       = G_MAXINT32;
174     picture->field_poc[1]       = G_MAXINT32;
175     picture->output_needed      = FALSE;
176     return TRUE;
177 }
178
179 static inline GstVaapiPictureH264 *
180 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
181 {
182     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
183         &GstVaapiPictureH264Class,
184         GST_VAAPI_CODEC_BASE(decoder),
185         NULL, sizeof(VAPictureParameterBufferH264),
186         NULL, 0,
187         0);
188 }
189
190 static inline void
191 gst_vaapi_picture_h264_set_reference(
192     GstVaapiPictureH264 *picture,
193     guint                reference_flags,
194     gboolean             other_field
195 )
196 {
197     if (!picture)
198         return;
199     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
200     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
201
202     if (!other_field || !(picture = picture->other_field))
203         return;
204     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
205     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
206 }
207
208 static inline GstVaapiPictureH264 *
209 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
210 {
211     g_return_val_if_fail(picture, NULL);
212
213     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
214 }
215
216 /* ------------------------------------------------------------------------- */
217 /* --- Frame Buffers (DPB)                                               --- */
218 /* ------------------------------------------------------------------------- */
219
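/*
 * A frame store is one DPB entry: it holds either a complete frame or a
 * complementary pair of fields decoded into the same surface. output_needed
 * counts how many of the stored pictures still have to be output.
 */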
220 struct _GstVaapiFrameStore {
221     /*< private >*/
222     GstVaapiMiniObject          parent_instance;
223
224     guint                       structure;
225     GstVaapiPictureH264        *buffers[2];
226     guint                       num_buffers;
227     guint                       output_needed;
228 };
229
230 static void
231 gst_vaapi_frame_store_finalize(gpointer object)
232 {
233     GstVaapiFrameStore * const fs = object;
234     guint i;
235
236     for (i = 0; i < fs->num_buffers; i++)
237         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
238 }
239
240 static GstVaapiFrameStore *
241 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
242 {
243     GstVaapiFrameStore *fs;
244
245     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
246         sizeof(GstVaapiFrameStore),
247         gst_vaapi_frame_store_finalize
248     };
249
250     fs = (GstVaapiFrameStore *)
251         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
252     if (!fs)
253         return NULL;
254
255     fs->structure       = picture->structure;
256     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
257     fs->buffers[1]      = NULL;
258     fs->num_buffers     = 1;
259     fs->output_needed   = picture->output_needed;
260     return fs;
261 }
262
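/*
 * Appends the second field to a frame store that currently holds a single
 * field. The field POCs are propagated in both directions so that each
 * picture ends up with both TopFieldOrderCnt and BottomFieldOrderCnt set,
 * and the store is promoted to a full frame.
 */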
263 static gboolean
264 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
265 {
266     guint field;
267
268     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
269     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
270     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
271
272     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
273     if (picture->output_flag) {
274         picture->output_needed = TRUE;
275         fs->output_needed++;
276     }
277
278     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
279
280     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
281         TOP_FIELD : BOTTOM_FIELD;
282     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
283     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
284     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
285     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
286     return TRUE;
287 }
288
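/*
 * For interlaced streams coded as frame pictures, splits the stored frame
 * into two field pictures sharing the same surface. The second field
 * inherits frame_num, both field POCs and the output flag, so either field
 * can later be used as a reference on its own.
 */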
289 static gboolean
290 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
291 {
292     GstVaapiPictureH264 * const first_field = fs->buffers[0];
293     GstVaapiPictureH264 *second_field;
294
295     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
296
297     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
298     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
299
300     second_field = gst_vaapi_picture_h264_new_field(first_field);
301     if (!second_field)
302         return FALSE;
303     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
304     gst_vaapi_picture_unref(second_field);
305
306     second_field->frame_num    = first_field->frame_num;
307     second_field->field_poc[0] = first_field->field_poc[0];
308     second_field->field_poc[1] = first_field->field_poc[1];
309     second_field->output_flag  = first_field->output_flag;
310     if (second_field->output_flag) {
311         second_field->output_needed = TRUE;
312         fs->output_needed++;
313     }
314     return TRUE;
315 }
316
317 static inline gboolean
318 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
319 {
320     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
321 }
322
323 static inline gboolean
324 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
325 {
326     guint i;
327
328     for (i = 0; i < fs->num_buffers; i++) {
329         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
330             return TRUE;
331     }
332     return FALSE;
333 }
334
335 #define gst_vaapi_frame_store_ref(fs) \
336     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
337
338 #define gst_vaapi_frame_store_unref(fs) \
339     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
340
341 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
342     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
343         (GstVaapiMiniObject *)(new_fs))
344
345 /* ------------------------------------------------------------------------- */
346 /* --- H.264 Decoder                                                     --- */
347 /* ------------------------------------------------------------------------- */
348
349 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
350     ((GstVaapiDecoderH264 *)(decoder))
351
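/*
 * Bitmask of the headers and slices seen so far. parser_state tracks what
 * the parser has accepted, decoder_state what has actually been consumed for
 * the current picture. A picture is only decoded once it reaches
 * GST_H264_VIDEO_STATE_VALID_PICTURE (SPS + PPS + at least one slice);
 * otherwise the frame is dropped rather than decoded from incomplete state,
 * which is how missing packets are tolerated.
 */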
352 typedef enum {
353     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
354     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
355     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
356
357     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
358         GST_H264_VIDEO_STATE_GOT_SPS |
359         GST_H264_VIDEO_STATE_GOT_PPS),
360     GST_H264_VIDEO_STATE_VALID_PICTURE = (
361         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
362         GST_H264_VIDEO_STATE_GOT_SLICE)
363 } GstH264VideoState;
364
365 struct _GstVaapiDecoderH264Private {
366     GstH264NalParser           *parser;
367     guint                       parser_state;
368     guint                       decoder_state;
369     GstVaapiPictureH264        *current_picture;
370     GstVaapiParserInfoH264     *prev_slice_pi;
371     GstVaapiFrameStore         *prev_frame;
372     GstVaapiFrameStore         *dpb[16];
373     guint                       dpb_count;
374     guint                       dpb_size;
375     GstVaapiProfile             profile;
376     GstVaapiEntrypoint          entrypoint;
377     GstVaapiChromaType          chroma_type;
378     GstVaapiPictureH264        *short_ref[32];
379     guint                       short_ref_count;
380     GstVaapiPictureH264        *long_ref[32];
381     guint                       long_ref_count;
382     GstVaapiPictureH264        *RefPicList0[32];
383     guint                       RefPicList0_count;
384     GstVaapiPictureH264        *RefPicList1[32];
385     guint                       RefPicList1_count;
386     guint                       nal_length_size;
387     guint                       mb_width;
388     guint                       mb_height;
389     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
390     gint32                      poc_msb;                // PicOrderCntMsb
391     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
392     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
393     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
394     gint32                      frame_num_offset;       // FrameNumOffset
395     gint32                      frame_num;              // frame_num (from slice_header())
396     gint32                      prev_frame_num;         // prevFrameNum
397     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
398     gboolean                    prev_pic_structure;     // previous picture structure
399     guint                       is_opened               : 1;
400     guint                       is_avcC                 : 1;
401     guint                       has_context             : 1;
402     guint                       progressive_sequence    : 1;
403 };
404
405 /**
406  * GstVaapiDecoderH264:
407  *
408  * An H.264 decoder.
409  */
410 struct _GstVaapiDecoderH264 {
411     /*< private >*/
412     GstVaapiDecoder             parent_instance;
413     GstVaapiDecoderH264Private  priv;
414 };
415
416 /**
417  * GstVaapiDecoderH264Class:
418  *
419  * An H.264 decoder class.
420  */
421 struct _GstVaapiDecoderH264Class {
422     /*< private >*/
423     GstVaapiDecoderClass parent_class;
424 };
425
426 static gboolean
427 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
428
429 /* Get number of reference frames to use */
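/* For example, assuming the Table A-1 value MaxDpbMbs = 32768 for level 4.1,
   a 1920x1088 stream has PicSizeMbs = 120 * 68 = 8160, so the DPB holds
   32768 / 8160 = 4 frames unless the VUI bitstream restriction says otherwise. */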
430 static guint
431 get_max_dec_frame_buffering(GstH264SPS *sps)
432 {
433     guint max_dec_frame_buffering, PicSizeMbs;
434     GstVaapiLevelH264 level;
435     const GstVaapiH264LevelLimits *level_limits;
436
437     /* Table A-1 - Level limits */
438     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
439         level = GST_VAAPI_LEVEL_H264_L1b;
440     else
441         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
442     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
443     if (!level_limits)
444         return 16;
445
446     PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
447                   (sps->pic_height_in_map_units_minus1 + 1) *
448                   (sps->frame_mbs_only_flag ? 1 : 2));
449     max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
450
451     /* VUI parameters */
452     if (sps->vui_parameters_present_flag) {
453         GstH264VUIParams * const vui_params = &sps->vui_parameters;
454         if (vui_params->bitstream_restriction_flag)
455             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
456         else {
457             switch (sps->profile_idc) {
458             case 44:  // CAVLC 4:4:4 Intra profile
459             case GST_H264_PROFILE_SCALABLE_HIGH:
460             case GST_H264_PROFILE_HIGH:
461             case GST_H264_PROFILE_HIGH10:
462             case GST_H264_PROFILE_HIGH_422:
463             case GST_H264_PROFILE_HIGH_444:
464                 if (sps->constraint_set3_flag)
465                     max_dec_frame_buffering = 0;
466                 break;
467             }
468         }
469     }
470
471     if (max_dec_frame_buffering > 16)
472         max_dec_frame_buffering = 16;
473     else if (max_dec_frame_buffering < sps->num_ref_frames)
474         max_dec_frame_buffering = sps->num_ref_frames;
475     return MAX(1, max_dec_frame_buffering);
476 }
477
478 static void
479 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
480 {
481     gpointer * const entries = array;
482     guint num_entries = *array_length_ptr;
483
484     g_return_if_fail(index < num_entries);
485
486     if (index != --num_entries)
487         entries[index] = entries[num_entries];
488     entries[num_entries] = NULL;
489     *array_length_ptr = num_entries;
490 }
491
492 #if 1
493 static inline void
494 array_remove_index(void *array, guint *array_length_ptr, guint index)
495 {
496     array_remove_index_fast(array, array_length_ptr, index);
497 }
498 #else
499 static void
500 array_remove_index(void *array, guint *array_length_ptr, guint index)
501 {
502     gpointer * const entries = array;
503     const guint num_entries = *array_length_ptr - 1;
504     guint i;
505
506     g_return_if_fail(index <= num_entries);
507
508     for (i = index; i < num_entries; i++)
509         entries[i] = entries[i + 1];
510     entries[num_entries] = NULL;
511     *array_length_ptr = num_entries;
512 }
513 #endif
514
515 #define ARRAY_REMOVE_INDEX(array, index) \
516     array_remove_index(array, &array##_count, index)
517
518 static void
519 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
520 {
521     GstVaapiDecoderH264Private * const priv = &decoder->priv;
522     guint i, num_frames = --priv->dpb_count;
523
524     if (USE_STRICT_DPB_ORDERING) {
525         for (i = index; i < num_frames; i++)
526             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
527     }
528     else if (index != num_frames)
529         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
530     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
531 }
532
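/*
 * Marks @picture as output and, when it belongs to a frame store, defers the
 * actual output until every field of that store has been output; the whole
 * frame (buffers[0]) is then pushed downstream in one go.
 */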
533 static gboolean
534 dpb_output(
535     GstVaapiDecoderH264 *decoder,
536     GstVaapiFrameStore  *fs,
537     GstVaapiPictureH264 *picture
538 )
539 {
540     picture->output_needed = FALSE;
541
542     if (fs) {
543         if (--fs->output_needed > 0)
544             return TRUE;
545         picture = fs->buffers[0];
546     }
547     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
548 }
549
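/* Removes DPB entry @i once it is needed neither for output nor for reference. */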
550 static inline void
551 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
552 {
553     GstVaapiDecoderH264Private * const priv = &decoder->priv;
554     GstVaapiFrameStore * const fs = priv->dpb[i];
555
556     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
557         dpb_remove_index(decoder, i);
558 }
559
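/*
 * "Bumping" process (C.4.5.3): outputs the picture with the smallest POC
 * among those still needing output, then evicts its frame store from the DPB
 * if it is no longer needed for output or reference.
 */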
560 static gboolean
561 dpb_bump(GstVaapiDecoderH264 *decoder)
562 {
563     GstVaapiDecoderH264Private * const priv = &decoder->priv;
564     GstVaapiPictureH264 *found_picture = NULL;
565     guint i, j, found_index;
566     gboolean success;
567
568     for (i = 0; i < priv->dpb_count; i++) {
569         GstVaapiFrameStore * const fs = priv->dpb[i];
570         if (!fs->output_needed)
571             continue;
572         for (j = 0; j < fs->num_buffers; j++) {
573             GstVaapiPictureH264 * const picture = fs->buffers[j];
574             if (!picture->output_needed)
575                 continue;
576             if (!found_picture || found_picture->base.poc > picture->base.poc)
577                 found_picture = picture, found_index = i;
578         }
579     }
580     if (!found_picture)
581         return FALSE;
582
583     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
584     dpb_evict(decoder, found_picture, found_index);
585     return success;
586 }
587
588 static void
589 dpb_clear(GstVaapiDecoderH264 *decoder)
590 {
591     GstVaapiDecoderH264Private * const priv = &decoder->priv;
592     guint i;
593
594     for (i = 0; i < priv->dpb_count; i++)
595         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
596     priv->dpb_count = 0;
597
598     gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
599 }
600
601 static void
602 dpb_flush(GstVaapiDecoderH264 *decoder)
603 {
604     while (dpb_bump(decoder))
605         ;
606     dpb_clear(decoder);
607 }
608
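/*
 * Stores a decoded picture into the DPB. Unused entries are evicted first;
 * if the picture is the second field of the previously stored frame it is
 * merged into that frame store, otherwise a new store is created (and split
 * into fields for interlaced content). Reference and non-reference pictures
 * then follow the C.4.5.1 / C.4.5.2 storage rules, bumping the DPB when full.
 */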
609 static gboolean
610 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
611 {
612     GstVaapiDecoderH264Private * const priv = &decoder->priv;
613     GstVaapiFrameStore *fs;
614     guint i, j;
615
616     // Remove all unused pictures
617     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
618         i = 0;
619         while (i < priv->dpb_count) {
620             GstVaapiFrameStore * const fs = priv->dpb[i];
621             if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
622                 dpb_remove_index(decoder, i);
623             else
624                 i++;
625         }
626     }
627
628     // Check if picture is the second field and the first field is still in DPB
629     fs = priv->prev_frame;
630     if (fs && !gst_vaapi_frame_store_has_frame(fs))
631         return gst_vaapi_frame_store_add(fs, picture);
632
633     // Create new frame store, and split fields if necessary
634     fs = gst_vaapi_frame_store_new(picture);
635     if (!fs)
636         return FALSE;
637     gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
638     gst_vaapi_frame_store_unref(fs);
639
640     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
641         if (!gst_vaapi_frame_store_split_fields(fs))
642             return FALSE;
643     }
644
645     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
646     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
647         while (priv->dpb_count == priv->dpb_size) {
648             if (!dpb_bump(decoder))
649                 return FALSE;
650         }
651         gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
652         if (picture->output_flag) {
653             picture->output_needed = TRUE;
654             fs->output_needed++;
655         }
656     }
657
658     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
659     else {
660         if (!picture->output_flag)
661             return TRUE;
662         while (priv->dpb_count == priv->dpb_size) {
663             gboolean found_picture = FALSE;
664             for (i = 0; !found_picture && i < priv->dpb_count; i++) {
665                 GstVaapiFrameStore * const fs = priv->dpb[i];
666                 if (!fs->output_needed)
667                     continue;
668                 for (j = 0; !found_picture && j < fs->num_buffers; j++)
669                     found_picture = fs->buffers[j]->output_needed &&
670                         fs->buffers[j]->base.poc < picture->base.poc;
671             }
672             if (!found_picture)
673                 return dpb_output(decoder, NULL, picture);
674             if (!dpb_bump(decoder))
675                 return FALSE;
676         }
677         gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
678         picture->output_needed = TRUE;
679         fs->output_needed++;
680     }
681     return TRUE;
682 }
683
684 static inline void
685 dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
686 {
687     GstVaapiDecoderH264Private * const priv = &decoder->priv;
688
689     priv->dpb_size = get_max_dec_frame_buffering(sps);
690     GST_DEBUG("DPB size %u", priv->dpb_size);
691 }
692
693 static GstVaapiDecoderStatus
694 get_status(GstH264ParserResult result)
695 {
696     GstVaapiDecoderStatus status;
697
698     switch (result) {
699     case GST_H264_PARSER_OK:
700         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
701         break;
702     case GST_H264_PARSER_NO_NAL_END:
703         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
704         break;
705     case GST_H264_PARSER_ERROR:
706         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
707         break;
708     default:
709         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
710         break;
711     }
712     return status;
713 }
714
715 static void
716 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
717 {
718     GstVaapiDecoderH264Private * const priv = &decoder->priv;
719
720     gst_vaapi_picture_replace(&priv->current_picture, NULL);
721     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
722
723     dpb_clear(decoder);
724
725     if (priv->parser) {
726         gst_h264_nal_parser_free(priv->parser);
727         priv->parser = NULL;
728     }
729 }
730
731 static gboolean
732 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
733 {
734     GstVaapiDecoderH264Private * const priv = &decoder->priv;
735
736     gst_vaapi_decoder_h264_close(decoder);
737
738     priv->parser = gst_h264_nal_parser_new();
739     if (!priv->parser)
740         return FALSE;
741     return TRUE;
742 }
743
744 static void
745 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
746 {
747     GstVaapiDecoderH264 * const decoder =
748         GST_VAAPI_DECODER_H264_CAST(base_decoder);
749
750     gst_vaapi_decoder_h264_close(decoder);
751 }
752
753 static gboolean
754 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
755 {
756     GstVaapiDecoderH264 * const decoder =
757         GST_VAAPI_DECODER_H264_CAST(base_decoder);
758     GstVaapiDecoderH264Private * const priv = &decoder->priv;
759
760     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
761     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
762     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
763     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
764     priv->progressive_sequence  = TRUE;
765     return TRUE;
766 }
767
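/*
 * Appends @profile to the candidate list, followed by compatible supersets
 * (e.g. a Main stream can also be decoded with a High profile decoder), so
 * that get_profile() can fall back to whatever the VA driver actually exposes.
 */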
768 static void
769 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
770     GstVaapiProfile profile)
771 {
772     guint n_profiles = *n_profiles_ptr;
773
774     profiles[n_profiles++] = profile;
775     switch (profile) {
776     case GST_VAAPI_PROFILE_H264_MAIN:
777         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
778         break;
779     default:
780         break;
781     }
782     *n_profiles_ptr = n_profiles;
783 }
784
785 static GstVaapiProfile
786 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
787 {
788     GstVaapiDecoderH264Private * const priv = &decoder->priv;
789     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
790     GstVaapiProfile profile, profiles[4];
791     guint i, n_profiles = 0;
792
793     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
794     if (!profile)
795         return GST_VAAPI_PROFILE_UNKNOWN;
796
797     fill_profiles(profiles, &n_profiles, profile);
798     switch (profile) {
799     case GST_VAAPI_PROFILE_H264_BASELINE:
800         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
801             fill_profiles(profiles, &n_profiles,
802                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
803             fill_profiles(profiles, &n_profiles,
804                 GST_VAAPI_PROFILE_H264_MAIN);
805         }
806         break;
807     case GST_VAAPI_PROFILE_H264_EXTENDED:
808         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
809             fill_profiles(profiles, &n_profiles,
810                 GST_VAAPI_PROFILE_H264_MAIN);
811         }
812         break;
813     default:
814         break;
815     }
816
817     /* If the preferred profile (profiles[0]) matches one that we already
818        found, then just return it now instead of searching for it again */
819     if (profiles[0] == priv->profile)
820         return priv->profile;
821
822     for (i = 0; i < n_profiles; i++) {
823         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
824             return profiles[i];
825     }
826     return GST_VAAPI_PROFILE_UNKNOWN;
827 }
828
829 static GstVaapiDecoderStatus
830 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
831 {
832     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
833     GstVaapiDecoderH264Private * const priv = &decoder->priv;
834     GstVaapiContextInfo info;
835     GstVaapiProfile profile;
836     GstVaapiChromaType chroma_type;
837     gboolean reset_context = FALSE;
838     guint mb_width, mb_height;
839
840     profile = get_profile(decoder, sps);
841     if (!profile) {
842         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
843         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
844     }
845
846     if (priv->profile != profile) {
847         GST_DEBUG("profile changed");
848         reset_context = TRUE;
849         priv->profile = profile;
850     }
851
852     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
853     if (!chroma_type || chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420) {
854         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
855         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
856     }
857
858     if (priv->chroma_type != chroma_type) {
859         GST_DEBUG("chroma format changed");
860         reset_context     = TRUE;
861         priv->chroma_type = chroma_type;
862     }
863
864     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
865     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
866         !sps->frame_mbs_only_flag;
867     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
868         GST_DEBUG("size changed");
869         reset_context   = TRUE;
870         priv->mb_width  = mb_width;
871         priv->mb_height = mb_height;
872     }
873
874     priv->progressive_sequence = sps->frame_mbs_only_flag;
875 #if 0
876     /* XXX: we only output complete frames for now */
877     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
878 #endif
879
880     gst_vaapi_decoder_set_pixel_aspect_ratio(
881         base_decoder,
882         sps->vui_parameters.par_n,
883         sps->vui_parameters.par_d
884     );
885
886     if (!reset_context && priv->has_context)
887         return GST_VAAPI_DECODER_STATUS_SUCCESS;
888
889     /* XXX: fix surface size when cropping is implemented */
890     info.profile    = priv->profile;
891     info.entrypoint = priv->entrypoint;
892     info.width      = sps->width;
893     info.height     = sps->height;
894     info.ref_frames = get_max_dec_frame_buffering(sps);
895
896     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
897         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
898     priv->has_context = TRUE;
899
900     /* Reset DPB */
901     dpb_reset(decoder, sps);
902     return GST_VAAPI_DECODER_STATUS_SUCCESS;
903 }
904
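/*
 * The bitstream carries scaling lists in zig-zag scan order, whereas
 * VAIQMatrixBufferH264 expects raster order; the two helpers below convert
 * the 4x4 and 8x8 lists accordingly.
 */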
905 static void
906 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
907 {
908     guint i;
909
910     /* There are always 6 4x4 scaling lists */
911     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
912     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
913
914     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
915         gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
916             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
917 }
918
919 static void
920 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
921 {
922     const GstH264SPS * const sps = pps->sequence;
923     guint i, n;
924
925     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
926     if (!pps->transform_8x8_mode_flag)
927         return;
928
929     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
930     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
931
932     n = (sps->chroma_format_idc != 3) ? 2 : 6;
933     for (i = 0; i < n; i++) {
934         gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
935             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
936     }
937 }
938
939 static GstVaapiDecoderStatus
940 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
941 {
942     GstVaapiPicture * const base_picture = &picture->base;
943     GstH264PPS * const pps = picture->pps;
944     GstH264SPS * const sps = pps->sequence;
945     VAIQMatrixBufferH264 *iq_matrix;
946
947     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
948     if (!base_picture->iq_matrix) {
949         GST_ERROR("failed to allocate IQ matrix");
950         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
951     }
952     iq_matrix = base_picture->iq_matrix->param;
953
954     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
955        is not large enough to hold lists for 4:4:4 */
956     if (sps->chroma_format_idc == 3)
957         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
958
959     fill_iq_matrix_4x4(iq_matrix, pps);
960     fill_iq_matrix_8x8(iq_matrix, pps);
961
962     return GST_VAAPI_DECODER_STATUS_SUCCESS;
963 }
964
965 static inline gboolean
966 is_valid_state(guint state, guint ref_state)
967 {
968     return (state & ref_state) == ref_state;
969 }
970
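/*
 * Completes the pending picture: runs reference picture marking, stores it
 * into the DPB and submits it for decoding. If the mandatory headers were
 * never received (state below GST_H264_VIDEO_STATE_VALID_PICTURE), the frame
 * is dropped instead.
 */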
971 static GstVaapiDecoderStatus
972 decode_current_picture(GstVaapiDecoderH264 *decoder)
973 {
974     GstVaapiDecoderH264Private * const priv = &decoder->priv;
975     GstVaapiPictureH264 * const picture = priv->current_picture;
976
977     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
978         goto drop_frame;
979     priv->decoder_state = 0;
980
981     if (!picture)
982         return GST_VAAPI_DECODER_STATUS_SUCCESS;
983
984     if (!exec_ref_pic_marking(decoder, picture))
985         goto error;
986     if (!dpb_add(decoder, picture))
987         goto error;
988     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
989         goto error;
990     if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
991         gst_vaapi_picture_replace(&priv->current_picture, NULL);
992     return GST_VAAPI_DECODER_STATUS_SUCCESS;
993
994 error:
995     /* XXX: fix for cases where first field failed to be decoded */
996     gst_vaapi_picture_replace(&priv->current_picture, NULL);
997     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
998
999 drop_frame:
1000     priv->decoder_state = 0;
1001     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1002 }
1003
1004 static GstVaapiDecoderStatus
1005 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1006 {
1007     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1008     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1009     GstH264SPS * const sps = &pi->data.sps;
1010     GstH264ParserResult result;
1011
1012     GST_DEBUG("parse SPS");
1013
1014     priv->parser_state = 0;
1015
1016     /* Variables that don't have inferred values per the H.264
1017        standard but that should get a default value anyway */
1018     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1019
1020     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1021     if (result != GST_H264_PARSER_OK)
1022         return get_status(result);
1023
1024     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1025     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1026 }
1027
1028 static GstVaapiDecoderStatus
1029 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1030 {
1031     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1032     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1033     GstH264PPS * const pps = &pi->data.pps;
1034     GstH264ParserResult result;
1035
1036     GST_DEBUG("parse PPS");
1037
1038     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1039
1040     /* Variables that don't have inferred values per the H.264
1041        standard but that should get a default value anyway */
1042     pps->slice_group_map_type = 0;
1043     pps->slice_group_change_rate_minus1 = 0;
1044
1045     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1046     if (result != GST_H264_PARSER_OK)
1047         return get_status(result);
1048
1049     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1050     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1051 }
1052
1053 static GstVaapiDecoderStatus
1054 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1055 {
1056     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1057     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1058     GstH264SEIMessage sei;
1059     GstH264ParserResult result;
1060
1061     GST_DEBUG("parse SEI");
1062
1063     memset(&sei, 0, sizeof(sei));
1064     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, &sei);
1065     if (result != GST_H264_PARSER_OK) {
1066         GST_WARNING("failed to parse SEI, payload type:%d", sei.payloadType);
1067         return get_status(result);
1068     }
1069
1070     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1071 }
1072
1073 static GstVaapiDecoderStatus
1074 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1075 {
1076     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1077     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1078     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1079     GstH264ParserResult result;
1080
1081     GST_DEBUG("parse slice");
1082
1083     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1084                            GST_H264_VIDEO_STATE_GOT_PPS);
1085
1086     /* Variables that don't have inferred values per the H.264
1087        standard but that should get a default value anyway */
1088     slice_hdr->cabac_init_idc = 0;
1089     slice_hdr->direct_spatial_mv_pred_flag = 0;
1090
1091     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1092         slice_hdr, TRUE, TRUE);
1093     if (result != GST_H264_PARSER_OK)
1094         return get_status(result);
1095
1096     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1097     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1098 }
1099
1100 static GstVaapiDecoderStatus
1101 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1102 {
1103     GstVaapiDecoderStatus status;
1104
1105     GST_DEBUG("decode sequence-end");
1106
1107     status = decode_current_picture(decoder);
1108     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1109         return status;
1110
1111     dpb_flush(decoder);
1112     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1113 }
1114
1115 /* 8.2.1.1 - Decoding process for picture order count type 0 */
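/* Example (assuming MaxPicOrderCntLsb = 16): for an lsb sequence
   0, 2, ..., 14, 0 the second 0 satisfies (prev_lsb - lsb) >= 8, so poc_msb
   is incremented by 16 and the resulting POC is 16, not 0. */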
1116 static void
1117 init_picture_poc_0(
1118     GstVaapiDecoderH264 *decoder,
1119     GstVaapiPictureH264 *picture,
1120     GstH264SliceHdr     *slice_hdr
1121 )
1122 {
1123     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1124     GstH264PPS * const pps = slice_hdr->pps;
1125     GstH264SPS * const sps = pps->sequence;
1126     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1127     gint32 temp_poc;
1128
1129     GST_DEBUG("decode picture order count type 0");
1130
1131     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1132         priv->prev_poc_msb = 0;
1133         priv->prev_poc_lsb = 0;
1134     }
1135     else if (priv->prev_pic_has_mmco5) {
1136         priv->prev_poc_msb = 0;
1137         priv->prev_poc_lsb =
1138             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1139              0 : priv->field_poc[TOP_FIELD]);
1140     }
1141     else {
1142         priv->prev_poc_msb = priv->poc_msb;
1143         priv->prev_poc_lsb = priv->poc_lsb;
1144     }
1145
1146     // (8-3)
1147     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1148     if (priv->poc_lsb < priv->prev_poc_lsb &&
1149         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1150         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1151     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1152              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1153         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1154     else
1155         priv->poc_msb = priv->prev_poc_msb;
1156
1157     temp_poc = priv->poc_msb + priv->poc_lsb;
1158     switch (picture->structure) {
1159     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1160         // (8-4, 8-5)
1161         priv->field_poc[TOP_FIELD] = temp_poc;
1162         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1163             slice_hdr->delta_pic_order_cnt_bottom;
1164         break;
1165     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1166         // (8-4)
1167         priv->field_poc[TOP_FIELD] = temp_poc;
1168         break;
1169     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1170         // (8-5)
1171         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1172         break;
1173     }
1174 }
1175
1176 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1177 static void
1178 init_picture_poc_1(
1179     GstVaapiDecoderH264 *decoder,
1180     GstVaapiPictureH264 *picture,
1181     GstH264SliceHdr     *slice_hdr
1182 )
1183 {
1184     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1185     GstH264PPS * const pps = slice_hdr->pps;
1186     GstH264SPS * const sps = pps->sequence;
1187     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1188     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1189     guint i;
1190
1191     GST_DEBUG("decode picture order count type 1");
1192
1193     if (priv->prev_pic_has_mmco5)
1194         prev_frame_num_offset = 0;
1195     else
1196         prev_frame_num_offset = priv->frame_num_offset;
1197
1198     // (8-6)
1199     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1200         priv->frame_num_offset = 0;
1201     else if (priv->prev_frame_num > priv->frame_num)
1202         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1203     else
1204         priv->frame_num_offset = prev_frame_num_offset;
1205
1206     // (8-7)
1207     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1208         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1209     else
1210         abs_frame_num = 0;
1211     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1212         abs_frame_num = abs_frame_num - 1;
1213
1214     if (abs_frame_num > 0) {
1215         gint32 expected_delta_per_poc_cycle;
1216         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1217
1218         expected_delta_per_poc_cycle = 0;
1219         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1220             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1221
1222         // (8-8)
1223         poc_cycle_cnt = (abs_frame_num - 1) /
1224             sps->num_ref_frames_in_pic_order_cnt_cycle;
1225         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1226             sps->num_ref_frames_in_pic_order_cnt_cycle;
1227
1228         // (8-9)
1229         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1230         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1231             expected_poc += sps->offset_for_ref_frame[i];
1232     }
1233     else
1234         expected_poc = 0;
1235     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1236         expected_poc += sps->offset_for_non_ref_pic;
1237
1238     // (8-10)
1239     switch (picture->structure) {
1240     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1241         priv->field_poc[TOP_FIELD] = expected_poc +
1242             slice_hdr->delta_pic_order_cnt[0];
1243         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1244             sps->offset_for_top_to_bottom_field +
1245             slice_hdr->delta_pic_order_cnt[1];
1246         break;
1247     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1248         priv->field_poc[TOP_FIELD] = expected_poc +
1249             slice_hdr->delta_pic_order_cnt[0];
1250         break;
1251     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1252         priv->field_poc[BOTTOM_FIELD] = expected_poc + 
1253             sps->offset_for_top_to_bottom_field +
1254             slice_hdr->delta_pic_order_cnt[0];
1255         break;
1256     }
1257 }
1258
1259 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1260 static void
1261 init_picture_poc_2(
1262     GstVaapiDecoderH264 *decoder,
1263     GstVaapiPictureH264 *picture,
1264     GstH264SliceHdr     *slice_hdr
1265 )
1266 {
1267     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1268     GstH264PPS * const pps = slice_hdr->pps;
1269     GstH264SPS * const sps = pps->sequence;
1270     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1271     gint32 prev_frame_num_offset, temp_poc;
1272
1273     GST_DEBUG("decode picture order count type 2");
1274
1275     if (priv->prev_pic_has_mmco5)
1276         prev_frame_num_offset = 0;
1277     else
1278         prev_frame_num_offset = priv->frame_num_offset;
1279
1280     // (8-11)
1281     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1282         priv->frame_num_offset = 0;
1283     else if (priv->prev_frame_num > priv->frame_num)
1284         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1285     else
1286         priv->frame_num_offset = prev_frame_num_offset;
1287
1288     // (8-12)
1289     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1290         temp_poc = 0;
1291     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1292         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1293     else
1294         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1295
1296     // (8-13)
1297     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1298         priv->field_poc[TOP_FIELD] = temp_poc;
1299     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1300         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1301 }
1302
1303 /* 8.2.1 - Decoding process for picture order count */
1304 static void
1305 init_picture_poc(
1306     GstVaapiDecoderH264 *decoder,
1307     GstVaapiPictureH264 *picture,
1308     GstH264SliceHdr     *slice_hdr
1309 )
1310 {
1311     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1312     GstH264PPS * const pps = slice_hdr->pps;
1313     GstH264SPS * const sps = pps->sequence;
1314
1315     switch (sps->pic_order_cnt_type) {
1316     case 0:
1317         init_picture_poc_0(decoder, picture, slice_hdr);
1318         break;
1319     case 1:
1320         init_picture_poc_1(decoder, picture, slice_hdr);
1321         break;
1322     case 2:
1323         init_picture_poc_2(decoder, picture, slice_hdr);
1324         break;
1325     }
1326
1327     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1328         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1329     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1330         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1331     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1332 }
1333
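/*
 * qsort() comparators implementing the orderings required by 8.2.4.2:
 * short-term references by decreasing PicNum/FrameNumWrap or by POC,
 * long-term references by increasing LongTermPicNum/LongTermFrameIdx.
 */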
1334 static int
1335 compare_picture_pic_num_dec(const void *a, const void *b)
1336 {
1337     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1338     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1339
1340     return picB->pic_num - picA->pic_num;
1341 }
1342
1343 static int
1344 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1345 {
1346     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1347     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1348
1349     return picA->long_term_pic_num - picB->long_term_pic_num;
1350 }
1351
1352 static int
1353 compare_picture_poc_dec(const void *a, const void *b)
1354 {
1355     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1356     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1357
1358     return picB->base.poc - picA->base.poc;
1359 }
1360
1361 static int
1362 compare_picture_poc_inc(const void *a, const void *b)
1363 {
1364     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1365     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1366
1367     return picA->base.poc - picB->base.poc;
1368 }
1369
1370 static int
1371 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1372 {
1373     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1374     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1375
1376     return picB->frame_num_wrap - picA->frame_num_wrap;
1377 }
1378
1379 static int
1380 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1381 {
1382     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1383     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1384
1385     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1386 }
1387
1388 /* 8.2.4.1 - Decoding process for picture numbers */
1389 static void
1390 init_picture_refs_pic_num(
1391     GstVaapiDecoderH264 *decoder,
1392     GstVaapiPictureH264 *picture,
1393     GstH264SliceHdr     *slice_hdr
1394 )
1395 {
1396     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1397     GstH264PPS * const pps = slice_hdr->pps;
1398     GstH264SPS * const sps = pps->sequence;
1399     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1400     guint i;
1401
1402     GST_DEBUG("decode picture numbers");
1403
1404     for (i = 0; i < priv->short_ref_count; i++) {
1405         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1406
1407         // (8-27)
1408         if (pic->frame_num > priv->frame_num)
1409             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1410         else
1411             pic->frame_num_wrap = pic->frame_num;
1412
1413         // (8-28, 8-30, 8-31)
1414         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1415             pic->pic_num = pic->frame_num_wrap;
1416         else {
1417             if (pic->structure == picture->structure)
1418                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1419             else
1420                 pic->pic_num = 2 * pic->frame_num_wrap;
1421         }
1422     }
1423
1424     for (i = 0; i < priv->long_ref_count; i++) {
1425         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1426
1427         // (8-29, 8-32, 8-33)
1428         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1429             pic->long_term_pic_num = pic->long_term_frame_idx;
1430         else {
1431             if (pic->structure == picture->structure)
1432                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
1433             else
1434                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
1435         }
1436     }
1437 }
1438
1439 #define SORT_REF_LIST(list, n, compare_func) \
1440     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
1441
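/*
 * 8.2.4.2.5: builds a field reference list by alternately taking the next
 * available reference field with the same parity as the current picture and
 * the next one with the opposite parity, until both source lists are
 * exhausted.
 */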
1442 static void
1443 init_picture_refs_fields_1(
1444     guint                picture_structure,
1445     GstVaapiPictureH264 *RefPicList[32],
1446     guint               *RefPicList_count,
1447     GstVaapiPictureH264 *ref_list[32],
1448     guint                ref_list_count
1449 )
1450 {
1451     guint i, j, n;
1452
1453     i = 0;
1454     j = 0;
1455     n = *RefPicList_count;
1456     do {
1457         g_assert(n < 32);
1458         for (; i < ref_list_count; i++) {
1459             if (ref_list[i]->structure == picture_structure) {
1460                 RefPicList[n++] = ref_list[i++];
1461                 break;
1462             }
1463         }
1464         for (; j < ref_list_count; j++) {
1465             if (ref_list[j]->structure != picture_structure) {
1466                 RefPicList[n++] = ref_list[j++];
1467                 break;
1468             }
1469         }
1470     } while (i < ref_list_count || j < ref_list_count);
1471     *RefPicList_count = n;
1472 }
1473
1474 static inline void
1475 init_picture_refs_fields(
1476     GstVaapiPictureH264 *picture,
1477     GstVaapiPictureH264 *RefPicList[32],
1478     guint               *RefPicList_count,
1479     GstVaapiPictureH264 *short_ref[32],
1480     guint                short_ref_count,
1481     GstVaapiPictureH264 *long_ref[32],
1482     guint                long_ref_count
1483 )
1484 {
1485     guint n = 0;
1486
1487     /* 8.2.4.2.5 - reference picture lists in fields */
1488     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1489         short_ref, short_ref_count);
1490     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1491         long_ref, long_ref_count);
1492     *RefPicList_count = n;
1493 }
1494
1495 static void
1496 init_picture_refs_p_slice(
1497     GstVaapiDecoderH264 *decoder,
1498     GstVaapiPictureH264 *picture,
1499     GstH264SliceHdr     *slice_hdr
1500 )
1501 {
1502     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1503     GstVaapiPictureH264 **ref_list;
1504     guint i;
1505
1506     GST_DEBUG("decode reference picture list for P and SP slices");
1507
1508     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1509         /* 8.2.4.2.1 - P and SP slices in frames */
1510         if (priv->short_ref_count > 0) {
1511             ref_list = priv->RefPicList0;
1512             for (i = 0; i < priv->short_ref_count; i++)
1513                 ref_list[i] = priv->short_ref[i];
1514             SORT_REF_LIST(ref_list, i, pic_num_dec);
1515             priv->RefPicList0_count += i;
1516         }
1517
1518         if (priv->long_ref_count > 0) {
1519             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1520             for (i = 0; i < priv->long_ref_count; i++)
1521                 ref_list[i] = priv->long_ref[i];
1522             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
1523             priv->RefPicList0_count += i;
1524         }
1525     }
1526     else {
1527         /* 8.2.4.2.2 - P and SP slices in fields */
1528         GstVaapiPictureH264 *short_ref[32];
1529         guint short_ref_count = 0;
1530         GstVaapiPictureH264 *long_ref[32];
1531         guint long_ref_count = 0;
1532
1533         if (priv->short_ref_count > 0) {
1534             for (i = 0; i < priv->short_ref_count; i++)
1535                 short_ref[i] = priv->short_ref[i];
1536             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
1537             short_ref_count = i;
1538         }
1539
1540         if (priv->long_ref_count > 0) {
1541             for (i = 0; i < priv->long_ref_count; i++)
1542                 long_ref[i] = priv->long_ref[i];
1543             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1544             long_ref_count = i;
1545         }
1546
1547         init_picture_refs_fields(
1548             picture,
1549             priv->RefPicList0, &priv->RefPicList0_count,
1550             short_ref,          short_ref_count,
1551             long_ref,           long_ref_count
1552         );
1553     }
1554 }
1555
1556 static void
1557 init_picture_refs_b_slice(
1558     GstVaapiDecoderH264 *decoder,
1559     GstVaapiPictureH264 *picture,
1560     GstH264SliceHdr     *slice_hdr
1561 )
1562 {
1563     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1564     GstVaapiPictureH264 **ref_list;
1565     guint i, n;
1566
1567     GST_DEBUG("decode reference picture list for B slices");
1568
1569     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1570         /* 8.2.4.2.3 - B slices in frames */
1571
1572         /* RefPicList0 */
1573         if (priv->short_ref_count > 0) {
1574             // 1. Short-term references
1575             ref_list = priv->RefPicList0;
1576             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1577                 if (priv->short_ref[i]->base.poc < picture->base.poc)
1578                     ref_list[n++] = priv->short_ref[i];
1579             }
1580             SORT_REF_LIST(ref_list, n, poc_dec);
1581             priv->RefPicList0_count += n;
1582
1583             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1584             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1585                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
1586                     ref_list[n++] = priv->short_ref[i];
1587             }
1588             SORT_REF_LIST(ref_list, n, poc_inc);
1589             priv->RefPicList0_count += n;
1590         }
1591
1592         if (priv->long_ref_count > 0) {
1593             // 2. Long-term references
1594             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1595             for (n = 0, i = 0; i < priv->long_ref_count; i++)
1596                 ref_list[n++] = priv->long_ref[i];
1597             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1598             priv->RefPicList0_count += n;
1599         }
1600
1601         /* RefPicList1 */
1602         if (priv->short_ref_count > 0) {
1603             // 1. Short-term references
1604             ref_list = priv->RefPicList1;
1605             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1606                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1607                     ref_list[n++] = priv->short_ref[i];
1608             }
1609             SORT_REF_LIST(ref_list, n, poc_inc);
1610             priv->RefPicList1_count += n;
1611
1612             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1613             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1614                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1615                     ref_list[n++] = priv->short_ref[i];
1616             }
1617             SORT_REF_LIST(ref_list, n, poc_dec);
1618             priv->RefPicList1_count += n;
1619         }
1620
1621         if (priv->long_ref_count > 0) {
1622             // 2. Long-term references
1623             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1624             for (n = 0, i = 0; i < priv->long_ref_count; i++)
1625                 ref_list[n++] = priv->long_ref[i];
1626             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1627             priv->RefPicList1_count += n;
1628         }
1629     }
1630     else {
1631         /* 8.2.4.2.4 - B slices in fields */
1632         GstVaapiPictureH264 *short_ref0[32];
1633         guint short_ref0_count = 0;
1634         GstVaapiPictureH264 *short_ref1[32];
1635         guint short_ref1_count = 0;
1636         GstVaapiPictureH264 *long_ref[32];
1637         guint long_ref_count = 0;
1638
1639         /* refFrameList0ShortTerm */
1640         if (priv->short_ref_count > 0) {
1641             ref_list = short_ref0;
1642             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1643                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1644                     ref_list[n++] = priv->short_ref[i];
1645             }
1646             SORT_REF_LIST(ref_list, n, poc_dec);
1647             short_ref0_count += n;
1648
1649             ref_list = &short_ref0[short_ref0_count];
1650             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1651                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1652                     ref_list[n++] = priv->short_ref[i];
1653             }
1654             SORT_REF_LIST(ref_list, n, poc_inc);
1655             short_ref0_count += n;
1656         }
1657
1658         /* refFrameList1ShortTerm */
1659         if (priv->short_ref_count > 0) {
1660             ref_list = short_ref1;
1661             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1662                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1663                     ref_list[n++] = priv->short_ref[i];
1664             }
1665             SORT_REF_LIST(ref_list, n, poc_inc);
1666             short_ref1_count += n;
1667
1668             ref_list = &short_ref1[short_ref1_count];
1669             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1670                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1671                     ref_list[n++] = priv->short_ref[i];
1672             }
1673             SORT_REF_LIST(ref_list, n, poc_dec);
1674             short_ref1_count += n;
1675         }
1676
1677         /* refFrameListLongTerm */
1678         if (priv->long_ref_count > 0) {
1679             for (i = 0; i < priv->long_ref_count; i++)
1680                 long_ref[i] = priv->long_ref[i];
1681             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1682             long_ref_count = i;
1683         }
1684
1685         init_picture_refs_fields(
1686             picture,
1687             priv->RefPicList0, &priv->RefPicList0_count,
1688             short_ref0,         short_ref0_count,
1689             long_ref,           long_ref_count
1690         );
1691
1692         init_picture_refs_fields(
1693             picture,
1694             priv->RefPicList1, &priv->RefPicList1_count,
1695             short_ref1,         short_ref1_count,
1696             long_ref,           long_ref_count
1697         );
1698     }
1699
1700     /* If RefPicList1 has more than one entry and is identical to
1701        RefPicList0, swap its first two entries */
1702     if (priv->RefPicList1_count > 1 &&
1703         priv->RefPicList1_count == priv->RefPicList0_count &&
1704         memcmp(priv->RefPicList0, priv->RefPicList1,
1705                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
1706         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
1707         priv->RefPicList1[0] = priv->RefPicList1[1];
1708         priv->RefPicList1[1] = tmp;
1709     }
1710 }
1711
1712 #undef SORT_REF_LIST
1713
1714 static gint
1715 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
1716 {
1717     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1718     guint i;
1719
1720     for (i = 0; i < priv->short_ref_count; i++) {
1721         if (priv->short_ref[i]->pic_num == pic_num)
1722             return i;
1723     }
1724     GST_ERROR("found no short-term reference picture with PicNum = %d",
1725               pic_num);
1726     return -1;
1727 }
1728
1729 static gint
1730 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
1731 {
1732     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1733     guint i;
1734
1735     for (i = 0; i < priv->long_ref_count; i++) {
1736         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
1737             return i;
1738     }
1739     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
1740               long_term_pic_num);
1741     return -1;
1742 }
1743
1744 static void
1745 exec_picture_refs_modification_1(
1746     GstVaapiDecoderH264           *decoder,
1747     GstVaapiPictureH264           *picture,
1748     GstH264SliceHdr               *slice_hdr,
1749     guint                          list
1750 )
1751 {
1752     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1753     GstH264PPS * const pps = slice_hdr->pps;
1754     GstH264SPS * const sps = pps->sequence;
1755     GstH264RefPicListModification *ref_pic_list_modification;
1756     guint num_ref_pic_list_modifications;
1757     GstVaapiPictureH264 **ref_list;
1758     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
1759     guint i, j, n, num_refs;
1760     gint found_ref_idx;
1761     gint32 MaxPicNum, CurrPicNum, picNumPred;
1762
1763     GST_DEBUG("modification process of reference picture list %u", list);
1764
1765     if (list == 0) {
1766         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
1767         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
1768         ref_list                       = priv->RefPicList0;
1769         ref_list_count_ptr             = &priv->RefPicList0_count;
1770         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
1771     }
1772     else {
1773         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
1774         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
1775         ref_list                       = priv->RefPicList1;
1776         ref_list_count_ptr             = &priv->RefPicList1_count;
1777         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
1778     }
1779     ref_list_count = *ref_list_count_ptr;
1780
1781     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1782         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
1783         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
1784     }
1785     else {
1786         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
1787         CurrPicNum = slice_hdr->frame_num;                      // frame_num
1788     }
1789
1790     picNumPred = CurrPicNum;
1791
1792     for (i = 0; i < num_ref_pic_list_modifications; i++) {
1793         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
1794         if (l->modification_of_pic_nums_idc == 3)
1795             break;
1796
1797         /* 8.2.4.3.1 - Short-term reference pictures */
1798         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
1799             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
1800             gint32 picNum, picNumNoWrap;
1801
1802             // (8-34)
1803             if (l->modification_of_pic_nums_idc == 0) {
1804                 picNumNoWrap = picNumPred - abs_diff_pic_num;
1805                 if (picNumNoWrap < 0)
1806                     picNumNoWrap += MaxPicNum;
1807             }
1808
1809             // (8-35)
1810             else {
1811                 picNumNoWrap = picNumPred + abs_diff_pic_num;
1812                 if (picNumNoWrap >= MaxPicNum)
1813                     picNumNoWrap -= MaxPicNum;
1814             }
1815             picNumPred = picNumNoWrap;
1816
1817             // (8-36)
1818             picNum = picNumNoWrap;
1819             if (picNum > CurrPicNum)
1820                 picNum -= MaxPicNum;
1821
1822             // (8-37)
1823             for (j = num_refs; j > ref_list_idx; j--)
1824                 ref_list[j] = ref_list[j - 1];
1825             found_ref_idx = find_short_term_reference(decoder, picNum);
1826             ref_list[ref_list_idx++] =
1827                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
1828             n = ref_list_idx;
1829             for (j = ref_list_idx; j <= num_refs; j++) {
1830                 gint32 PicNumF;
1831                 if (!ref_list[j])
1832                     continue;
1833                 PicNumF =
1834                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
1835                     ref_list[j]->pic_num : MaxPicNum;
1836                 if (PicNumF != picNum)
1837                     ref_list[n++] = ref_list[j];
1838             }
1839         }
1840
1841         /* 8.2.4.3.2 - Long-term reference pictures */
1842         else {
1843
1844             for (j = num_refs; j > ref_list_idx; j--)
1845                 ref_list[j] = ref_list[j - 1];
1846             found_ref_idx =
1847                 find_long_term_reference(decoder, l->value.long_term_pic_num);
1848             ref_list[ref_list_idx++] =
1849                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
1850             n = ref_list_idx;
1851             for (j = ref_list_idx; j <= num_refs; j++) {
1852                 gint32 LongTermPicNumF;
1853                 if (!ref_list[j])
1854                     continue;
1855                 LongTermPicNumF =
1856                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
1857                     ref_list[j]->long_term_pic_num : INT_MAX;
1858                 if (LongTermPicNumF != l->value.long_term_pic_num)
1859                     ref_list[n++] = ref_list[j];
1860             }
1861         }
1862     }
1863
1864 #if DEBUG
1865     for (i = 0; i < num_refs; i++)
1866         if (!ref_list[i])
1867             GST_ERROR("list %u entry %u is empty", list, i);
1868 #endif
1869     *ref_list_count_ptr = num_refs;
1870 }
1871
1872 /* 8.2.4.3 - Modification process for reference picture lists */
1873 static void
1874 exec_picture_refs_modification(
1875     GstVaapiDecoderH264 *decoder,
1876     GstVaapiPictureH264 *picture,
1877     GstH264SliceHdr     *slice_hdr
1878 )
1879 {
1880     GST_DEBUG("execute ref_pic_list_modification()");
1881
1882     /* RefPicList0 */
1883     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
1884         slice_hdr->ref_pic_list_modification_flag_l0)
1885         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
1886
1887     /* RefPicList1 */
1888     if (GST_H264_IS_B_SLICE(slice_hdr) &&
1889         slice_hdr->ref_pic_list_modification_flag_l1)
1890         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
1891 }
1892
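     /* Rebuild the short_ref[] and long_ref[] arrays from the DPB: when the
        current picture is a frame, only frame stores holding a complete frame
        are used, as whole frames; otherwise every decoded field in the DPB is
        listed individually */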
1893 static void
1894 init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
1895 {
1896     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1897     guint i, j, short_ref_count, long_ref_count;
1898
1899     short_ref_count = 0;
1900     long_ref_count  = 0;
1901     if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
1902         for (i = 0; i < priv->dpb_count; i++) {
1903             GstVaapiFrameStore * const fs = priv->dpb[i];
1904             GstVaapiPictureH264 *picture;
1905             if (!gst_vaapi_frame_store_has_frame(fs))
1906                 continue;
1907             picture = fs->buffers[0];
1908             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1909                 priv->short_ref[short_ref_count++] = picture;
1910             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1911                 priv->long_ref[long_ref_count++] = picture;
1912             picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1913             picture->other_field = fs->buffers[1];
1914         }
1915     }
1916     else {
1917         for (i = 0; i < priv->dpb_count; i++) {
1918             GstVaapiFrameStore * const fs = priv->dpb[i];
1919             for (j = 0; j < fs->num_buffers; j++) {
1920                 GstVaapiPictureH264 * const picture = fs->buffers[j];
1921                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1922                     priv->short_ref[short_ref_count++] = picture;
1923                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1924                     priv->long_ref[long_ref_count++] = picture;
1925                 picture->structure = picture->base.structure;
1926                 picture->other_field = fs->buffers[j ^ 1];
1927             }
1928         }
1929     }
1930
1931     for (i = short_ref_count; i < priv->short_ref_count; i++)
1932         priv->short_ref[i] = NULL;
1933     priv->short_ref_count = short_ref_count;
1934
1935     for (i = long_ref_count; i < priv->long_ref_count; i++)
1936         priv->long_ref[i] = NULL;
1937     priv->long_ref_count = long_ref_count;
1938 }
1939
1940 static void
1941 init_picture_refs(
1942     GstVaapiDecoderH264 *decoder,
1943     GstVaapiPictureH264 *picture,
1944     GstH264SliceHdr     *slice_hdr
1945 )
1946 {
1947     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1948     GstVaapiPicture * const base_picture = &picture->base;
1949     guint i, num_refs;
1950
1951     init_picture_ref_lists(decoder);
1952     init_picture_refs_pic_num(decoder, picture, slice_hdr);
1953
1954     priv->RefPicList0_count = 0;
1955     priv->RefPicList1_count = 0;
1956
1957     switch (base_picture->type) {
1958     case GST_VAAPI_PICTURE_TYPE_P:
1959     case GST_VAAPI_PICTURE_TYPE_SP:
1960         init_picture_refs_p_slice(decoder, picture, slice_hdr);
1961         break;
1962     case GST_VAAPI_PICTURE_TYPE_B:
1963         init_picture_refs_b_slice(decoder, picture, slice_hdr);
1964         break;
1965     default:
1966         break;
1967     }
1968
1969     exec_picture_refs_modification(decoder, picture, slice_hdr);
1970
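         /* Pad the reference lists up to the number of active references
            signalled in the slice header; unfilled entries are left as NULL
            and later translated into invalid VA pictures by fill_RefPicList() */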
1971     switch (base_picture->type) {
1972     case GST_VAAPI_PICTURE_TYPE_B:
1973         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
1974         for (i = priv->RefPicList1_count; i < num_refs; i++)
1975             priv->RefPicList1[i] = NULL;
1976         priv->RefPicList1_count = num_refs;
1977
1978         // fall-through
1979     case GST_VAAPI_PICTURE_TYPE_P:
1980     case GST_VAAPI_PICTURE_TYPE_SP:
1981         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
1982         for (i = priv->RefPicList0_count; i < num_refs; i++)
1983             priv->RefPicList0[i] = NULL;
1984         priv->RefPicList0_count = num_refs;
1985         break;
1986     default:
1987         break;
1988     }
1989 }
1990
1991 static gboolean
1992 init_picture(
1993     GstVaapiDecoderH264 *decoder,
1994     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
1995 {
1996     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1997     GstVaapiPicture * const base_picture = &picture->base;
1998     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1999
2000     priv->prev_frame_num        = priv->frame_num;
2001     priv->frame_num             = slice_hdr->frame_num;
2002     picture->frame_num          = priv->frame_num;
2003     picture->frame_num_wrap     = priv->frame_num;
2004     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2005     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2006
2007     /* Reset decoder state for IDR pictures */
2008     if (pi->nalu.type == GST_H264_NAL_SLICE_IDR) {
2009         GST_DEBUG("<IDR>");
2010         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2011         dpb_flush(decoder);
2012     }
2013
2014     /* Initialize slice type */
2015     switch (slice_hdr->type % 5) {
2016     case GST_H264_P_SLICE:
2017         base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
2018         break;
2019     case GST_H264_B_SLICE:
2020         base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
2021         break;
2022     case GST_H264_I_SLICE:
2023         base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
2024         break;
2025     case GST_H264_SP_SLICE:
2026         base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
2027         break;
2028     case GST_H264_SI_SLICE:
2029         base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
2030         break;
2031     }
2032
2033     /* Initialize picture structure */
2034     if (!slice_hdr->field_pic_flag)
2035         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2036     else {
2037         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2038         if (!slice_hdr->bottom_field_flag)
2039             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2040         else
2041             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2042     }
2043     picture->structure = base_picture->structure;
2044
2045     /* Initialize reference flags */
2046     if (pi->nalu.ref_idc) {
2047         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2048             &slice_hdr->dec_ref_pic_marking;
2049
2050         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2051             dec_ref_pic_marking->long_term_reference_flag)
2052             GST_VAAPI_PICTURE_FLAG_SET(picture,
2053                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2054         else
2055             GST_VAAPI_PICTURE_FLAG_SET(picture,
2056                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2057     }
2058
2059     init_picture_poc(decoder, picture, slice_hdr);
2060     init_picture_refs(decoder, picture, slice_hdr);
2061     return TRUE;
2062 }
2063
2064 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2065 static gboolean
2066 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2067 {
2068     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2069     GstH264PPS * const pps = priv->current_picture->pps;
2070     GstH264SPS * const sps = pps->sequence;
2071     GstVaapiPictureH264 *ref_picture;
2072     guint i, m, max_num_ref_frames;
2073
2074     GST_DEBUG("reference picture marking process (sliding window)");
2075
2076     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2077         return TRUE;
2078
2079     max_num_ref_frames = sps->num_ref_frames;
2080     if (max_num_ref_frames == 0)
2081         max_num_ref_frames = 1;
2082     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2083         max_num_ref_frames <<= 1;
2084
2085     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2086         return TRUE;
2087     if (priv->short_ref_count < 1)
2088         return FALSE;
2089
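         /* Find the short-term reference with the smallest FrameNumWrap:
            this is the picture to mark as "unused for reference" */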
2090     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2091         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2092         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2093             m = i;
2094     }
2095
2096     ref_picture = priv->short_ref[m];
2097     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2098     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2099
2100     /* Both fields need to be marked as "unused for reference", so
2101        remove the other field from the short_ref[] list as well */
2102     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2103         for (i = 0; i < priv->short_ref_count; i++) {
2104             if (priv->short_ref[i] == ref_picture->other_field) {
2105                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2106                 break;
2107             }
2108         }
2109     }
2110     return TRUE;
2111 }
2112
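     /* Derive picNumX for MMCO 1 and 3: the current picture's PicNum minus
        (difference_of_pic_nums_minus1 + 1) */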
2113 static inline gint32
2114 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2115 {
2116     gint32 pic_num;
2117
2118     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2119         pic_num = picture->frame_num_wrap;
2120     else
2121         pic_num = 2 * picture->frame_num_wrap + 1;
2122     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2123     return pic_num;
2124 }
2125
2126 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2127 static void
2128 exec_ref_pic_marking_adaptive_mmco_1(
2129     GstVaapiDecoderH264  *decoder,
2130     GstVaapiPictureH264  *picture,
2131     GstH264RefPicMarking *ref_pic_marking
2132 )
2133 {
2134     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2135     gint32 i, picNumX;
2136
2137     picNumX = get_picNumX(picture, ref_pic_marking);
2138     i = find_short_term_reference(decoder, picNumX);
2139     if (i < 0)
2140         return;
2141
2142     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2143         GST_VAAPI_PICTURE_IS_FRAME(picture));
2144     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2145 }
2146
2147 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2148 static void
2149 exec_ref_pic_marking_adaptive_mmco_2(
2150     GstVaapiDecoderH264  *decoder,
2151     GstVaapiPictureH264  *picture,
2152     GstH264RefPicMarking *ref_pic_marking
2153 )
2154 {
2155     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2156     gint32 i;
2157
2158     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2159     if (i < 0)
2160         return;
2161
2162     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2163         GST_VAAPI_PICTURE_IS_FRAME(picture));
2164     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2165 }
2166
2167 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2168 static void
2169 exec_ref_pic_marking_adaptive_mmco_3(
2170     GstVaapiDecoderH264  *decoder,
2171     GstVaapiPictureH264  *picture,
2172     GstH264RefPicMarking *ref_pic_marking
2173 )
2174 {
2175     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2176     GstVaapiPictureH264 *ref_picture;
2177     gint32 i, picNumX;
2178
2179     for (i = 0; i < priv->long_ref_count; i++) {
2180         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2181             break;
2182     }
2183     if (i != priv->long_ref_count) {
2184         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2185         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2186     }
2187
2188     picNumX = get_picNumX(picture, ref_pic_marking);
2189     i = find_short_term_reference(decoder, picNumX);
2190     if (i < 0)
2191         return;
2192
2193     ref_picture = priv->short_ref[i];
2194     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2195     priv->long_ref[priv->long_ref_count++] = ref_picture;
2196
2197     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2198     gst_vaapi_picture_h264_set_reference(ref_picture,
2199         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2200         GST_VAAPI_PICTURE_IS_FRAME(picture));
2201 }
2202
2203 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
2204  * as "unused for reference" */
2205 static void
2206 exec_ref_pic_marking_adaptive_mmco_4(
2207     GstVaapiDecoderH264  *decoder,
2208     GstVaapiPictureH264  *picture,
2209     GstH264RefPicMarking *ref_pic_marking
2210 )
2211 {
2212     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2213     gint32 i, long_term_frame_idx;
2214
2215     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
2216
2217     for (i = 0; i < priv->long_ref_count; i++) {
2218         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
2219             continue;
2220         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
2221         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2222         i--;
2223     }
2224 }
2225
2226 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
2227 static void
2228 exec_ref_pic_marking_adaptive_mmco_5(
2229     GstVaapiDecoderH264  *decoder,
2230     GstVaapiPictureH264  *picture,
2231     GstH264RefPicMarking *ref_pic_marking
2232 )
2233 {
2234     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2235
2236     dpb_flush(decoder);
2237
2238     priv->prev_pic_has_mmco5 = TRUE;
2239
2240     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
2241     priv->frame_num = 0;
2242     priv->frame_num_offset = 0;
2243     picture->frame_num = 0;
2244
2245     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
2246     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2247         picture->field_poc[TOP_FIELD] -= picture->base.poc;
2248     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2249         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
2250     picture->base.poc = 0;
2251 }
2252
2253 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
2254 static void
2255 exec_ref_pic_marking_adaptive_mmco_6(
2256     GstVaapiDecoderH264  *decoder,
2257     GstVaapiPictureH264  *picture,
2258     GstH264RefPicMarking *ref_pic_marking
2259 )
2260 {
2261     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2262     gst_vaapi_picture_h264_set_reference(picture,
2263         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, FALSE);
2264 }
2265
2266 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
2267 static gboolean
2268 exec_ref_pic_marking_adaptive(
2269     GstVaapiDecoderH264     *decoder,
2270     GstVaapiPictureH264     *picture,
2271     GstH264DecRefPicMarking *dec_ref_pic_marking
2272 )
2273 {
2274     guint i;
2275
2276     GST_DEBUG("reference picture marking process (adaptive memory control)");
2277
2278     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
2279         GstVaapiDecoderH264  *decoder,
2280         GstVaapiPictureH264  *picture,
2281         GstH264RefPicMarking *ref_pic_marking
2282     );
2283
2284     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
2285         NULL,
2286         exec_ref_pic_marking_adaptive_mmco_1,
2287         exec_ref_pic_marking_adaptive_mmco_2,
2288         exec_ref_pic_marking_adaptive_mmco_3,
2289         exec_ref_pic_marking_adaptive_mmco_4,
2290         exec_ref_pic_marking_adaptive_mmco_5,
2291         exec_ref_pic_marking_adaptive_mmco_6,
2292     };
2293
2294     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
2295         GstH264RefPicMarking * const ref_pic_marking =
2296             &dec_ref_pic_marking->ref_pic_marking[i];
2297
2298         const guint mmco = ref_pic_marking->memory_management_control_operation;
2299         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
2300             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
2301         else {
2302             GST_ERROR("unhandled MMCO %u", mmco);
2303             return FALSE;
2304         }
2305     }
2306     return TRUE;
2307 }
2308
2309 /* 8.2.5 - Execute reference picture marking process */
2310 static gboolean
2311 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
2312 {
2313     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2314
2315     priv->prev_pic_has_mmco5 = FALSE;
2316     priv->prev_pic_structure = picture->structure;
2317
2318     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
2319         return TRUE;
2320
2321     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
2322         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2323             &picture->last_slice_hdr->dec_ref_pic_marking;
2324         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
2325             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
2326                 return FALSE;
2327         }
2328         else {
2329             if (!exec_ref_pic_marking_sliding_window(decoder))
2330                 return FALSE;
2331         }
2332     }
2333     return TRUE;
2334 }
2335
2336 static void
2337 vaapi_init_picture(VAPictureH264 *pic)
2338 {
2339     pic->picture_id           = VA_INVALID_ID;
2340     pic->frame_idx            = 0;
2341     pic->flags                = VA_PICTURE_H264_INVALID;
2342     pic->TopFieldOrderCnt     = 0;
2343     pic->BottomFieldOrderCnt  = 0;
2344 }
2345
2346 static void
2347 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
2348     guint picture_structure)
2349 {
2350     if (!picture_structure)
2351         picture_structure = picture->structure;
2352
2353     pic->picture_id = picture->base.surface_id;
2354     pic->flags = 0;
2355
2356     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
2357         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
2358         pic->frame_idx = picture->long_term_frame_idx;
2359     }
2360     else {
2361         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
2362             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
2363         pic->frame_idx = picture->frame_num;
2364     }
2365
2366     switch (picture_structure) {
2367     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
2368         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2369         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2370         break;
2371     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
2372         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
2373         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2374         pic->BottomFieldOrderCnt = 0;
2375         break;
2376     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
2377         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
2378         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2379         pic->TopFieldOrderCnt = 0;
2380         break;
2381     }
2382 }
2383
2384 static gboolean
2385 fill_picture(GstVaapiDecoderH264 *decoder,
2386     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2387 {
2388     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2389     GstVaapiPicture * const base_picture = &picture->base;
2390     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2391     GstH264PPS * const pps = picture->pps;
2392     GstH264SPS * const sps = pps->sequence;
2393     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
2394     guint i, n;
2395
2396     /* Fill in VAPictureParameterBufferH264 */
2397     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
2398
2399     for (i = 0, n = 0; i < priv->dpb_count; i++) {
2400         GstVaapiFrameStore * const fs = priv->dpb[i];
2401         if (gst_vaapi_frame_store_has_reference(fs))
2402             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
2403                 fs->buffers[0], fs->structure);
2404     }
2405     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
2406         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
2407
2408 #define COPY_FIELD(s, f) \
2409     pic_param->f = (s)->f
2410
2411 #define COPY_BFM(a, s, f) \
2412     pic_param->a.bits.f = (s)->f
2413
2414     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
2415     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
2416     pic_param->frame_num                    = priv->frame_num;
2417
2418     COPY_FIELD(sps, bit_depth_luma_minus8);
2419     COPY_FIELD(sps, bit_depth_chroma_minus8);
2420     COPY_FIELD(sps, num_ref_frames);
2421     COPY_FIELD(pps, num_slice_groups_minus1);
2422     COPY_FIELD(pps, slice_group_map_type);
2423     COPY_FIELD(pps, slice_group_change_rate_minus1);
2424     COPY_FIELD(pps, pic_init_qp_minus26);
2425     COPY_FIELD(pps, pic_init_qs_minus26);
2426     COPY_FIELD(pps, chroma_qp_index_offset);
2427     COPY_FIELD(pps, second_chroma_qp_index_offset);
2428
2429     pic_param->seq_fields.value                                         = 0; /* reset all bits */
2430     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
2431     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
2432
2433     COPY_BFM(seq_fields, sps, chroma_format_idc);
2434     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
2435     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
2436     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
2437     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
2438     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
2439     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
2440     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
2441     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
2442
2443     pic_param->pic_fields.value                                         = 0; /* reset all bits */
2444     pic_param->pic_fields.bits.field_pic_flag                           = slice_hdr->field_pic_flag;
2445     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
2446
2447     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
2448     COPY_BFM(pic_fields, pps, weighted_pred_flag);
2449     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
2450     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
2451     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
2452     COPY_BFM(pic_fields, pps, pic_order_present_flag);
2453     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
2454     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
2455     return TRUE;
2456 }
2457
2458 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
2459 static gboolean
2460 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
2461 {
2462     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2463     GstH264PPS * const pps = slice_hdr->pps;
2464     GstH264SPS * const sps = pps->sequence;
2465     GstH264SliceHdr *prev_slice_hdr;
2466
2467     if (!prev_pi)
2468         return TRUE;
2469     prev_slice_hdr = &prev_pi->data.slice_hdr;
2470
2471 #define CHECK_EXPR(expr, field_name) do {              \
2472         if (!(expr)) {                                 \
2473             GST_DEBUG(field_name " differs in value"); \
2474             return TRUE;                               \
2475         }                                              \
2476     } while (0)
2477
2478 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
2479     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
2480
2481     /* frame_num differs in value, regardless of any value inferred to 0 */
2482     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
2483
2484     /* pic_parameter_set_id differs in value */
2485     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
2486
2487     /* field_pic_flag differs in value */
2488     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
2489
2490     /* bottom_field_flag is present in both and differs in value */
2491     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
2492         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
2493
2494     /* nal_ref_idc differs in value, with one of the nal_ref_idc values equal to 0 */
2495     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
2496                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
2497
2498     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
2499        value or delta_pic_order_cnt_bottom differs in value */
2500     if (sps->pic_order_cnt_type == 0) {
2501         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
2502         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
2503             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
2504     }
2505
2506     /* POC type is 1 for both and either delta_pic_order_cnt[0]
2507        differs in value or delta_pic_order_cnt[1] differs in value */
2508     else if (sps->pic_order_cnt_type == 1) {
2509         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
2510         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
2511     }
2512
2513     /* IdrPicFlag differs in value */
2514     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
2515
2516     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
2517     if (pi->nalu.idr_pic_flag)
2518         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
2519
2520 #undef CHECK_EXPR
2521 #undef CHECK_VALUE
2522     return FALSE;
2523 }
2524
2525 static GstVaapiDecoderStatus
2526 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2527 {
2528     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2529     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2530     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2531     GstH264PPS * const pps = slice_hdr->pps;
2532     GstH264SPS * const sps = pps->sequence;
2533     GstVaapiPictureH264 *picture;
2534     GstVaapiDecoderStatus status;
2535
2536     status = ensure_context(decoder, sps);
2537     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2538         return status;
2539
2540     priv->decoder_state = 0;
2541
2542     if (priv->current_picture) {
2543         /* Re-use the current picture, whose first field was already decoded */
2544         picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
2545         if (!picture) {
2546             GST_ERROR("failed to allocate field picture");
2547             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2548         }
2549     }
2550     else {
2551         /* Create new picture */
2552         picture = gst_vaapi_picture_h264_new(decoder);
2553         if (!picture) {
2554             GST_ERROR("failed to allocate picture");
2555             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2556         }
2557     }
2558     gst_vaapi_picture_replace(&priv->current_picture, picture);
2559     gst_vaapi_picture_unref(picture);
2560
2561     /* Update cropping rectangle */
2562     if (sps->frame_cropping_flag) {
2563         GstVaapiRectangle crop_rect;
2564         crop_rect.x = sps->crop_rect_x;
2565         crop_rect.y = sps->crop_rect_y;
2566         crop_rect.width = sps->crop_rect_width;
2567         crop_rect.height = sps->crop_rect_height;
2568         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
2569     }
2570
2571     picture->pps = pps;
2572
2573     status = ensure_quant_matrix(decoder, picture);
2574     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
2575         GST_ERROR("failed to reset quantizer matrix");
2576         return status;
2577     }
2578
2579     if (!init_picture(decoder, picture, pi))
2580         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2581     if (!fill_picture(decoder, picture, pi))
2582         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2583
2584     priv->decoder_state = priv->parser_state & (
2585         GST_H264_VIDEO_STATE_GOT_SPS |
2586         GST_H264_VIDEO_STATE_GOT_PPS);
2587     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2588 }
2589
2590 static inline guint
2591 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr)
2592 {
2593     guint epb_count;
2594
2595     epb_count = slice_hdr->n_emulation_prevention_bytes;
2596     return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
2597 }
2598
2599 static gboolean
2600 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
2601     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2602 {
2603     VASliceParameterBufferH264 * const slice_param = slice->param;
2604     GstH264PPS * const pps = slice_hdr->pps;
2605     GstH264SPS * const sps = pps->sequence;
2606     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
2607     guint num_weight_tables = 0;
2608     gint i, j;
2609
2610     if (pps->weighted_pred_flag &&
2611         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
2612         num_weight_tables = 1;
2613     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
2614         num_weight_tables = 2;
2615     else
2616         num_weight_tables = 0;
2617
2618     slice_param->luma_log2_weight_denom   = 0;
2619     slice_param->chroma_log2_weight_denom = 0;
2620     slice_param->luma_weight_l0_flag      = 0;
2621     slice_param->chroma_weight_l0_flag    = 0;
2622     slice_param->luma_weight_l1_flag      = 0;
2623     slice_param->chroma_weight_l1_flag    = 0;
2624
2625     if (num_weight_tables < 1)
2626         return TRUE;
2627
2628     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
2629     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
2630
2631     slice_param->luma_weight_l0_flag = 1;
2632     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2633         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
2634         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
2635     }
2636
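         /* Chroma weights are only coded when ChromaArrayType is not 0,
            i.e. neither monochrome nor separate colour planes */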
2637     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
2638     if (slice_param->chroma_weight_l0_flag) {
2639         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2640             for (j = 0; j < 2; j++) {
2641                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
2642                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
2643             }
2644         }
2645     }
2646
2647     if (num_weight_tables < 2)
2648         return TRUE;
2649
2650     slice_param->luma_weight_l1_flag = 1;
2651     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2652         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
2653         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
2654     }
2655
2656     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
2657     if (slice_param->chroma_weight_l1_flag) {
2658         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2659             for (j = 0; j < 2; j++) {
2660                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
2661                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
2662             }
2663         }
2664     }
2665     return TRUE;
2666 }
2667
2668 static gboolean
2669 fill_RefPicList(GstVaapiDecoderH264 *decoder,
2670     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2671 {
2672     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2673     VASliceParameterBufferH264 * const slice_param = slice->param;
2674     guint i, num_ref_lists = 0;
2675
2676     slice_param->num_ref_idx_l0_active_minus1 = 0;
2677     slice_param->num_ref_idx_l1_active_minus1 = 0;
2678
2679     if (GST_H264_IS_B_SLICE(slice_hdr))
2680         num_ref_lists = 2;
2681     else if (GST_H264_IS_I_SLICE(slice_hdr))
2682         num_ref_lists = 0;
2683     else
2684         num_ref_lists = 1;
2685
2686     if (num_ref_lists < 1)
2687         return TRUE;
2688
2689     slice_param->num_ref_idx_l0_active_minus1 =
2690         slice_hdr->num_ref_idx_l0_active_minus1;
2691
2692     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
2693         vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
2694     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
2695         vaapi_init_picture(&slice_param->RefPicList0[i]);
2696
2697     if (num_ref_lists < 2)
2698         return TRUE;
2699
2700     slice_param->num_ref_idx_l1_active_minus1 =
2701         slice_hdr->num_ref_idx_l1_active_minus1;
2702
2703     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
2704         vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
2705     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
2706         vaapi_init_picture(&slice_param->RefPicList1[i]);
2707     return TRUE;
2708 }
2709
2710 static gboolean
2711 fill_slice(GstVaapiDecoderH264 *decoder,
2712     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2713 {
2714     VASliceParameterBufferH264 * const slice_param = slice->param;
2715
2716     /* Fill in VASliceParameterBufferH264 */
2717     slice_param->slice_data_bit_offset          = get_slice_data_bit_offset(slice_hdr);
2718     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
2719     slice_param->slice_type                     = slice_hdr->type % 5;
2720     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
2721     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
2722     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
2723     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
2724     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
2725     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
2726
2727     if (!fill_RefPicList(decoder, slice, slice_hdr))
2728         return FALSE;
2729     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
2730         return FALSE;
2731     return TRUE;
2732 }
2733
2734 static GstVaapiDecoderStatus
2735 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2736 {
2737     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2738     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2739     GstVaapiPictureH264 * const picture = priv->current_picture;
2740     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2741     GstVaapiSlice *slice;
2742     GstBuffer * const buffer =
2743         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
2744     GstMapInfo map_info;
2745
2746     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
2747
2748     if (!is_valid_state(priv->decoder_state,
2749             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
2750         GST_WARNING("failed to receive enough headers to decode slice");
2751         return GST_VAAPI_DECODER_STATUS_SUCCESS;
2752     }
2753
2754     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
2755         GST_ERROR("failed to map buffer");
2756         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2757     }
2758
2759     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
2760         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
2761     gst_buffer_unmap(buffer, &map_info);
2762     if (!slice) {
2763         GST_ERROR("failed to allocate slice");
2764         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2765     }
2766
2767     if (!fill_slice(decoder, slice, slice_hdr)) {
2768         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
2769         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2770     }
2771
2772     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
2773     picture->last_slice_hdr = slice_hdr;
2774     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
2775     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2776 }
2777
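     /* Scan for the next 00 00 01 start-code prefix in the adapter; returns
        its offset, or -1 if none is found in the requested range */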
2778 static inline gint
2779 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
2780 {
2781     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
2782                                                      0xffffff00, 0x00000100,
2783                                                      ofs, size,
2784                                                      scp);
2785 }
2786
2787 static GstVaapiDecoderStatus
2788 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2789 {
2790     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2791     GstVaapiDecoderStatus status;
2792
2793     switch (pi->nalu.type) {
2794     case GST_H264_NAL_SLICE_IDR:
2795         /* fall-through. IDR specifics are handled in init_picture() */
2796     case GST_H264_NAL_SLICE:
2797         status = decode_slice(decoder, unit);
2798         break;
2799     case GST_H264_NAL_SEQ_END:
2800     case GST_H264_NAL_STREAM_END:
2801         status = decode_sequence_end(decoder);
2802         break;
2803     case GST_H264_NAL_SEI:
2804         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
2805         break;
2806     default:
2807         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
2808         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
2809         break;
2810     }
2811     return status;
2812 }
2813
2814 static GstVaapiDecoderStatus
2815 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
2816     const guchar *buf, guint buf_size)
2817 {
2818     GstVaapiDecoderH264 * const decoder =
2819         GST_VAAPI_DECODER_H264_CAST(base_decoder);
2820     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2821     GstVaapiDecoderStatus status;
2822     GstVaapiDecoderUnit unit;
2823     GstVaapiParserInfoH264 pi;
2824     GstH264ParserResult result;
2825     guint i, ofs, num_sps, num_pps;
2826
2827     unit.parsed_info = &pi;
2828
2829     if (buf_size < 8)
2830         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2831
2832     if (buf[0] != 1) {
2833         GST_ERROR("failed to decode codec-data, not in avcC format");
2834         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
2835     }
2836
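         /* avcC layout: buf[4] lower 2 bits hold lengthSizeMinusOne,
            buf[5] lower 5 bits hold numOfSequenceParameterSets */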
2837     priv->nal_length_size = (buf[4] & 0x03) + 1;
2838
2839     num_sps = buf[5] & 0x1f;
2840     ofs = 6;
2841
2842     for (i = 0; i < num_sps; i++) {
2843         result = gst_h264_parser_identify_nalu_avc(
2844             priv->parser,
2845             buf, ofs, buf_size, 2,
2846             &pi.nalu
2847         );
2848         if (result != GST_H264_PARSER_OK)
2849             return get_status(result);
2850
2851         status = parse_sps(decoder, &unit);
2852         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2853             return status;
2854         ofs = pi.nalu.offset + pi.nalu.size;
2855     }
2856
2857     num_pps = buf[ofs];
2858     ofs++;
2859
2860     for (i = 0; i < num_pps; i++) {
2861         result = gst_h264_parser_identify_nalu_avc(
2862             priv->parser,
2863             buf, ofs, buf_size, 2,
2864             &pi.nalu
2865         );
2866         if (result != GST_H264_PARSER_OK)
2867             return get_status(result);
2868
2869         status = parse_pps(decoder, &unit);
2870         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2871             return status;
2872         ofs = pi.nalu.offset + pi.nalu.size;
2873     }
2874
2875     priv->is_avcC = TRUE;
2876     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2877 }
2878
2879 static GstVaapiDecoderStatus
2880 ensure_decoder(GstVaapiDecoderH264 *decoder)
2881 {
2882     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2883     GstVaapiDecoderStatus status;
2884
2885     if (!priv->is_opened) {
2886         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
2887         if (!priv->is_opened)
2888             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
2889
2890         status = gst_vaapi_decoder_decode_codec_data(
2891             GST_VAAPI_DECODER_CAST(decoder));
2892         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2893             return status;
2894     }
2895     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2896 }
2897
2898 static GstVaapiDecoderStatus
2899 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
2900     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
2901 {
2902     GstVaapiDecoderH264 * const decoder =
2903         GST_VAAPI_DECODER_H264_CAST(base_decoder);
2904     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2905     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
2906     GstVaapiParserInfoH264 *pi;
2907     GstVaapiDecoderStatus status;
2908     GstH264ParserResult result;
2909     guchar *buf;
2910     guint i, size, buf_size, nalu_size, flags;
2911     guint32 start_code;
2912     gint ofs, ofs2;
2913
2914     status = ensure_decoder(decoder);
2915     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2916         return status;
2917
2918     size = gst_adapter_available(adapter);
2919
2920     if (priv->is_avcC) {
2921         if (size < priv->nal_length_size)
2922             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2923
2924         buf = (guchar *)&start_code;
2925         g_assert(priv->nal_length_size <= sizeof(start_code));
2926         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
2927
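             /* The NAL unit size is stored in big-endian order over
                nal_length_size bytes */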
2928         nalu_size = 0;
2929         for (i = 0; i < priv->nal_length_size; i++)
2930             nalu_size = (nalu_size << 8) | buf[i];
2931
2932         buf_size = priv->nal_length_size + nalu_size;
2933         if (size < buf_size)
2934             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2935     }
2936     else {
2937         if (size < 4)
2938             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2939
2940         ofs = scan_for_start_code(adapter, 0, size, NULL);
2941         if (ofs < 0)
2942             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2943
2944         if (ofs > 0) {
2945             gst_adapter_flush(adapter, ofs);
2946             size -= ofs;
2947         }
2948
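        /* ps->input_offset2 records how far the previous, unsuccessful scan
         * got, so the search for the next start code resumes from there
         * instead of rescanning the whole adapter on every call. */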
2949         ofs2 = ps->input_offset2 - ofs - 4;
2950         if (ofs2 < 4)
2951             ofs2 = 4;
2952
2953         ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
2954             scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
2955         if (ofs < 0) {
2956             // Assume the whole NAL unit is present if end-of-stream
2957             if (!at_eos) {
2958                 ps->input_offset2 = size;
2959                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2960             }
2961             ofs = size;
2962         }
2963         buf_size = ofs;
2964     }
2965     ps->input_offset2 = 0;
2966
2967     buf = (guchar *)gst_adapter_map(adapter, buf_size);
2968     if (!buf)
2969         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2970
2971     unit->size = buf_size;
2972
2973     pi = gst_vaapi_parser_info_h264_new();
2974     if (!pi)
2975         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2976
2977     gst_vaapi_decoder_unit_set_parsed_info(unit,
2978         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
2979
2980     if (priv->is_avcC)
2981         result = gst_h264_parser_identify_nalu_avc(priv->parser,
2982             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
2983     else
2984         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
2985             buf, 0, buf_size, &pi->nalu);
2986     status = get_status(result);
2987     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2988         return status;
2989
2990     switch (pi->nalu.type) {
2991     case GST_H264_NAL_SPS:
2992         status = parse_sps(decoder, unit);
2993         break;
2994     case GST_H264_NAL_PPS:
2995         status = parse_pps(decoder, unit);
2996         break;
2997     case GST_H264_NAL_SEI:
2998         status = parse_sei(decoder, unit);
2999         break;
3000     case GST_H264_NAL_SLICE_IDR:
3001     case GST_H264_NAL_SLICE:
3002         status = parse_slice(decoder, unit);
3003         break;
3004     default:
3005         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3006         break;
3007     }
3008     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3009         return status;
3010
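    /* Map the NAL unit type to decoder unit flags: FRAME_START/FRAME_END
     * delimit access units for the base decoder, SKIP marks units that
     * carry nothing to decode, and SLICE marks units holding slice data. */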
3011     flags = 0;
3012     switch (pi->nalu.type) {
3013     case GST_H264_NAL_AU_DELIMITER:
3014         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3015         /* fall-through */
3016     case GST_H264_NAL_FILLER_DATA:
3017         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3018         break;
3019     case GST_H264_NAL_STREAM_END:
3020         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3021         /* fall-through */
3022     case GST_H264_NAL_SEQ_END:
3023         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3024         break;
3025     case GST_H264_NAL_SPS:
3026     case GST_H264_NAL_PPS:
3027         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3028         /* fall-through */
3029     case GST_H264_NAL_SEI:
3030         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3031         break;
3032     case GST_H264_NAL_SLICE_IDR:
3033     case GST_H264_NAL_SLICE:
3034         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
3035         if (is_new_picture(pi, priv->prev_slice_pi))
3036             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3037         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
3038         break;
3039     default:
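        /* NAL unit types 14-18 (prefix NAL unit, subset SPS and the
         * reserved range) may only occur before the first VCL NAL unit of
         * an access unit, so they also signal the start of a new frame. */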
3040         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
3041             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3042         break;
3043     }
3044     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
3045
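    /* The parsed NAL unit points into the adapter mapping, which is only
     * valid for the duration of this call; clear the data pointer so no
     * stale reference is kept around in the parsed info. */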
3046     pi->nalu.data = NULL;
3047     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3048 }
3049
3050 static GstVaapiDecoderStatus
3051 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
3052     GstVaapiDecoderUnit *unit)
3053 {
3054     GstVaapiDecoderH264 * const decoder =
3055         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3056     GstVaapiDecoderStatus status;
3057
3058     status = ensure_decoder(decoder);
3059     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3060         return status;
3061     return decode_unit(decoder, unit);
3062 }
3063
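/* start_frame() is invoked by the base decoder for the first unit of each
 * access unit and end_frame() once all of its units have been decoded;
 * here they begin and complete the current picture, respectively. */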
3064 static GstVaapiDecoderStatus
3065 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
3066     GstVaapiDecoderUnit *unit)
3067 {
3068     GstVaapiDecoderH264 * const decoder =
3069         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3070
3071     return decode_picture(decoder, unit);
3072 }
3073
3074 static GstVaapiDecoderStatus
3075 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
3076 {
3077     GstVaapiDecoderH264 * const decoder =
3078         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3079
3080     return decode_current_picture(decoder);
3081 }
3082
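/* Flushing drains the DPB, outputting any pictures still held back for
 * reordering. */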
3083 static GstVaapiDecoderStatus
3084 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
3085 {
3086     GstVaapiDecoderH264 * const decoder =
3087         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3088
3089     dpb_flush(decoder);
3090     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3091 }
3092
3093 static void
3094 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
3095 {
3096     GstVaapiMiniObjectClass * const object_class =
3097         GST_VAAPI_MINI_OBJECT_CLASS(klass);
3098     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
3099
3100     object_class->size          = sizeof(GstVaapiDecoderH264);
3101     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
3102
3103     decoder_class->create       = gst_vaapi_decoder_h264_create;
3104     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
3105     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
3106     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
3107     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
3108     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
3109     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
3110
3111     decoder_class->decode_codec_data =
3112         gst_vaapi_decoder_h264_decode_codec_data;
3113 }
3114
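/* Returns the decoder class singleton.  The decoder is based on
 * GstVaapiMiniObject rather than GObject, so the class structure is
 * initialized by hand, exactly once, on first use. */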
3115 static inline const GstVaapiDecoderClass *
3116 gst_vaapi_decoder_h264_class(void)
3117 {
3118     static GstVaapiDecoderH264Class g_class;
3119     static gsize g_class_init = FALSE;
3120
3121     if (g_once_init_enter(&g_class_init)) {
3122         gst_vaapi_decoder_h264_class_init(&g_class);
3123         g_once_init_leave(&g_class_init, TRUE);
3124     }
3125     return GST_VAAPI_DECODER_CLASS(&g_class);
3126 }
3127
3128 /**
3129  * gst_vaapi_decoder_h264_new:
3130  * @display: a #GstVaapiDisplay
3131  * @caps: a #GstCaps holding codec information
3132  *
3133  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
3134  * hold extra information like codec-data and picture coded size.
3135  *
3136  * Return value: the newly allocated #GstVaapiDecoder object
3137  */
3138 GstVaapiDecoder *
3139 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
3140 {
3141     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
3142 }
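
/*
 * Usage sketch (illustrative only, not part of the library): creating the
 * decoder for an avcC-packetized stream.  The codec_data buffer is assumed
 * to come from the container parser; the caps fields follow the usual
 * GStreamer conventions for video/x-h264.
 */
#if 0
static GstVaapiDecoder *
create_h264_decoder(GstVaapiDisplay *display, GstBuffer *codec_data)
{
    GstCaps *caps;
    GstVaapiDecoder *decoder;

    /* Advertise an AVC (length-prefixed) stream; codec_data carries the
     * AVCDecoderConfigurationRecord consumed by decode_codec_data(). */
    caps = gst_caps_new_simple("video/x-h264",
        "stream-format", G_TYPE_STRING, "avc",
        "alignment",     G_TYPE_STRING, "au",
        NULL);
    if (codec_data)
        gst_caps_set_simple(caps, "codec_data", GST_TYPE_BUFFER, codec_data,
            NULL);

    decoder = gst_vaapi_decoder_h264_new(display, caps);
    gst_caps_unref(caps);
    return decoder;
}
#endif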