decoder: h264: fix decoding of BA3_SVA_C.264.
platform/upstream/gstreamer-vaapi.git: gst-libs/gst/vaapi/gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2013 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Define to 1 if strict ordering of the DPB is needed. Only useful for debugging */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 #define GST_VAAPI_PARSER_INFO_H264(obj) \
61     ((GstVaapiParserInfoH264 *)(obj))
62
63 struct _GstVaapiParserInfoH264 {
64     GstVaapiMiniObject  parent_instance;
65     GstH264NalUnit      nalu;
66     union {
67         GstH264SPS      sps;
68         GstH264PPS      pps;
69         GstH264SliceHdr slice_hdr;
70     }                   data;
71 };
72
73 static inline const GstVaapiMiniObjectClass *
74 gst_vaapi_parser_info_h264_class(void)
75 {
76     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
77         sizeof(GstVaapiParserInfoH264),
78         NULL
79     };
80     return &GstVaapiParserInfoH264Class;
81 }
82
83 static inline GstVaapiParserInfoH264 *
84 gst_vaapi_parser_info_h264_new(void)
85 {
86     return (GstVaapiParserInfoH264 *)
87         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
88 }
89
90 #define gst_vaapi_parser_info_h264_ref(pi) \
91     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
92
93 #define gst_vaapi_parser_info_h264_unref(pi) \
94     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
95
96 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
97     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
98         (GstVaapiMiniObject *)(new_pi))
99
100 /* ------------------------------------------------------------------------- */
101 /* --- H.264 Pictures                                                    --- */
102 /* ------------------------------------------------------------------------- */
103
104 /*
105  * Extended picture flags:
106  *
107  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
108  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
109  *     "used for short-term reference"
110  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
111  *     "used for long-term reference"
112  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
113  *     reference picture (short-term reference or long-term reference)
114  */
115 enum {
116     GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
117
118     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
119         GST_VAAPI_PICTURE_FLAG_REFERENCE),
120     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
121         GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
122     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
123         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
124         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
125 };
126
127 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
128     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
129
130 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
131     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
132       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
133      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
134
135 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
136     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
137       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
138      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
139
140 struct _GstVaapiPictureH264 {
141     GstVaapiPicture             base;
142     GstH264PPS                 *pps;
143     GstH264SliceHdr            *last_slice_hdr;
144     guint                       structure;
145     gint32                      field_poc[2];
146     gint32                      frame_num;              // Original frame_num from slice_header()
147     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
148     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
149     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
150     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
151     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
152     guint                       output_flag             : 1;
153     guint                       output_needed           : 1;
154 };
155
156 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
157
158 void
159 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
160 {
161     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
162 }
163
164 gboolean
165 gst_vaapi_picture_h264_create(
166     GstVaapiPictureH264                      *picture,
167     const GstVaapiCodecObjectConstructorArgs *args
168 )
169 {
170     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
171         return FALSE;
172
173     picture->field_poc[0]       = G_MAXINT32;
174     picture->field_poc[1]       = G_MAXINT32;
175     picture->output_needed      = FALSE;
176     return TRUE;
177 }
178
179 static inline GstVaapiPictureH264 *
180 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
181 {
182     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
183         &GstVaapiPictureH264Class,
184         GST_VAAPI_CODEC_BASE(decoder),
185         NULL, sizeof(VAPictureParameterBufferH264),
186         NULL, 0,
187         0);
188 }
189
190 static inline void
191 gst_vaapi_picture_h264_set_reference(
192     GstVaapiPictureH264 *picture,
193     guint                reference_flags,
194     gboolean             other_field
195 )
196 {
197     if (!picture)
198         return;
199     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
200     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
201
202     if (!other_field || !(picture = picture->other_field))
203         return;
204     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
205     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
206 }
207
208 static inline GstVaapiPictureH264 *
209 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
210 {
211     g_return_val_if_fail(picture, NULL);
212
213     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
214 }
215
216 /* ------------------------------------------------------------------------- */
217 /* --- Frame Buffers (DPB)                                               --- */
218 /* ------------------------------------------------------------------------- */
219
220 struct _GstVaapiFrameStore {
221     /*< private >*/
222     GstVaapiMiniObject          parent_instance;
223
224     guint                       structure;
225     GstVaapiPictureH264        *buffers[2];
226     guint                       num_buffers;
227     guint                       output_needed;
228 };
229
230 static void
231 gst_vaapi_frame_store_finalize(gpointer object)
232 {
233     GstVaapiFrameStore * const fs = object;
234     guint i;
235
236     for (i = 0; i < fs->num_buffers; i++)
237         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
238 }
239
240 static GstVaapiFrameStore *
241 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
242 {
243     GstVaapiFrameStore *fs;
244
245     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
246         sizeof(GstVaapiFrameStore),
247         gst_vaapi_frame_store_finalize
248     };
249
250     fs = (GstVaapiFrameStore *)
251         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
252     if (!fs)
253         return NULL;
254
255     fs->structure       = picture->structure;
256     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
257     fs->buffers[1]      = NULL;
258     fs->num_buffers     = 1;
259     fs->output_needed   = picture->output_needed;
260     return fs;
261 }
262
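/* Adds the second field to a frame store that already holds the first
   field, and fills in the POC value each field is missing from the other */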
263 static gboolean
264 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
265 {
266     guint field;
267
268     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
269     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
270     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
271
272     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
273     if (picture->output_flag) {
274         picture->output_needed = TRUE;
275         fs->output_needed++;
276     }
277
278     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
279
280     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
281         TOP_FIELD : BOTTOM_FIELD;
282     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
283     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
284     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
285     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
286     return TRUE;
287 }
288
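/* Splits a decoded frame into two field pictures so that subsequent
   interlaced pictures can reference it on a per-field basis */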
289 static gboolean
290 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
291 {
292     GstVaapiPictureH264 * const first_field = fs->buffers[0];
293     GstVaapiPictureH264 *second_field;
294
295     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
296
297     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
298     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
299
300     second_field = gst_vaapi_picture_h264_new_field(first_field);
301     if (!second_field)
302         return FALSE;
303     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
304     gst_vaapi_picture_unref(second_field);
305
306     second_field->frame_num    = first_field->frame_num;
307     second_field->field_poc[0] = first_field->field_poc[0];
308     second_field->field_poc[1] = first_field->field_poc[1];
309     second_field->output_flag  = first_field->output_flag;
310     if (second_field->output_flag) {
311         second_field->output_needed = TRUE;
312         fs->output_needed++;
313     }
314     return TRUE;
315 }
316
317 static inline gboolean
318 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
319 {
320     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
321 }
322
323 static inline gboolean
324 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
325 {
326     guint i;
327
328     for (i = 0; i < fs->num_buffers; i++) {
329         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
330             return TRUE;
331     }
332     return FALSE;
333 }
334
335 #define gst_vaapi_frame_store_ref(fs) \
336     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
337
338 #define gst_vaapi_frame_store_unref(fs) \
339     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
340
341 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
342     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
343         (GstVaapiMiniObject *)(new_fs))
344
345 /* ------------------------------------------------------------------------- */
346 /* --- H.264 Decoder                                                     --- */
347 /* ------------------------------------------------------------------------- */
348
349 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
350     ((GstVaapiDecoderH264 *)(decoder))
351
352 struct _GstVaapiDecoderH264Private {
353     GstH264NalParser           *parser;
354     GstVaapiPictureH264        *current_picture;
355     GstVaapiParserInfoH264     *prev_slice_pi;
356     GstVaapiFrameStore         *prev_frame;
357     GstVaapiFrameStore         *dpb[16];
358     guint                       dpb_count;
359     guint                       dpb_size;
360     GstVaapiProfile             profile;
361     GstVaapiEntrypoint          entrypoint;
362     GstVaapiChromaType          chroma_type;
363     GstVaapiPictureH264        *short_ref[32];
364     guint                       short_ref_count;
365     GstVaapiPictureH264        *long_ref[32];
366     guint                       long_ref_count;
367     GstVaapiPictureH264        *RefPicList0[32];
368     guint                       RefPicList0_count;
369     GstVaapiPictureH264        *RefPicList1[32];
370     guint                       RefPicList1_count;
371     guint                       nal_length_size;
372     guint                       mb_width;
373     guint                       mb_height;
374     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
375     gint32                      poc_msb;                // PicOrderCntMsb
376     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
377     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
378     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
379     gint32                      frame_num_offset;       // FrameNumOffset
380     gint32                      frame_num;              // frame_num (from slice_header())
381     gint32                      prev_frame_num;         // prevFrameNum
382     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
383     gboolean                    prev_pic_structure;     // previous picture structure
384     guint                       is_opened               : 1;
385     guint                       is_avcC                 : 1;
386     guint                       got_sps                 : 1;
387     guint                       got_pps                 : 1;
388     guint                       has_context             : 1;
389     guint                       progressive_sequence    : 1;
390 };
391
392 /**
393  * GstVaapiDecoderH264:
394  *
395  * A decoder based on H.264.
396  */
397 struct _GstVaapiDecoderH264 {
398     /*< private >*/
399     GstVaapiDecoder             parent_instance;
400     GstVaapiDecoderH264Private  priv;
401 };
402
403 /**
404  * GstVaapiDecoderH264Class:
405  *
406  * A decoder class based on H.264.
407  */
408 struct _GstVaapiDecoderH264Class {
409     /*< private >*/
410     GstVaapiDecoderClass parent_class;
411 };
412
413 static gboolean
414 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
415
416 /* Get number of reference frames to use */
417 static guint
418 get_max_dec_frame_buffering(GstH264SPS *sps)
419 {
420     guint max_dec_frame_buffering, PicSizeMbs;
421     GstVaapiLevelH264 level;
422     const GstVaapiH264LevelLimits *level_limits;
423
424     /* Table A-1 - Level limits */
425     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
426         level = GST_VAAPI_LEVEL_H264_L1b;
427     else
428         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
429     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
430     if (!level_limits)
431         return 16;
432
433     PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
434                   (sps->pic_height_in_map_units_minus1 + 1) *
435                   (sps->frame_mbs_only_flag ? 1 : 2));
436     max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
437
438     /* VUI parameters */
439     if (sps->vui_parameters_present_flag) {
440         GstH264VUIParams * const vui_params = &sps->vui_parameters;
441         if (vui_params->bitstream_restriction_flag)
442             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
443         else {
444             switch (sps->profile_idc) {
445             case 44:  // CAVLC 4:4:4 Intra profile
446             case GST_H264_PROFILE_SCALABLE_HIGH:
447             case GST_H264_PROFILE_HIGH:
448             case GST_H264_PROFILE_HIGH10:
449             case GST_H264_PROFILE_HIGH_422:
450             case GST_H264_PROFILE_HIGH_444:
451                 if (sps->constraint_set3_flag)
452                     max_dec_frame_buffering = 0;
453                 break;
454             }
455         }
456     }
457
458     if (max_dec_frame_buffering > 16)
459         max_dec_frame_buffering = 16;
460     else if (max_dec_frame_buffering < sps->num_ref_frames)
461         max_dec_frame_buffering = sps->num_ref_frames;
462     return MAX(1, max_dec_frame_buffering);
463 }
464
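/* Removes the entry at @index by moving the last entry into its place;
   element order is not preserved */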
465 static void
466 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
467 {
468     gpointer * const entries = array;
469     guint num_entries = *array_length_ptr;
470
471     g_return_if_fail(index < num_entries);
472
473     if (index != --num_entries)
474         entries[index] = entries[num_entries];
475     entries[num_entries] = NULL;
476     *array_length_ptr = num_entries;
477 }
478
479 #if 1
480 static inline void
481 array_remove_index(void *array, guint *array_length_ptr, guint index)
482 {
483     array_remove_index_fast(array, array_length_ptr, index);
484 }
485 #else
486 static void
487 array_remove_index(void *array, guint *array_length_ptr, guint index)
488 {
489     gpointer * const entries = array;
490     const guint num_entries = *array_length_ptr - 1;
491     guint i;
492
493     g_return_if_fail(index <= num_entries);
494
495     for (i = index; i < num_entries; i++)
496         entries[i] = entries[i + 1];
497     entries[num_entries] = NULL;
498     *array_length_ptr = num_entries;
499 }
500 #endif
501
502 #define ARRAY_REMOVE_INDEX(array, index) \
503     array_remove_index(array, &array##_count, index)
504
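/* Removes the frame store at @index from the DPB, shifting the remaining
   entries when strict ordering is enabled, or swapping in the last entry
   otherwise */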
505 static void
506 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
507 {
508     GstVaapiDecoderH264Private * const priv = &decoder->priv;
509     guint i, num_frames = --priv->dpb_count;
510
511     if (USE_STRICT_DPB_ORDERING) {
512         for (i = index; i < num_frames; i++)
513             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
514     }
515     else if (index != num_frames)
516         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
517     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
518 }
519
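/* Marks @picture as no longer needed for output; the underlying surface is
   pushed for display once every field in the frame store has been output */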
520 static gboolean
521 dpb_output(
522     GstVaapiDecoderH264 *decoder,
523     GstVaapiFrameStore  *fs,
524     GstVaapiPictureH264 *picture
525 )
526 {
527     picture->output_needed = FALSE;
528
529     if (fs) {
530         if (--fs->output_needed > 0)
531             return TRUE;
532         picture = fs->buffers[0];
533     }
534     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
535 }
536
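/* Drops the frame store at index @i from the DPB if it is neither needed
   for output nor used for reference */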
537 static inline void
538 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
539 {
540     GstVaapiDecoderH264Private * const priv = &decoder->priv;
541     GstVaapiFrameStore * const fs = priv->dpb[i];
542
543     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
544         dpb_remove_index(decoder, i);
545 }
546
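/* "Bumping" process (C.4.5.3): outputs the picture with the smallest POC
   that is still needed for output, then evicts its frame store if possible */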
547 static gboolean
548 dpb_bump(GstVaapiDecoderH264 *decoder)
549 {
550     GstVaapiDecoderH264Private * const priv = &decoder->priv;
551     GstVaapiPictureH264 *found_picture = NULL;
552     guint i, j, found_index;
553     gboolean success;
554
555     for (i = 0; i < priv->dpb_count; i++) {
556         GstVaapiFrameStore * const fs = priv->dpb[i];
557         if (!fs->output_needed)
558             continue;
559         for (j = 0; j < fs->num_buffers; j++) {
560             GstVaapiPictureH264 * const picture = fs->buffers[j];
561             if (!picture->output_needed)
562                 continue;
563             if (!found_picture || found_picture->base.poc > picture->base.poc)
564                 found_picture = picture, found_index = i;
565         }
566     }
567     if (!found_picture)
568         return FALSE;
569
570     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
571     dpb_evict(decoder, found_picture, found_index);
572     return success;
573 }
574
575 static void
576 dpb_clear(GstVaapiDecoderH264 *decoder)
577 {
578     GstVaapiDecoderH264Private * const priv = &decoder->priv;
579     guint i;
580
581     for (i = 0; i < priv->dpb_count; i++)
582         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
583     priv->dpb_count = 0;
584
585     gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
586 }
587
588 static void
589 dpb_flush(GstVaapiDecoderH264 *decoder)
590 {
591     while (dpb_bump(decoder))
592         ;
593     dpb_clear(decoder);
594 }
595
596 static gboolean
597 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
598 {
599     GstVaapiDecoderH264Private * const priv = &decoder->priv;
600     GstVaapiFrameStore *fs;
601     guint i, j;
602
603     // Remove all unused pictures
604     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
605         i = 0;
606         while (i < priv->dpb_count) {
607             GstVaapiFrameStore * const fs = priv->dpb[i];
608             if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
609                 dpb_remove_index(decoder, i);
610             else
611                 i++;
612         }
613     }
614
615     // Check if picture is the second field and the first field is still in DPB
616     fs = priv->prev_frame;
617     if (fs && !gst_vaapi_frame_store_has_frame(fs))
618         return gst_vaapi_frame_store_add(fs, picture);
619
620     // Create new frame store, and split fields if necessary
621     fs = gst_vaapi_frame_store_new(picture);
622     if (!fs)
623         return FALSE;
624     gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
625     gst_vaapi_frame_store_unref(fs);
626
627     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
628         if (!gst_vaapi_frame_store_split_fields(fs))
629             return FALSE;
630     }
631
632     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
633     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
634         while (priv->dpb_count == priv->dpb_size) {
635             if (!dpb_bump(decoder))
636                 return FALSE;
637         }
638         gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
639         if (picture->output_flag) {
640             picture->output_needed = TRUE;
641             fs->output_needed++;
642         }
643     }
644
645     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
646     else {
647         if (!picture->output_flag)
648             return TRUE;
649         while (priv->dpb_count == priv->dpb_size) {
650             gboolean found_picture = FALSE;
651             for (i = 0; !found_picture && i < priv->dpb_count; i++) {
652                 GstVaapiFrameStore * const fs = priv->dpb[i];
653                 if (!fs->output_needed)
654                     continue;
655                 for (j = 0; !found_picture && j < fs->num_buffers; j++)
656                     found_picture = fs->buffers[j]->output_needed &&
657                         fs->buffers[j]->base.poc < picture->base.poc;
658             }
659             if (!found_picture)
660                 return dpb_output(decoder, NULL, picture);
661             if (!dpb_bump(decoder))
662                 return FALSE;
663         }
664         gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
665         picture->output_needed = TRUE;
666         fs->output_needed++;
667     }
668     return TRUE;
669 }
670
671 static inline void
672 dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
673 {
674     GstVaapiDecoderH264Private * const priv = &decoder->priv;
675
676     priv->dpb_size = get_max_dec_frame_buffering(sps);
677     GST_DEBUG("DPB size %u", priv->dpb_size);
678 }
679
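/* Maps a GstH264ParserResult to the corresponding decoder status code */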
680 static GstVaapiDecoderStatus
681 get_status(GstH264ParserResult result)
682 {
683     GstVaapiDecoderStatus status;
684
685     switch (result) {
686     case GST_H264_PARSER_OK:
687         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
688         break;
689     case GST_H264_PARSER_NO_NAL_END:
690         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
691         break;
692     case GST_H264_PARSER_ERROR:
693         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
694         break;
695     default:
696         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
697         break;
698     }
699     return status;
700 }
701
702 static void
703 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
704 {
705     GstVaapiDecoderH264Private * const priv = &decoder->priv;
706
707     gst_vaapi_picture_replace(&priv->current_picture, NULL);
708     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
709
710     dpb_clear(decoder);
711
712     if (priv->parser) {
713         gst_h264_nal_parser_free(priv->parser);
714         priv->parser = NULL;
715     }
716 }
717
718 static gboolean
719 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
720 {
721     GstVaapiDecoderH264Private * const priv = &decoder->priv;
722
723     gst_vaapi_decoder_h264_close(decoder);
724
725     priv->parser = gst_h264_nal_parser_new();
726     if (!priv->parser)
727         return FALSE;
728     return TRUE;
729 }
730
731 static void
732 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
733 {
734     GstVaapiDecoderH264 * const decoder =
735         GST_VAAPI_DECODER_H264_CAST(base_decoder);
736
737     gst_vaapi_decoder_h264_close(decoder);
738 }
739
740 static gboolean
741 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
742 {
743     GstVaapiDecoderH264 * const decoder =
744         GST_VAAPI_DECODER_H264_CAST(base_decoder);
745     GstVaapiDecoderH264Private * const priv = &decoder->priv;
746
747     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
748     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
749     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
750     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
751     priv->progressive_sequence  = TRUE;
752     return TRUE;
753 }
754
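/* Appends @profile to the candidate list, together with any broader profile
   that is also capable of decoding the stream (e.g. High for Main) */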
755 static void
756 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
757     GstVaapiProfile profile)
758 {
759     guint n_profiles = *n_profiles_ptr;
760
761     profiles[n_profiles++] = profile;
762     switch (profile) {
763     case GST_VAAPI_PROFILE_H264_MAIN:
764         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
765         break;
766     default:
767         break;
768     }
769     *n_profiles_ptr = n_profiles;
770 }
771
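/* Selects a VA profile supported by the display: the exact profile signalled
   in the SPS is tried first, then compatible fallbacks derived from the
   constraint_set flags */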
772 static GstVaapiProfile
773 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
774 {
775     GstVaapiDecoderH264Private * const priv = &decoder->priv;
776     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
777     GstVaapiProfile profile, profiles[4];
778     guint i, n_profiles = 0;
779
780     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
781     if (!profile)
782         return GST_VAAPI_PROFILE_UNKNOWN;
783
784     fill_profiles(profiles, &n_profiles, profile);
785     switch (profile) {
786     case GST_VAAPI_PROFILE_H264_BASELINE:
787         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
788             fill_profiles(profiles, &n_profiles,
789                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
790             fill_profiles(profiles, &n_profiles,
791                 GST_VAAPI_PROFILE_H264_MAIN);
792         }
793         break;
794     case GST_VAAPI_PROFILE_H264_EXTENDED:
795         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
796             fill_profiles(profiles, &n_profiles,
797                 GST_VAAPI_PROFILE_H264_MAIN);
798         }
799         break;
800     default:
801         break;
802     }
803
804     /* If the preferred profile (profiles[0]) matches one that we already
805        found, then just return it now instead of searching for it again */
806     if (profiles[0] == priv->profile)
807         return priv->profile;
808
809     for (i = 0; i < n_profiles; i++) {
810         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
811             return profiles[i];
812     }
813     return GST_VAAPI_PROFILE_UNKNOWN;
814 }
815
816 static GstVaapiDecoderStatus
817 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
818 {
819     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
820     GstVaapiDecoderH264Private * const priv = &decoder->priv;
821     GstVaapiContextInfo info;
822     GstVaapiProfile profile;
823     GstVaapiChromaType chroma_type;
824     gboolean reset_context = FALSE;
825     guint mb_width, mb_height;
826
827     profile = get_profile(decoder, sps);
828     if (!profile) {
829         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
830         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
831     }
832
833     if (priv->profile != profile) {
834         GST_DEBUG("profile changed");
835         reset_context = TRUE;
836         priv->profile = profile;
837     }
838
839     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
840     if (!chroma_type || chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420) {
841         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
842         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
843     }
844
845     if (priv->chroma_type != chroma_type) {
846         GST_DEBUG("chroma format changed");
847         reset_context     = TRUE;
848         priv->chroma_type = chroma_type;
849     }
850
851     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
852     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
853         !sps->frame_mbs_only_flag;
854     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
855         GST_DEBUG("size changed");
856         reset_context   = TRUE;
857         priv->mb_width  = mb_width;
858         priv->mb_height = mb_height;
859     }
860
861     priv->progressive_sequence = sps->frame_mbs_only_flag;
862 #if 0
863     /* XXX: we only output complete frames for now */
864     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
865 #endif
866
867     gst_vaapi_decoder_set_pixel_aspect_ratio(
868         base_decoder,
869         sps->vui_parameters.par_n,
870         sps->vui_parameters.par_d
871     );
872
873     if (!reset_context && priv->has_context)
874         return GST_VAAPI_DECODER_STATUS_SUCCESS;
875
876     /* XXX: fix surface size when cropping is implemented */
877     info.profile    = priv->profile;
878     info.entrypoint = priv->entrypoint;
879     info.width      = sps->width;
880     info.height     = sps->height;
881     info.ref_frames = get_max_dec_frame_buffering(sps);
882
883     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
884         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
885     priv->has_context = TRUE;
886
887     /* Reset DPB */
888     dpb_reset(decoder, sps);
889     return GST_VAAPI_DECODER_STATUS_SUCCESS;
890 }
891
892 static void
893 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
894 {
895     guint i;
896
897     /* There are always 6 4x4 scaling lists */
898     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
899     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
900
901     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
902         gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
903             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
904 }
905
906 static void
907 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
908 {
909     const GstH264SPS * const sps = pps->sequence;
910     guint i, n;
911
912     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
913     if (!pps->transform_8x8_mode_flag)
914         return;
915
916     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
917     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
918
919     n = (sps->chroma_format_idc != 3) ? 2 : 6;
920     for (i = 0; i < n; i++) {
921         gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
922             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
923     }
924 }
925
926 static GstVaapiDecoderStatus
927 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
928 {
929     GstVaapiPicture * const base_picture = &picture->base;
930     GstH264PPS * const pps = picture->pps;
931     GstH264SPS * const sps = pps->sequence;
932     VAIQMatrixBufferH264 *iq_matrix;
933
934     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
935     if (!base_picture->iq_matrix) {
936         GST_ERROR("failed to allocate IQ matrix");
937         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
938     }
939     iq_matrix = base_picture->iq_matrix->param;
940
941     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingList8x8[]
942        is not large enough to hold lists for 4:4:4 */
943     if (sps->chroma_format_idc == 3)
944         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
945
946     fill_iq_matrix_4x4(iq_matrix, pps);
947     fill_iq_matrix_8x8(iq_matrix, pps);
948
949     return GST_VAAPI_DECODER_STATUS_SUCCESS;
950 }
951
952 static GstVaapiDecoderStatus
953 decode_current_picture(GstVaapiDecoderH264 *decoder)
954 {
955     GstVaapiDecoderH264Private * const priv = &decoder->priv;
956     GstVaapiPictureH264 * const picture = priv->current_picture;
957
958     if (!picture)
959         return GST_VAAPI_DECODER_STATUS_SUCCESS;
960
961     if (!exec_ref_pic_marking(decoder, picture))
962         goto error;
963     if (!dpb_add(decoder, picture))
964         goto error;
965     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
966         goto error;
967     if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
968         gst_vaapi_picture_replace(&priv->current_picture, NULL);
969     return GST_VAAPI_DECODER_STATUS_SUCCESS;
970
971 error:
972     /* XXX: fix for cases where first field failed to be decoded */
973     gst_vaapi_picture_replace(&priv->current_picture, NULL);
974     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
975 }
976
977 static GstVaapiDecoderStatus
978 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
979 {
980     GstVaapiDecoderH264Private * const priv = &decoder->priv;
981     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
982     GstH264SPS * const sps = &pi->data.sps;
983     GstH264ParserResult result;
984
985     GST_DEBUG("parse SPS");
986
987     /* Variables that don't have inferred values per the H.264
988        standard but that should get a default value anyway */
989     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
990
991     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
992     if (result != GST_H264_PARSER_OK)
993         return get_status(result);
994
995     priv->got_sps = TRUE;
996     return GST_VAAPI_DECODER_STATUS_SUCCESS;
997 }
998
999 static GstVaapiDecoderStatus
1000 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1001 {
1002     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1003     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1004     GstH264PPS * const pps = &pi->data.pps;
1005     GstH264ParserResult result;
1006
1007     GST_DEBUG("parse PPS");
1008
1009     /* Variables that don't have inferred values per the H.264
1010        standard but that should get a default value anyway */
1011     pps->slice_group_map_type = 0;
1012     pps->slice_group_change_rate_minus1 = 0;
1013
1014     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1015     if (result != GST_H264_PARSER_OK)
1016         return get_status(result);
1017
1018     priv->got_pps = TRUE;
1019     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1020 }
1021
1022 static GstVaapiDecoderStatus
1023 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1024 {
1025     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1026     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1027     GstH264SEIMessage sei;
1028     GstH264ParserResult result;
1029
1030     GST_DEBUG("parse SEI");
1031
1032     memset(&sei, 0, sizeof(sei));
1033     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, &sei);
1034     if (result != GST_H264_PARSER_OK) {
1035         GST_WARNING("failed to parse SEI, payload type:%d", sei.payloadType);
1036         return get_status(result);
1037     }
1038
1039     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1040 }
1041
1042 static GstVaapiDecoderStatus
1043 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1044 {
1045     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1046     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1047     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1048     GstH264ParserResult result;
1049
1050     GST_DEBUG("parse slice");
1051
1052     /* Variables that don't have inferred values per the H.264
1053        standard but that should get a default value anyway */
1054     slice_hdr->cabac_init_idc = 0;
1055     slice_hdr->direct_spatial_mv_pred_flag = 0;
1056
1057     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1058         slice_hdr, TRUE, TRUE);
1059     if (result != GST_H264_PARSER_OK)
1060         return get_status(result);
1061
1062     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1063 }
1064
1065 static GstVaapiDecoderStatus
1066 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1067 {
1068     GstVaapiDecoderStatus status;
1069
1070     GST_DEBUG("decode sequence-end");
1071
1072     status = decode_current_picture(decoder);
1073     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1074         return status;
1075
1076     dpb_flush(decoder);
1077     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1078 }
1079
1080 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1081 static void
1082 init_picture_poc_0(
1083     GstVaapiDecoderH264 *decoder,
1084     GstVaapiPictureH264 *picture,
1085     GstH264SliceHdr     *slice_hdr
1086 )
1087 {
1088     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1089     GstH264PPS * const pps = slice_hdr->pps;
1090     GstH264SPS * const sps = pps->sequence;
1091     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1092     gint32 temp_poc;
1093
1094     GST_DEBUG("decode picture order count type 0");
1095
1096     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1097         priv->prev_poc_msb = 0;
1098         priv->prev_poc_lsb = 0;
1099     }
1100     else if (priv->prev_pic_has_mmco5) {
1101         priv->prev_poc_msb = 0;
1102         priv->prev_poc_lsb =
1103             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1104              0 : priv->field_poc[TOP_FIELD]);
1105     }
1106     else {
1107         priv->prev_poc_msb = priv->poc_msb;
1108         priv->prev_poc_lsb = priv->poc_lsb;
1109     }
1110
1111     // (8-3)
1112     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1113     if (priv->poc_lsb < priv->prev_poc_lsb &&
1114         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1115         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1116     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1117              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1118         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1119     else
1120         priv->poc_msb = priv->prev_poc_msb;
1121
1122     temp_poc = priv->poc_msb + priv->poc_lsb;
1123     switch (picture->structure) {
1124     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1125         // (8-4, 8-5)
1126         priv->field_poc[TOP_FIELD] = temp_poc;
1127         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1128             slice_hdr->delta_pic_order_cnt_bottom;
1129         break;
1130     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1131         // (8-4)
1132         priv->field_poc[TOP_FIELD] = temp_poc;
1133         break;
1134     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1135         // (8-5)
1136         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1137         break;
1138     }
1139 }
1140
1141 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1142 static void
1143 init_picture_poc_1(
1144     GstVaapiDecoderH264 *decoder,
1145     GstVaapiPictureH264 *picture,
1146     GstH264SliceHdr     *slice_hdr
1147 )
1148 {
1149     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1150     GstH264PPS * const pps = slice_hdr->pps;
1151     GstH264SPS * const sps = pps->sequence;
1152     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1153     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1154     guint i;
1155
1156     GST_DEBUG("decode picture order count type 1");
1157
1158     if (priv->prev_pic_has_mmco5)
1159         prev_frame_num_offset = 0;
1160     else
1161         prev_frame_num_offset = priv->frame_num_offset;
1162
1163     // (8-6)
1164     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1165         priv->frame_num_offset = 0;
1166     else if (priv->prev_frame_num > priv->frame_num)
1167         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1168     else
1169         priv->frame_num_offset = prev_frame_num_offset;
1170
1171     // (8-7)
1172     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1173         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1174     else
1175         abs_frame_num = 0;
1176     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1177         abs_frame_num = abs_frame_num - 1;
1178
1179     if (abs_frame_num > 0) {
1180         gint32 expected_delta_per_poc_cycle;
1181         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1182
1183         expected_delta_per_poc_cycle = 0;
1184         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1185             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1186
1187         // (8-8)
1188         poc_cycle_cnt = (abs_frame_num - 1) /
1189             sps->num_ref_frames_in_pic_order_cnt_cycle;
1190         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1191             sps->num_ref_frames_in_pic_order_cnt_cycle;
1192
1193         // (8-9)
1194         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1195         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1196             expected_poc += sps->offset_for_ref_frame[i];
1197     }
1198     else
1199         expected_poc = 0;
1200     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1201         expected_poc += sps->offset_for_non_ref_pic;
1202
1203     // (8-10)
1204     switch (picture->structure) {
1205     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1206         priv->field_poc[TOP_FIELD] = expected_poc +
1207             slice_hdr->delta_pic_order_cnt[0];
1208         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1209             sps->offset_for_top_to_bottom_field +
1210             slice_hdr->delta_pic_order_cnt[1];
1211         break;
1212     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1213         priv->field_poc[TOP_FIELD] = expected_poc +
1214             slice_hdr->delta_pic_order_cnt[0];
1215         break;
1216     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1217             priv->field_poc[BOTTOM_FIELD] = expected_poc +
1218             sps->offset_for_top_to_bottom_field +
1219             slice_hdr->delta_pic_order_cnt[0];
1220         break;
1221     }
1222 }
1223
1224 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1225 static void
1226 init_picture_poc_2(
1227     GstVaapiDecoderH264 *decoder,
1228     GstVaapiPictureH264 *picture,
1229     GstH264SliceHdr     *slice_hdr
1230 )
1231 {
1232     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1233     GstH264PPS * const pps = slice_hdr->pps;
1234     GstH264SPS * const sps = pps->sequence;
1235     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1236     gint32 prev_frame_num_offset, temp_poc;
1237
1238     GST_DEBUG("decode picture order count type 2");
1239
1240     if (priv->prev_pic_has_mmco5)
1241         prev_frame_num_offset = 0;
1242     else
1243         prev_frame_num_offset = priv->frame_num_offset;
1244
1245     // (8-11)
1246     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1247         priv->frame_num_offset = 0;
1248     else if (priv->prev_frame_num > priv->frame_num)
1249         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1250     else
1251         priv->frame_num_offset = prev_frame_num_offset;
1252
1253     // (8-12)
1254     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1255         temp_poc = 0;
1256     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1257         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1258     else
1259         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1260
1261     // (8-13)
1262     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1263         priv->field_poc[TOP_FIELD] = temp_poc;
1264     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1265         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1266 }
1267
1268 /* 8.2.1 - Decoding process for picture order count */
1269 static void
1270 init_picture_poc(
1271     GstVaapiDecoderH264 *decoder,
1272     GstVaapiPictureH264 *picture,
1273     GstH264SliceHdr     *slice_hdr
1274 )
1275 {
1276     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1277     GstH264PPS * const pps = slice_hdr->pps;
1278     GstH264SPS * const sps = pps->sequence;
1279
1280     switch (sps->pic_order_cnt_type) {
1281     case 0:
1282         init_picture_poc_0(decoder, picture, slice_hdr);
1283         break;
1284     case 1:
1285         init_picture_poc_1(decoder, picture, slice_hdr);
1286         break;
1287     case 2:
1288         init_picture_poc_2(decoder, picture, slice_hdr);
1289         break;
1290     }
1291
1292     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1293         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1294     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1295         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1296     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1297 }
1298
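/* qsort() comparators used to build the initial reference picture lists */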
1299 static int
1300 compare_picture_pic_num_dec(const void *a, const void *b)
1301 {
1302     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1303     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1304
1305     return picB->pic_num - picA->pic_num;
1306 }
1307
1308 static int
1309 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1310 {
1311     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1312     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1313
1314     return picA->long_term_pic_num - picB->long_term_pic_num;
1315 }
1316
1317 static int
1318 compare_picture_poc_dec(const void *a, const void *b)
1319 {
1320     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1321     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1322
1323     return picB->base.poc - picA->base.poc;
1324 }
1325
1326 static int
1327 compare_picture_poc_inc(const void *a, const void *b)
1328 {
1329     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1330     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1331
1332     return picA->base.poc - picB->base.poc;
1333 }
1334
1335 static int
1336 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1337 {
1338     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1339     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1340
1341     return picB->frame_num_wrap - picA->frame_num_wrap;
1342 }
1343
1344 static int
1345 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1346 {
1347     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1348     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1349
1350     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1351 }
1352
1353 /* 8.2.4.1 - Decoding process for picture numbers */
1354 static void
1355 init_picture_refs_pic_num(
1356     GstVaapiDecoderH264 *decoder,
1357     GstVaapiPictureH264 *picture,
1358     GstH264SliceHdr     *slice_hdr
1359 )
1360 {
1361     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1362     GstH264PPS * const pps = slice_hdr->pps;
1363     GstH264SPS * const sps = pps->sequence;
1364     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1365     guint i;
1366
1367     GST_DEBUG("decode picture numbers");
1368
1369     for (i = 0; i < priv->short_ref_count; i++) {
1370         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1371
1372         // (8-27)
1373         if (pic->frame_num > priv->frame_num)
1374             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1375         else
1376             pic->frame_num_wrap = pic->frame_num;
1377
1378         // (8-28, 8-30, 8-31)
1379         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1380             pic->pic_num = pic->frame_num_wrap;
1381         else {
1382             if (pic->structure == picture->structure)
1383                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1384             else
1385                 pic->pic_num = 2 * pic->frame_num_wrap;
1386         }
1387     }
1388
1389     for (i = 0; i < priv->long_ref_count; i++) {
1390         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1391
1392         // (8-29, 8-32, 8-33)
1393         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1394             pic->long_term_pic_num = pic->long_term_frame_idx;
1395         else {
1396             if (pic->structure == picture->structure)
1397                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
1398             else
1399                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
1400         }
1401     }
1402 }
1403
1404 #define SORT_REF_LIST(list, n, compare_func) \
1405     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
1406
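/* 8.2.4.2.5: builds a field reference list from @ref_list by alternately
   picking fields with the same parity as the current picture and fields
   with the opposite parity */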
1407 static void
1408 init_picture_refs_fields_1(
1409     guint                picture_structure,
1410     GstVaapiPictureH264 *RefPicList[32],
1411     guint               *RefPicList_count,
1412     GstVaapiPictureH264 *ref_list[32],
1413     guint                ref_list_count
1414 )
1415 {
1416     guint i, j, n;
1417
1418     i = 0;
1419     j = 0;
1420     n = *RefPicList_count;
1421     do {
1422         g_assert(n < 32);
1423         for (; i < ref_list_count; i++) {
1424             if (ref_list[i]->structure == picture_structure) {
1425                 RefPicList[n++] = ref_list[i++];
1426                 break;
1427             }
1428         }
1429         for (; j < ref_list_count; j++) {
1430             if (ref_list[j]->structure != picture_structure) {
1431                 RefPicList[n++] = ref_list[j++];
1432                 break;
1433             }
1434         }
1435     } while (i < ref_list_count || j < ref_list_count);
1436     *RefPicList_count = n;
1437 }
1438
1439 static inline void
1440 init_picture_refs_fields(
1441     GstVaapiPictureH264 *picture,
1442     GstVaapiPictureH264 *RefPicList[32],
1443     guint               *RefPicList_count,
1444     GstVaapiPictureH264 *short_ref[32],
1445     guint                short_ref_count,
1446     GstVaapiPictureH264 *long_ref[32],
1447     guint                long_ref_count
1448 )
1449 {
1450     guint n = 0;
1451
1452     /* 8.2.4.2.5 - reference picture lists in fields */
1453     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1454         short_ref, short_ref_count);
1455     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
1456         long_ref, long_ref_count);
1457     *RefPicList_count = n;
1458 }
1459
1460 static void
1461 init_picture_refs_p_slice(
1462     GstVaapiDecoderH264 *decoder,
1463     GstVaapiPictureH264 *picture,
1464     GstH264SliceHdr     *slice_hdr
1465 )
1466 {
1467     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1468     GstVaapiPictureH264 **ref_list;
1469     guint i;
1470
1471     GST_DEBUG("decode reference picture list for P and SP slices");
1472
1473     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1474         /* 8.2.4.2.1 - P and SP slices in frames */
1475         if (priv->short_ref_count > 0) {
1476             ref_list = priv->RefPicList0;
1477             for (i = 0; i < priv->short_ref_count; i++)
1478                 ref_list[i] = priv->short_ref[i];
1479             SORT_REF_LIST(ref_list, i, pic_num_dec);
1480             priv->RefPicList0_count += i;
1481         }
1482
1483         if (priv->long_ref_count > 0) {
1484             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1485             for (i = 0; i < priv->long_ref_count; i++)
1486                 ref_list[i] = priv->long_ref[i];
1487             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
1488             priv->RefPicList0_count += i;
1489         }
1490     }
1491     else {
1492         /* 8.2.4.2.2 - P and SP slices in fields */
1493         GstVaapiPictureH264 *short_ref[32];
1494         guint short_ref_count = 0;
1495         GstVaapiPictureH264 *long_ref[32];
1496         guint long_ref_count = 0;
1497
1498         if (priv->short_ref_count > 0) {
1499             for (i = 0; i < priv->short_ref_count; i++)
1500                 short_ref[i] = priv->short_ref[i];
1501             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
1502             short_ref_count = i;
1503         }
1504
1505         if (priv->long_ref_count > 0) {
1506             for (i = 0; i < priv->long_ref_count; i++)
1507                 long_ref[i] = priv->long_ref[i];
1508             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1509             long_ref_count = i;
1510         }
1511
1512         init_picture_refs_fields(
1513             picture,
1514             priv->RefPicList0, &priv->RefPicList0_count,
1515             short_ref,          short_ref_count,
1516             long_ref,           long_ref_count
1517         );
1518     }
1519 }
1520
1521 static void
1522 init_picture_refs_b_slice(
1523     GstVaapiDecoderH264 *decoder,
1524     GstVaapiPictureH264 *picture,
1525     GstH264SliceHdr     *slice_hdr
1526 )
1527 {
1528     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1529     GstVaapiPictureH264 **ref_list;
1530     guint i, n;
1531
1532     GST_DEBUG("decode reference picture list for B slices");
1533
1534     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1535         /* 8.2.4.2.3 - B slices in frames */
1536
1537         /* RefPicList0 */
1538         if (priv->short_ref_count > 0) {
1539             // 1. Short-term references
1540             ref_list = priv->RefPicList0;
1541             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1542                 if (priv->short_ref[i]->base.poc < picture->base.poc)
1543                     ref_list[n++] = priv->short_ref[i];
1544             }
1545             SORT_REF_LIST(ref_list, n, poc_dec);
1546             priv->RefPicList0_count += n;
1547
1548             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1549             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1550                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
1551                     ref_list[n++] = priv->short_ref[i];
1552             }
1553             SORT_REF_LIST(ref_list, n, poc_inc);
1554             priv->RefPicList0_count += n;
1555         }
1556
1557         if (priv->long_ref_count > 0) {
1558             // 2. Long-term references
1559             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
1560             for (n = 0, i = 0; i < priv->long_ref_count; i++)
1561                 ref_list[n++] = priv->long_ref[i];
1562             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1563             priv->RefPicList0_count += n;
1564         }
1565
1566         /* RefPicList1 */
1567         if (priv->short_ref_count > 0) {
1568             // 1. Short-term references
1569             ref_list = priv->RefPicList1;
1570             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1571                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1572                     ref_list[n++] = priv->short_ref[i];
1573             }
1574             SORT_REF_LIST(ref_list, n, poc_inc);
1575             priv->RefPicList1_count += n;
1576
1577             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1578             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1579                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1580                     ref_list[n++] = priv->short_ref[i];
1581             }
1582             SORT_REF_LIST(ref_list, n, poc_dec);
1583             priv->RefPicList1_count += n;
1584         }
1585
1586         if (priv->long_ref_count > 0) {
1587             // 2. Long-term references
1588             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
1589             for (n = 0, i = 0; i < priv->long_ref_count; i++)
1590                 ref_list[n++] = priv->long_ref[i];
1591             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
1592             priv->RefPicList1_count += n;
1593         }
1594     }
1595     else {
1596         /* 8.2.4.2.4 - B slices in fields */
1597         GstVaapiPictureH264 *short_ref0[32];
1598         guint short_ref0_count = 0;
1599         GstVaapiPictureH264 *short_ref1[32];
1600         guint short_ref1_count = 0;
1601         GstVaapiPictureH264 *long_ref[32];
1602         guint long_ref_count = 0;
1603
1604         /* refFrameList0ShortTerm */
1605         if (priv->short_ref_count > 0) {
1606             ref_list = short_ref0;
1607             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1608                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1609                     ref_list[n++] = priv->short_ref[i];
1610             }
1611             SORT_REF_LIST(ref_list, n, poc_dec);
1612             short_ref0_count += n;
1613
1614             ref_list = &short_ref0[short_ref0_count];
1615             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1616                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1617                     ref_list[n++] = priv->short_ref[i];
1618             }
1619             SORT_REF_LIST(ref_list, n, poc_inc);
1620             short_ref0_count += n;
1621         }
1622
1623         /* refFrameList1ShortTerm */
1624         if (priv->short_ref_count > 0) {
1625             ref_list = short_ref1;
1626             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1627                 if (priv->short_ref[i]->base.poc > picture->base.poc)
1628                     ref_list[n++] = priv->short_ref[i];
1629             }
1630             SORT_REF_LIST(ref_list, n, poc_inc);
1631             short_ref1_count += n;
1632
1633             ref_list = &short_ref1[short_ref1_count];
1634             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
1635                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
1636                     ref_list[n++] = priv->short_ref[i];
1637             }
1638             SORT_REF_LIST(ref_list, n, poc_dec);
1639             short_ref1_count += n;
1640         }
1641
1642         /* refFrameListLongTerm */
1643         if (priv->long_ref_count > 0) {
1644             for (i = 0; i < priv->long_ref_count; i++)
1645                 long_ref[i] = priv->long_ref[i];
1646             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
1647             long_ref_count = i;
1648         }
1649
1650         init_picture_refs_fields(
1651             picture,
1652             priv->RefPicList0, &priv->RefPicList0_count,
1653             short_ref0,         short_ref0_count,
1654             long_ref,           long_ref_count
1655         );
1656
1657         init_picture_refs_fields(
1658             picture,
1659             priv->RefPicList1, &priv->RefPicList1_count,
1660             short_ref1,         short_ref1_count,
1661             long_ref,           long_ref_count
1662         );
1663     }
1664
1665     /* If RefPicList1 has more than one entry and is identical to
1666        RefPicList0, then swap its first two entries */
1667     if (priv->RefPicList1_count > 1 &&
1668         priv->RefPicList1_count == priv->RefPicList0_count &&
1669         memcmp(priv->RefPicList0, priv->RefPicList1,
1670                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
1671         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
1672         priv->RefPicList1[0] = priv->RefPicList1[1];
1673         priv->RefPicList1[1] = tmp;
1674     }
1675 }
1676
1677 #undef SORT_REF_LIST
1678
1679 static gint
1680 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
1681 {
1682     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1683     guint i;
1684
1685     for (i = 0; i < priv->short_ref_count; i++) {
1686         if (priv->short_ref[i]->pic_num == pic_num)
1687             return i;
1688     }
1689     GST_ERROR("found no short-term reference picture with PicNum = %d",
1690               pic_num);
1691     return -1;
1692 }
1693
1694 static gint
1695 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
1696 {
1697     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1698     guint i;
1699
1700     for (i = 0; i < priv->long_ref_count; i++) {
1701         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
1702             return i;
1703     }
1704     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
1705               long_term_pic_num);
1706     return -1;
1707 }
1708
1709 static void
1710 exec_picture_refs_modification_1(
1711     GstVaapiDecoderH264           *decoder,
1712     GstVaapiPictureH264           *picture,
1713     GstH264SliceHdr               *slice_hdr,
1714     guint                          list
1715 )
1716 {
1717     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1718     GstH264PPS * const pps = slice_hdr->pps;
1719     GstH264SPS * const sps = pps->sequence;
1720     GstH264RefPicListModification *ref_pic_list_modification;
1721     guint num_ref_pic_list_modifications;
1722     GstVaapiPictureH264 **ref_list;
1723     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
1724     guint i, j, n, num_refs;
1725     gint found_ref_idx;
1726     gint32 MaxPicNum, CurrPicNum, picNumPred;
1727
1728     GST_DEBUG("modification process of reference picture list %u", list);
1729
1730     if (list == 0) {
1731         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
1732         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
1733         ref_list                       = priv->RefPicList0;
1734         ref_list_count_ptr             = &priv->RefPicList0_count;
1735         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
1736     }
1737     else {
1738         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
1739         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
1740         ref_list                       = priv->RefPicList1;
1741         ref_list_count_ptr             = &priv->RefPicList1_count;
1742         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
1743     }
1744     ref_list_count = *ref_list_count_ptr;
1745
1746     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
1747         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
1748         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
1749     }
1750     else {
1751         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
1752         CurrPicNum = slice_hdr->frame_num;                      // frame_num
1753     }
1754
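         /* picNumPred (picNumLXPred) starts at CurrPicNum for the first
            modification command */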
1755     picNumPred = CurrPicNum;
1756
1757     for (i = 0; i < num_ref_pic_list_modifications; i++) {
1758         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
1759         if (l->modification_of_pic_nums_idc == 3)
1760             break;
1761
1762         /* 8.2.4.3.1 - Short-term reference pictures */
1763         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
1764             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
1765             gint32 picNum, picNumNoWrap;
1766
1767             // (8-34)
1768             if (l->modification_of_pic_nums_idc == 0) {
1769                 picNumNoWrap = picNumPred - abs_diff_pic_num;
1770                 if (picNumNoWrap < 0)
1771                     picNumNoWrap += MaxPicNum;
1772             }
1773
1774             // (8-35)
1775             else {
1776                 picNumNoWrap = picNumPred + abs_diff_pic_num;
1777                 if (picNumNoWrap >= MaxPicNum)
1778                     picNumNoWrap -= MaxPicNum;
1779             }
1780             picNumPred = picNumNoWrap;
1781
1782             // (8-36)
1783             picNum = picNumNoWrap;
1784             if (picNum > CurrPicNum)
1785                 picNum -= MaxPicNum;
1786
1787             // (8-37)
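                 /* Shift the remaining entries one position towards the end,
                    insert the matching short-term picture at ref_list_idx and
                    drop any later entry with the same PicNum */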
1788             for (j = num_refs; j > ref_list_idx; j--)
1789                 ref_list[j] = ref_list[j - 1];
1790             found_ref_idx = find_short_term_reference(decoder, picNum);
1791             ref_list[ref_list_idx++] =
1792                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
1793             n = ref_list_idx;
1794             for (j = ref_list_idx; j <= num_refs; j++) {
1795                 gint32 PicNumF;
1796                 if (!ref_list[j])
1797                     continue;
1798                 PicNumF =
1799                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
1800                     ref_list[j]->pic_num : MaxPicNum;
1801                 if (PicNumF != picNum)
1802                     ref_list[n++] = ref_list[j];
1803             }
1804         }
1805
1806         /* 8.2.4.3.2 - Long-term reference pictures */
1807         else {
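                 /* Same shift-and-insert scheme as above, keyed on LongTermPicNum */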
1808
1809             for (j = num_refs; j > ref_list_idx; j--)
1810                 ref_list[j] = ref_list[j - 1];
1811             found_ref_idx =
1812                 find_long_term_reference(decoder, l->value.long_term_pic_num);
1813             ref_list[ref_list_idx++] =
1814                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
1815             n = ref_list_idx;
1816             for (j = ref_list_idx; j <= num_refs; j++) {
1817                 gint32 LongTermPicNumF;
1818                 if (!ref_list[j])
1819                     continue;
1820                 LongTermPicNumF =
1821                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
1822                     ref_list[j]->long_term_pic_num : INT_MAX;
1823                 if (LongTermPicNumF != l->value.long_term_pic_num)
1824                     ref_list[n++] = ref_list[j];
1825             }
1826         }
1827     }
1828
1829 #if DEBUG
1830     for (i = 0; i < num_refs; i++)
1831         if (!ref_list[i])
1832             GST_ERROR("list %u entry %u is empty", list, i);
1833 #endif
1834     *ref_list_count_ptr = num_refs;
1835 }
1836
1837 /* 8.2.4.3 - Modification process for reference picture lists */
1838 static void
1839 exec_picture_refs_modification(
1840     GstVaapiDecoderH264 *decoder,
1841     GstVaapiPictureH264 *picture,
1842     GstH264SliceHdr     *slice_hdr
1843 )
1844 {
1845     GST_DEBUG("execute ref_pic_list_modification()");
1846
1847     /* RefPicList0 */
1848     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
1849         slice_hdr->ref_pic_list_modification_flag_l0)
1850         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
1851
1852     /* RefPicList1 */
1853     if (GST_H264_IS_B_SLICE(slice_hdr) &&
1854         slice_hdr->ref_pic_list_modification_flag_l1)
1855         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
1856 }
1857
1858 static void
1859 init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
1860 {
1861     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1862     guint i, j, short_ref_count, long_ref_count;
1863
1864     short_ref_count = 0;
1865     long_ref_count  = 0;
1866     if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
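             /* Frame decoding: only frame stores holding a complete frame
                contribute, each as a single frame reference */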
1867         for (i = 0; i < priv->dpb_count; i++) {
1868             GstVaapiFrameStore * const fs = priv->dpb[i];
1869             GstVaapiPictureH264 *picture;
1870             if (!gst_vaapi_frame_store_has_frame(fs))
1871                 continue;
1872             picture = fs->buffers[0];
1873             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1874                 priv->short_ref[short_ref_count++] = picture;
1875             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1876                 priv->long_ref[long_ref_count++] = picture;
1877             picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1878             picture->other_field = fs->buffers[1];
1879         }
1880     }
1881     else {
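             /* Field decoding: every field stored in the DPB is an individual
                reference candidate */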
1882         for (i = 0; i < priv->dpb_count; i++) {
1883             GstVaapiFrameStore * const fs = priv->dpb[i];
1884             for (j = 0; j < fs->num_buffers; j++) {
1885                 GstVaapiPictureH264 * const picture = fs->buffers[j];
1886                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
1887                     priv->short_ref[short_ref_count++] = picture;
1888                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
1889                     priv->long_ref[long_ref_count++] = picture;
1890                 picture->structure = picture->base.structure;
1891                 picture->other_field = fs->buffers[j ^ 1];
1892             }
1893         }
1894     }
1895
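         /* Clear entries left over from the previous picture beyond the new counts */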
1896     for (i = short_ref_count; i < priv->short_ref_count; i++)
1897         priv->short_ref[i] = NULL;
1898     priv->short_ref_count = short_ref_count;
1899
1900     for (i = long_ref_count; i < priv->long_ref_count; i++)
1901         priv->long_ref[i] = NULL;
1902     priv->long_ref_count = long_ref_count;
1903 }
1904
1905 static void
1906 init_picture_refs(
1907     GstVaapiDecoderH264 *decoder,
1908     GstVaapiPictureH264 *picture,
1909     GstH264SliceHdr     *slice_hdr
1910 )
1911 {
1912     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1913     GstVaapiPicture * const base_picture = &picture->base;
1914     guint i, num_refs;
1915
1916     init_picture_ref_lists(decoder);
1917     init_picture_refs_pic_num(decoder, picture, slice_hdr);
1918
1919     priv->RefPicList0_count = 0;
1920     priv->RefPicList1_count = 0;
1921
1922     switch (base_picture->type) {
1923     case GST_VAAPI_PICTURE_TYPE_P:
1924     case GST_VAAPI_PICTURE_TYPE_SP:
1925         init_picture_refs_p_slice(decoder, picture, slice_hdr);
1926         break;
1927     case GST_VAAPI_PICTURE_TYPE_B:
1928         init_picture_refs_b_slice(decoder, picture, slice_hdr);
1929         break;
1930     default:
1931         break;
1932     }
1933
1934     exec_picture_refs_modification(decoder, picture, slice_hdr);
1935
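         /* Pad the reference lists with NULL entries up to the active reference
            count signalled in the slice header */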
1936     switch (base_picture->type) {
1937     case GST_VAAPI_PICTURE_TYPE_B:
1938         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
1939         for (i = priv->RefPicList1_count; i < num_refs; i++)
1940             priv->RefPicList1[i] = NULL;
1941         priv->RefPicList1_count = num_refs;
1942
1943         // fall-through
1944     case GST_VAAPI_PICTURE_TYPE_P:
1945     case GST_VAAPI_PICTURE_TYPE_SP:
1946         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
1947         for (i = priv->RefPicList0_count; i < num_refs; i++)
1948             priv->RefPicList0[i] = NULL;
1949         priv->RefPicList0_count = num_refs;
1950         break;
1951     default:
1952         break;
1953     }
1954 }
1955
1956 static gboolean
1957 init_picture(
1958     GstVaapiDecoderH264 *decoder,
1959     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
1960 {
1961     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1962     GstVaapiPicture * const base_picture = &picture->base;
1963     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1964
1965     priv->prev_frame_num        = priv->frame_num;
1966     priv->frame_num             = slice_hdr->frame_num;
1967     picture->frame_num          = priv->frame_num;
1968     picture->frame_num_wrap     = priv->frame_num;
1969     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
1970     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
1971
1972     /* Reset decoder state for IDR pictures */
1973     if (pi->nalu.type == GST_H264_NAL_SLICE_IDR) {
1974         GST_DEBUG("<IDR>");
1975         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
1976         dpb_flush(decoder);
1977     }
1978
1979     /* Initialize slice type */
1980     switch (slice_hdr->type % 5) {
1981     case GST_H264_P_SLICE:
1982         base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
1983         break;
1984     case GST_H264_B_SLICE:
1985         base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
1986         break;
1987     case GST_H264_I_SLICE:
1988         base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
1989         break;
1990     case GST_H264_SP_SLICE:
1991         base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
1992         break;
1993     case GST_H264_SI_SLICE:
1994         base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
1995         break;
1996     }
1997
1998     /* Initialize picture structure */
1999     if (!slice_hdr->field_pic_flag)
2000         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2001     else {
2002         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2003         if (!slice_hdr->bottom_field_flag)
2004             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2005         else
2006             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2007     }
2008     picture->structure = base_picture->structure;
2009
2010     /* Initialize reference flags */
2011     if (pi->nalu.ref_idc) {
2012         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2013             &slice_hdr->dec_ref_pic_marking;
2014
2015         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2016             dec_ref_pic_marking->long_term_reference_flag)
2017             GST_VAAPI_PICTURE_FLAG_SET(picture,
2018                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2019         else
2020             GST_VAAPI_PICTURE_FLAG_SET(picture,
2021                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2022     }
2023
2024     init_picture_poc(decoder, picture, slice_hdr);
2025     init_picture_refs(decoder, picture, slice_hdr);
2026     return TRUE;
2027 }
2028
2029 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2030 static gboolean
2031 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2032 {
2033     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2034     GstH264PPS * const pps = priv->current_picture->pps;
2035     GstH264SPS * const sps = pps->sequence;
2036     GstVaapiPictureH264 *ref_picture;
2037     guint i, m, max_num_ref_frames;
2038
2039     GST_DEBUG("reference picture marking process (sliding window)");
2040
2041     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2042         return TRUE;
2043
2044     max_num_ref_frames = sps->num_ref_frames;
2045     if (max_num_ref_frames == 0)
2046         max_num_ref_frames = 1;
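         /* References are counted per field when decoding fields, so the limit doubles */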
2047     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2048         max_num_ref_frames <<= 1;
2049
2050     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2051         return TRUE;
2052     if (priv->short_ref_count < 1)
2053         return FALSE;
2054
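         /* Select the short-term reference with the smallest FrameNumWrap, i.e. the oldest one */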
2055     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2056         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2057         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2058             m = i;
2059     }
2060
2061     ref_picture = priv->short_ref[m];
2062     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2063     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2064
2065     /* Both fields need to be marked as "unused for reference", so
2066        remove the other field from the short_ref[] list as well */
2067     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2068         for (i = 0; i < priv->short_ref_count; i++) {
2069             if (priv->short_ref[i] == ref_picture->other_field) {
2070                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2071                 break;
2072             }
2073         }
2074     }
2075     return TRUE;
2076 }
2077
2078 static inline gint32
2079 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2080 {
2081     gint32 pic_num;
2082
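         /* picNumX is FrameNumWrap (2 * FrameNumWrap + 1 for fields) minus
            (difference_of_pic_nums_minus1 + 1) */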
2083     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2084         pic_num = picture->frame_num_wrap;
2085     else
2086         pic_num = 2 * picture->frame_num_wrap + 1;
2087     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2088     return pic_num;
2089 }
2090
2091 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2092 static void
2093 exec_ref_pic_marking_adaptive_mmco_1(
2094     GstVaapiDecoderH264  *decoder,
2095     GstVaapiPictureH264  *picture,
2096     GstH264RefPicMarking *ref_pic_marking
2097 )
2098 {
2099     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2100     gint32 i, picNumX;
2101
2102     picNumX = get_picNumX(picture, ref_pic_marking);
2103     i = find_short_term_reference(decoder, picNumX);
2104     if (i < 0)
2105         return;
2106
2107     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2108         GST_VAAPI_PICTURE_IS_FRAME(picture));
2109     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2110 }
2111
2112 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2113 static void
2114 exec_ref_pic_marking_adaptive_mmco_2(
2115     GstVaapiDecoderH264  *decoder,
2116     GstVaapiPictureH264  *picture,
2117     GstH264RefPicMarking *ref_pic_marking
2118 )
2119 {
2120     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2121     gint32 i;
2122
2123     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2124     if (i < 0)
2125         return;
2126
2127     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2128         GST_VAAPI_PICTURE_IS_FRAME(picture));
2129     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2130 }
2131
2132 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2133 static void
2134 exec_ref_pic_marking_adaptive_mmco_3(
2135     GstVaapiDecoderH264  *decoder,
2136     GstVaapiPictureH264  *picture,
2137     GstH264RefPicMarking *ref_pic_marking
2138 )
2139 {
2140     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2141     GstVaapiPictureH264 *ref_picture;
2142     gint32 i, picNumX;
2143
2144     for (i = 0; i < priv->long_ref_count; i++) {
2145         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2146             break;
2147     }
2148     if (i != priv->long_ref_count) {
2149         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2150         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2151     }
2152
2153     picNumX = get_picNumX(picture, ref_pic_marking);
2154     i = find_short_term_reference(decoder, picNumX);
2155     if (i < 0)
2156         return;
2157
2158     ref_picture = priv->short_ref[i];
2159     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2160     priv->long_ref[priv->long_ref_count++] = ref_picture;
2161
2162     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2163     gst_vaapi_picture_h264_set_reference(ref_picture,
2164         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2165         GST_VAAPI_PICTURE_IS_FRAME(picture));
2166 }
2167
2168 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
2169  * as "unused for reference" */
2170 static void
2171 exec_ref_pic_marking_adaptive_mmco_4(
2172     GstVaapiDecoderH264  *decoder,
2173     GstVaapiPictureH264  *picture,
2174     GstH264RefPicMarking *ref_pic_marking
2175 )
2176 {
2177     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2178     gint32 i, long_term_frame_idx;
2179
2180     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
2181
2182     for (i = 0; i < priv->long_ref_count; i++) {
2183         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
2184             continue;
2185         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
2186         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2187         i--;
2188     }
2189 }
2190
2191 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
2192 static void
2193 exec_ref_pic_marking_adaptive_mmco_5(
2194     GstVaapiDecoderH264  *decoder,
2195     GstVaapiPictureH264  *picture,
2196     GstH264RefPicMarking *ref_pic_marking
2197 )
2198 {
2199     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2200
2201     dpb_flush(decoder);
2202
2203     priv->prev_pic_has_mmco5 = TRUE;
2204
2205     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
2206     priv->frame_num = 0;
2207     priv->frame_num_offset = 0;
2208     picture->frame_num = 0;
2209
2210     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
2211     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2212         picture->field_poc[TOP_FIELD] -= picture->base.poc;
2213     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2214         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
2215     picture->base.poc = 0;
2216 }
2217
2218 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
2219 static void
2220 exec_ref_pic_marking_adaptive_mmco_6(
2221     GstVaapiDecoderH264  *decoder,
2222     GstVaapiPictureH264  *picture,
2223     GstH264RefPicMarking *ref_pic_marking
2224 )
2225 {
2226     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2227     gst_vaapi_picture_h264_set_reference(picture,
2228         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, FALSE);
2229 }
2230
2231 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
2232 static gboolean
2233 exec_ref_pic_marking_adaptive(
2234     GstVaapiDecoderH264     *decoder,
2235     GstVaapiPictureH264     *picture,
2236     GstH264DecRefPicMarking *dec_ref_pic_marking
2237 )
2238 {
2239     guint i;
2240
2241     GST_DEBUG("reference picture marking process (adaptive memory control)");
2242
2243     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
2244         GstVaapiDecoderH264  *decoder,
2245         GstVaapiPictureH264  *picture,
2246         GstH264RefPicMarking *ref_pic_marking
2247     );
2248
2249     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
2250         NULL,
2251         exec_ref_pic_marking_adaptive_mmco_1,
2252         exec_ref_pic_marking_adaptive_mmco_2,
2253         exec_ref_pic_marking_adaptive_mmco_3,
2254         exec_ref_pic_marking_adaptive_mmco_4,
2255         exec_ref_pic_marking_adaptive_mmco_5,
2256         exec_ref_pic_marking_adaptive_mmco_6,
2257     };
2258
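         /* Apply each memory_management_control_operation in the order it was signalled */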
2259     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
2260         GstH264RefPicMarking * const ref_pic_marking =
2261             &dec_ref_pic_marking->ref_pic_marking[i];
2262
2263         const guint mmco = ref_pic_marking->memory_management_control_operation;
2264         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
2265             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
2266         else {
2267             GST_ERROR("unhandled MMCO %u", mmco);
2268             return FALSE;
2269         }
2270     }
2271     return TRUE;
2272 }
2273
2274 /* 8.2.5 - Execute reference picture marking process */
2275 static gboolean
2276 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
2277 {
2278     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2279
2280     priv->prev_pic_has_mmco5 = FALSE;
2281     priv->prev_pic_structure = picture->structure;
2282
2283     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
2284         return TRUE;
2285
2286     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
2287         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2288             &picture->last_slice_hdr->dec_ref_pic_marking;
2289         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
2290             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
2291                 return FALSE;
2292         }
2293         else {
2294             if (!exec_ref_pic_marking_sliding_window(decoder))
2295                 return FALSE;
2296         }
2297     }
2298     return TRUE;
2299 }
2300
2301 static void
2302 vaapi_init_picture(VAPictureH264 *pic)
2303 {
2304     pic->picture_id           = VA_INVALID_ID;
2305     pic->frame_idx            = 0;
2306     pic->flags                = VA_PICTURE_H264_INVALID;
2307     pic->TopFieldOrderCnt     = 0;
2308     pic->BottomFieldOrderCnt  = 0;
2309 }
2310
2311 static void
2312 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
2313     guint picture_structure)
2314 {
2315     if (!picture_structure)
2316         picture_structure = picture->structure;
2317
2318     pic->picture_id = picture->base.surface_id;
2319     pic->flags = 0;
2320
2321     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
2322         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
2323         pic->frame_idx = picture->long_term_frame_idx;
2324     }
2325     else {
2326         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
2327             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
2328         pic->frame_idx = picture->frame_num;
2329     }
2330
2331     switch (picture_structure) {
2332     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
2333         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2334         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2335         break;
2336     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
2337         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
2338         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
2339         pic->BottomFieldOrderCnt = 0;
2340         break;
2341     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
2342         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
2343         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
2344         pic->TopFieldOrderCnt = 0;
2345         break;
2346     }
2347 }
2348
2349 static gboolean
2350 fill_picture(GstVaapiDecoderH264 *decoder,
2351     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2352 {
2353     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2354     GstVaapiPicture * const base_picture = &picture->base;
2355     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2356     GstH264PPS * const pps = picture->pps;
2357     GstH264SPS * const sps = pps->sequence;
2358     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
2359     guint i, n;
2360
2361     /* Fill in VAPictureParameterBufferH264 */
2362     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
2363
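         /* Export every frame store that still holds reference pictures and
            invalidate the remaining ReferenceFrames[] entries */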
2364     for (i = 0, n = 0; i < priv->dpb_count; i++) {
2365         GstVaapiFrameStore * const fs = priv->dpb[i];
2366         if (gst_vaapi_frame_store_has_reference(fs))
2367             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
2368                 fs->buffers[0], fs->structure);
2369     }
2370     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
2371         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
2372
2373 #define COPY_FIELD(s, f) \
2374     pic_param->f = (s)->f
2375
2376 #define COPY_BFM(a, s, f) \
2377     pic_param->a.bits.f = (s)->f
2378
2379     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
2380     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
2381     pic_param->frame_num                    = priv->frame_num;
2382
2383     COPY_FIELD(sps, bit_depth_luma_minus8);
2384     COPY_FIELD(sps, bit_depth_chroma_minus8);
2385     COPY_FIELD(sps, num_ref_frames);
2386     COPY_FIELD(pps, num_slice_groups_minus1);
2387     COPY_FIELD(pps, slice_group_map_type);
2388     COPY_FIELD(pps, slice_group_change_rate_minus1);
2389     COPY_FIELD(pps, pic_init_qp_minus26);
2390     COPY_FIELD(pps, pic_init_qs_minus26);
2391     COPY_FIELD(pps, chroma_qp_index_offset);
2392     COPY_FIELD(pps, second_chroma_qp_index_offset);
2393
2394     pic_param->seq_fields.value                                         = 0; /* reset all bits */
2395     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
2396     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
2397
2398     COPY_BFM(seq_fields, sps, chroma_format_idc);
2399     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
2400     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
2401     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
2402     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
2403     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
2404     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
2405     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
2406     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
2407
2408     pic_param->pic_fields.value                                         = 0; /* reset all bits */
2409     pic_param->pic_fields.bits.field_pic_flag                           = slice_hdr->field_pic_flag;
2410     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
2411
2412     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
2413     COPY_BFM(pic_fields, pps, weighted_pred_flag);
2414     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
2415     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
2416     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
2417     COPY_BFM(pic_fields, pps, pic_order_present_flag);
2418     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
2419     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
2420     return TRUE;
2421 }
2422
2423 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
2424 static gboolean
2425 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
2426 {
2427     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2428     GstH264PPS * const pps = slice_hdr->pps;
2429     GstH264SPS * const sps = pps->sequence;
2430     GstH264SliceHdr *prev_slice_hdr;
2431
2432     if (!prev_pi)
2433         return TRUE;
2434     prev_slice_hdr = &prev_pi->data.slice_hdr;
2435
2436 #define CHECK_EXPR(expr, field_name) do {              \
2437         if (!(expr)) {                                 \
2438             GST_DEBUG(field_name " differs in value"); \
2439             return TRUE;                               \
2440         }                                              \
2441     } while (0)
2442
2443 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
2444     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
2445
2446     /* frame_num differs in value, regardless of whether either value is inferred to be 0 */
2447     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
2448
2449     /* pic_parameter_set_id differs in value */
2450     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
2451
2452     /* field_pic_flag differs in value */
2453     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
2454
2455     /* bottom_field_flag is present in both and differs in value */
2456     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
2457         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
2458
2459     /* nal_ref_idc differs in value, with one of the nal_ref_idc values being 0 */
2460     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
2461                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
2462
2463     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
2464        value or delta_pic_order_cnt_bottom differs in value */
2465     if (sps->pic_order_cnt_type == 0) {
2466         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
2467         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
2468             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
2469     }
2470
2471     /* POC type is 1 for both and either delta_pic_order_cnt[0]
2472        differs in value or delta_pic_order_cnt[1] differs in value */
2473     else if (sps->pic_order_cnt_type == 1) {
2474         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
2475         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
2476     }
2477
2478     /* IdrPicFlag differs in value */
2479     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
2480
2481     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
2482     if (pi->nalu.idr_pic_flag)
2483         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
2484
2485 #undef CHECK_EXPR
2486 #undef CHECK_VALUE
2487     return FALSE;
2488 }
2489
2490 static GstVaapiDecoderStatus
2491 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2492 {
2493     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2494     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2495     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2496     GstH264PPS * const pps = slice_hdr->pps;
2497     GstH264SPS * const sps = pps->sequence;
2498     GstVaapiPictureH264 *picture;
2499     GstVaapiDecoderStatus status;
2500
2501     status = ensure_context(decoder, sps);
2502     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2503         return status;
2504
2505     if (priv->current_picture) {
2506         /* Re-use the current picture, whose first field was already decoded */
2507         picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
2508         if (!picture) {
2509             GST_ERROR("failed to allocate field picture");
2510             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2511         }
2512     }
2513     else {
2514         /* Create new picture */
2515         picture = gst_vaapi_picture_h264_new(decoder);
2516         if (!picture) {
2517             GST_ERROR("failed to allocate picture");
2518             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2519         }
2520     }
2521     gst_vaapi_picture_replace(&priv->current_picture, picture);
2522     gst_vaapi_picture_unref(picture);
2523
2524     /* Update cropping rectangle */
2525     if (sps->frame_cropping_flag) {
2526         GstVaapiRectangle crop_rect;
2527         crop_rect.x = sps->crop_rect_x;
2528         crop_rect.y = sps->crop_rect_y;
2529         crop_rect.width = sps->crop_rect_width;
2530         crop_rect.height = sps->crop_rect_height;
2531         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
2532     }
2533
2534     picture->pps = pps;
2535
2536     status = ensure_quant_matrix(decoder, picture);
2537     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
2538         GST_ERROR("failed to reset quantizer matrix");
2539         return status;
2540     }
2541
2542     if (!init_picture(decoder, picture, pi))
2543         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2544     if (!fill_picture(decoder, picture, pi))
2545         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2546     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2547 }
2548
2549 static inline guint
2550 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr)
2551 {
2552     guint epb_count;
2553
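         /* 8 bits of NAL unit header plus the parsed slice header size, minus
            the bits occupied by emulation prevention bytes */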
2554     epb_count = slice_hdr->n_emulation_prevention_bytes;
2555     return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
2556 }
2557
2558 static gboolean
2559 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
2560     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2561 {
2562     VASliceParameterBufferH264 * const slice_param = slice->param;
2563     GstH264PPS * const pps = slice_hdr->pps;
2564     GstH264SPS * const sps = pps->sequence;
2565     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
2566     guint num_weight_tables = 0;
2567     gint i, j;
2568
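         /* Explicit weighted prediction uses one table for P/SP slices and both
            l0/l1 tables for B slices when weighted_bipred_idc == 1 */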
2569     if (pps->weighted_pred_flag &&
2570         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
2571         num_weight_tables = 1;
2572     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
2573         num_weight_tables = 2;
2574     else
2575         num_weight_tables = 0;
2576
2577     slice_param->luma_log2_weight_denom   = 0;
2578     slice_param->chroma_log2_weight_denom = 0;
2579     slice_param->luma_weight_l0_flag      = 0;
2580     slice_param->chroma_weight_l0_flag    = 0;
2581     slice_param->luma_weight_l1_flag      = 0;
2582     slice_param->chroma_weight_l1_flag    = 0;
2583
2584     if (num_weight_tables < 1)
2585         return TRUE;
2586
2587     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
2588     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
2589
2590     slice_param->luma_weight_l0_flag = 1;
2591     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2592         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
2593         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
2594     }
2595
2596     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
2597     if (slice_param->chroma_weight_l0_flag) {
2598         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
2599             for (j = 0; j < 2; j++) {
2600                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
2601                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
2602             }
2603         }
2604     }
2605
2606     if (num_weight_tables < 2)
2607         return TRUE;
2608
2609     slice_param->luma_weight_l1_flag = 1;
2610     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2611         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
2612         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
2613     }
2614
2615     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
2616     if (slice_param->chroma_weight_l1_flag) {
2617         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
2618             for (j = 0; j < 2; j++) {
2619                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
2620                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
2621             }
2622         }
2623     }
2624     return TRUE;
2625 }
2626
2627 static gboolean
2628 fill_RefPicList(GstVaapiDecoderH264 *decoder,
2629     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2630 {
2631     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2632     VASliceParameterBufferH264 * const slice_param = slice->param;
2633     guint i, num_ref_lists = 0;
2634
2635     slice_param->num_ref_idx_l0_active_minus1 = 0;
2636     slice_param->num_ref_idx_l1_active_minus1 = 0;
2637
2638     if (GST_H264_IS_B_SLICE(slice_hdr))
2639         num_ref_lists = 2;
2640     else if (GST_H264_IS_I_SLICE(slice_hdr))
2641         num_ref_lists = 0;
2642     else
2643         num_ref_lists = 1;
2644
2645     if (num_ref_lists < 1)
2646         return TRUE;
2647
2648     slice_param->num_ref_idx_l0_active_minus1 =
2649         slice_hdr->num_ref_idx_l0_active_minus1;
2650
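         /* Copy the constructed reference list and pad the remaining active
            entries with invalid pictures */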
2651     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
2652         vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
2653     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
2654         vaapi_init_picture(&slice_param->RefPicList0[i]);
2655
2656     if (num_ref_lists < 2)
2657         return TRUE;
2658
2659     slice_param->num_ref_idx_l1_active_minus1 =
2660         slice_hdr->num_ref_idx_l1_active_minus1;
2661
2662     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
2663         vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
2664     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
2665         vaapi_init_picture(&slice_param->RefPicList1[i]);
2666     return TRUE;
2667 }
2668
2669 static gboolean
2670 fill_slice(GstVaapiDecoderH264 *decoder,
2671     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
2672 {
2673     VASliceParameterBufferH264 * const slice_param = slice->param;
2674
2675     /* Fill in VASliceParameterBufferH264 */
2676     slice_param->slice_data_bit_offset          = get_slice_data_bit_offset(slice_hdr);
2677     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
2678     slice_param->slice_type                     = slice_hdr->type % 5;
2679     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
2680     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
2681     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
2682     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
2683     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
2684     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
2685
2686     if (!fill_RefPicList(decoder, slice, slice_hdr))
2687         return FALSE;
2688     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
2689         return FALSE;
2690     return TRUE;
2691 }
2692
2693 static GstVaapiDecoderStatus
2694 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2695 {
2696     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2697     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2698     GstVaapiPictureH264 * const picture = priv->current_picture;
2699     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2700     GstVaapiSlice *slice;
2701     GstBuffer * const buffer =
2702         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
2703     GstMapInfo map_info;
2704
2705     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
2706
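         /* Skip (but log) slices received before both SPS and PPS; returning
            success lets decoding resume once they arrive */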
2707     if (!priv->got_sps || !priv->got_pps) {
2708         GST_ERROR("not initialized yet");
2709         return GST_VAAPI_DECODER_STATUS_SUCCESS;
2710     }
2711
2712     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
2713         GST_ERROR("failed to map buffer");
2714         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2715     }
2716
2717     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
2718         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
2719     gst_buffer_unmap(buffer, &map_info);
2720     if (!slice) {
2721         GST_ERROR("failed to allocate slice");
2722         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2723     }
2724
2725     if (!fill_slice(decoder, slice, slice_hdr)) {
2726         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
2727         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
2728     }
2729
2730     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
2731     picture->last_slice_hdr = slice_hdr;
2732     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2733 }
2734
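     /* Scan the adapter for an Annex B start code prefix (00 00 01) within the given window */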
2735 static inline gint
2736 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
2737 {
2738     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
2739                                                      0xffffff00, 0x00000100,
2740                                                      ofs, size,
2741                                                      scp);
2742 }
2743
2744 static GstVaapiDecoderStatus
2745 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
2746 {
2747     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
2748     GstVaapiDecoderStatus status;
2749
2750     switch (pi->nalu.type) {
2751     case GST_H264_NAL_SLICE_IDR:
2752         /* fall-through. IDR specifics are handled in init_picture() */
2753     case GST_H264_NAL_SLICE:
2754         status = decode_slice(decoder, unit);
2755         break;
2756     case GST_H264_NAL_SEQ_END:
2757     case GST_H264_NAL_STREAM_END:
2758         status = decode_sequence_end(decoder);
2759         break;
2760     case GST_H264_NAL_SEI:
2761         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
2762         break;
2763     default:
2764         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
2765         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
2766         break;
2767     }
2768     return status;
2769 }
2770
2771 static GstVaapiDecoderStatus
2772 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
2773     const guchar *buf, guint buf_size)
2774 {
2775     GstVaapiDecoderH264 * const decoder =
2776         GST_VAAPI_DECODER_H264_CAST(base_decoder);
2777     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2778     GstVaapiDecoderStatus status;
2779     GstVaapiDecoderUnit unit;
2780     GstVaapiParserInfoH264 pi;
2781     GstH264ParserResult result;
2782     guint i, ofs, num_sps, num_pps;
2783
2784     unit.parsed_info = &pi;
2785
2786     if (buf_size < 8)
2787         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2788
2789     if (buf[0] != 1) {
2790         GST_ERROR("failed to decode codec-data, not in avcC format");
2791         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
2792     }
2793
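         /* avcC header: lengthSizeMinusOne is in the low bits of byte 4, the
            number of SPS entries in the low bits of byte 5 */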
2794     priv->nal_length_size = (buf[4] & 0x03) + 1;
2795
2796     num_sps = buf[5] & 0x1f;
2797     ofs = 6;
2798
2799     for (i = 0; i < num_sps; i++) {
2800         result = gst_h264_parser_identify_nalu_avc(
2801             priv->parser,
2802             buf, ofs, buf_size, 2,
2803             &pi.nalu
2804         );
2805         if (result != GST_H264_PARSER_OK)
2806             return get_status(result);
2807
2808         status = parse_sps(decoder, &unit);
2809         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2810             return status;
2811         ofs = pi.nalu.offset + pi.nalu.size;
2812     }
2813
2814     num_pps = buf[ofs];
2815     ofs++;
2816
2817     for (i = 0; i < num_pps; i++) {
2818         result = gst_h264_parser_identify_nalu_avc(
2819             priv->parser,
2820             buf, ofs, buf_size, 2,
2821             &pi.nalu
2822         );
2823         if (result != GST_H264_PARSER_OK)
2824             return get_status(result);
2825
2826         status = parse_pps(decoder, &unit);
2827         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2828             return status;
2829         ofs = pi.nalu.offset + pi.nalu.size;
2830     }
2831
2832     priv->is_avcC = TRUE;
2833     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2834 }
2835
2836 static GstVaapiDecoderStatus
2837 ensure_decoder(GstVaapiDecoderH264 *decoder)
2838 {
2839     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2840     GstVaapiDecoderStatus status;
2841
2842     if (!priv->is_opened) {
2843         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
2844         if (!priv->is_opened)
2845             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
2846
2847         status = gst_vaapi_decoder_decode_codec_data(
2848             GST_VAAPI_DECODER_CAST(decoder));
2849         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2850             return status;
2851     }
2852     return GST_VAAPI_DECODER_STATUS_SUCCESS;
2853 }
2854
2855 static GstVaapiDecoderStatus
2856 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
2857     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
2858 {
2859     GstVaapiDecoderH264 * const decoder =
2860         GST_VAAPI_DECODER_H264_CAST(base_decoder);
2861     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2862     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
2863     GstVaapiParserInfoH264 *pi;
2864     GstVaapiDecoderStatus status;
2865     GstH264ParserResult result;
2866     guchar *buf;
2867     guint i, size, buf_size, nalu_size, flags;
2868     guint32 start_code;
2869     gint ofs, ofs2;
2870
2871     status = ensure_decoder(decoder);
2872     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2873         return status;
2874
2875     size = gst_adapter_available(adapter);
2876
2877     if (priv->is_avcC) {
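             /* avcC mode: each NAL unit is prefixed with a nal_length_size-byte
                big-endian length instead of a start code */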
2878         if (size < priv->nal_length_size)
2879             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2880
2881         buf = (guchar *)&start_code;
2882         g_assert(priv->nal_length_size <= sizeof(start_code));
2883         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
2884
2885         nalu_size = 0;
2886         for (i = 0; i < priv->nal_length_size; i++)
2887             nalu_size = (nalu_size << 8) | buf[i];
2888
2889         buf_size = priv->nal_length_size + nalu_size;
2890         if (size < buf_size)
2891             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2892     }
2893     else {
2894         if (size < 4)
2895             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2896
2897         ofs = scan_for_start_code(adapter, 0, size, NULL);
2898         if (ofs < 0)
2899             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2900
2901         if (ofs > 0) {
2902             gst_adapter_flush(adapter, ofs);
2903             size -= ofs;
2904         }
2905
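             /* Resume the search for the next start code past the data already
                examined on previous calls (input_offset2), adjusted for the
                bytes just flushed */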
2906         ofs2 = ps->input_offset2 - ofs - 4;
2907         if (ofs2 < 4)
2908             ofs2 = 4;
2909
2910         ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
2911             scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
2912         if (ofs < 0) {
2913             // At end-of-stream, assume the whole NAL unit is present
2914             if (!at_eos) {
2915                 ps->input_offset2 = size;
2916                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2917             }
2918             ofs = size;
2919         }
2920         buf_size = ofs;
2921     }
2922     ps->input_offset2 = 0;
2923
2924     buf = (guchar *)gst_adapter_map(adapter, buf_size);
2925     if (!buf)
2926         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
2927
2928     unit->size = buf_size;
2929
2930     pi = gst_vaapi_parser_info_h264_new();
2931     if (!pi)
2932         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
2933
2934     gst_vaapi_decoder_unit_set_parsed_info(unit,
2935         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
2936
2937     if (priv->is_avcC)
2938         result = gst_h264_parser_identify_nalu_avc(priv->parser,
2939             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
2940     else
2941         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
2942             buf, 0, buf_size, &pi->nalu);
2943     status = get_status(result);
2944     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2945         return status;
2946
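    /* Parse the headers needed at this stage (SPS, PPS, SEI, slice
     * header); the parsed info is attached to the unit and reused by
     * the decode stage. */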
2947     switch (pi->nalu.type) {
2948     case GST_H264_NAL_SPS:
2949         status = parse_sps(decoder, unit);
2950         break;
2951     case GST_H264_NAL_PPS:
2952         status = parse_pps(decoder, unit);
2953         break;
2954     case GST_H264_NAL_SEI:
2955         status = parse_sei(decoder, unit);
2956         break;
2957     case GST_H264_NAL_SLICE_IDR:
2958     case GST_H264_NAL_SLICE:
2959         status = parse_slice(decoder, unit);
2960         break;
2961     default:
2962         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
2963         break;
2964     }
2965     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
2966         return status;
2967
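    /* Derive decoder unit flags from the NAL unit type: they tell the
     * base decoder where frames start and end, and which units the
     * decode stage may skip. */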
2968     flags = 0;
2969     switch (pi->nalu.type) {
2970     case GST_H264_NAL_AU_DELIMITER:
2971         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
2972         /* fall-through */
2973     case GST_H264_NAL_FILLER_DATA:
2974         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
2975         break;
2976     case GST_H264_NAL_STREAM_END:
2977         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
2978         /* fall-through */
2979     case GST_H264_NAL_SEQ_END:
2980         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
2981         break;
2982     case GST_H264_NAL_SPS:
2983     case GST_H264_NAL_PPS:
2984         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
2985         /* fall-through */
2986     case GST_H264_NAL_SEI:
2987         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
2988         break;
2989     case GST_H264_NAL_SLICE_IDR:
2990     case GST_H264_NAL_SLICE:
2991         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
2992         if (is_new_picture(pi, priv->prev_slice_pi))
2993             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
2994         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
2995         break;
2996     default:
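        /* NAL unit types 14 (prefix NAL unit), 15 (subset SPS) and
         * 16..18 (reserved), when present, precede the first VCL NAL
         * unit of an access unit (H.264, subclause 7.4.1.2.3), so they
         * also mark a frame start. */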
2997         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
2998             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
2999         break;
3000     }
3001     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
3002
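    /* pi->nalu.data points into the adapter mapping, which is only
     * valid for the duration of this call; clear it so the stored
     * parser info never dereferences stale memory. */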
3003     pi->nalu.data = NULL;
3004     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3005 }
3006
3007 static GstVaapiDecoderStatus
3008 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
3009     GstVaapiDecoderUnit *unit)
3010 {
3011     GstVaapiDecoderH264 * const decoder =
3012         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3013     GstVaapiDecoderStatus status;
3014
3015     status = ensure_decoder(decoder);
3016     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3017         return status;
3018     return decode_unit(decoder, unit);
3019 }
3020
3021 static GstVaapiDecoderStatus
3022 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
3023     GstVaapiDecoderUnit *unit)
3024 {
3025     GstVaapiDecoderH264 * const decoder =
3026         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3027
3028     return decode_picture(decoder, unit);
3029 }
3030
3031 static GstVaapiDecoderStatus
3032 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
3033 {
3034     GstVaapiDecoderH264 * const decoder =
3035         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3036
3037     return decode_current_picture(decoder);
3038 }
3039
3040 static GstVaapiDecoderStatus
3041 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
3042 {
3043     GstVaapiDecoderH264 * const decoder =
3044         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3045
3046     dpb_flush(decoder);
3047     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3048 }
3049
3050 static void
3051 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
3052 {
3053     GstVaapiMiniObjectClass * const object_class =
3054         GST_VAAPI_MINI_OBJECT_CLASS(klass);
3055     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
3056
3057     object_class->size          = sizeof(GstVaapiDecoderH264);
3058     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
3059
3060     decoder_class->create       = gst_vaapi_decoder_h264_create;
3061     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
3062     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
3063     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
3064     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
3065     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
3066     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
3067
3068     decoder_class->decode_codec_data =
3069         gst_vaapi_decoder_h264_decode_codec_data;
3070 }
3071
3072 static inline const GstVaapiDecoderClass *
3073 gst_vaapi_decoder_h264_class(void)
3074 {
3075     static GstVaapiDecoderH264Class g_class;
3076     static gsize g_class_init = FALSE;
3077
3078     if (g_once_init_enter(&g_class_init)) {
3079         gst_vaapi_decoder_h264_class_init(&g_class);
3080         g_once_init_leave(&g_class_init, TRUE);
3081     }
3082     return GST_VAAPI_DECODER_CLASS(&g_class);
3083 }
3084
3085 /**
3086  * gst_vaapi_decoder_h264_new:
3087  * @display: a #GstVaapiDisplay
3088  * @caps: a #GstCaps holding codec information
3089  *
3090  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
3091  * hold extra information like codec-data and picture coded size.
3092  *
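 * A minimal usage sketch (assuming @display and @caps are provided by
 * the caller, e.g. from upstream caps negotiation; the decode loop is
 * elided):
 *
 * |[
 *   GstVaapiDecoder *decoder;
 *
 *   decoder = gst_vaapi_decoder_h264_new(display, caps);
 *   if (decoder) {
 *       ...
 *       gst_vaapi_decoder_unref(decoder);
 *   }
 * ]|
 *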
3093  * Return value: the newly allocated #GstVaapiDecoder object
3094  */
3095 GstVaapiDecoder *
3096 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
3097 {
3098     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
3099 }