1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Define to 1 if strict ordering of the DPB is needed. Only useful for debugging */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
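/* field_poc[TOP_FIELD] holds TopFieldOrderCnt and field_poc[BOTTOM_FIELD]
   holds BottomFieldOrderCnt; G_MAXINT32 marks a field POC that has not
   been computed yet (see gst_vaapi_picture_h264_create()) */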
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* These flags do not strictly follow the definition (7.4.1.2.3)
68        for detecting the start of an access unit: we are only
69        interested in knowing whether the current slice is the first
70        or the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
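
/* The reference state is encoded on two bits (GST_VAAPI_PICTURE_FLAG_REFERENCE
   combined with GST_VAAPI_PICTURE_FLAG_REFERENCE2 above), so the macros below
   can tell short-term, long-term and non-reference pictures apart with a
   single mask comparison */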
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
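/* A frame store holds either one frame picture or a pair of complementary
   field pictures in buffers[]; output_needed counts how many of those
   pictures still have to be output before the frame store may be evicted
   from the DPB (see dpb_evict()) */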
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
355
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
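
/* parser_state and decoder_state (see GstVaapiDecoderH264Private) accumulate
   these bits as NAL units are parsed and decoded; a picture is only submitted
   for decoding once GST_H264_VIDEO_STATE_VALID_PICTURE is reached, as checked
   by is_valid_state() in decode_current_picture() */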
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiPictureH264        *current_picture;
449     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
450     GstVaapiParserInfoH264     *active_sps;
451     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
452     GstVaapiParserInfoH264     *active_pps;
453     GstVaapiParserInfoH264     *prev_pi;
454     GstVaapiParserInfoH264     *prev_slice_pi;
455     GstVaapiFrameStore        **prev_frames;
456     guint                       prev_frames_alloc;
457     GstVaapiFrameStore        **dpb;
458     guint                       dpb_count;
459     guint                       dpb_size;
460     guint                       dpb_size_max;
461     guint                       max_views;
462     GstVaapiProfile             profile;
463     GstVaapiEntrypoint          entrypoint;
464     GstVaapiChromaType          chroma_type;
465     GPtrArray                  *inter_views;
466     GstVaapiPictureH264        *short_ref[32];
467     guint                       short_ref_count;
468     GstVaapiPictureH264        *long_ref[32];
469     guint                       long_ref_count;
470     GstVaapiPictureH264        *RefPicList0[32];
471     guint                       RefPicList0_count;
472     GstVaapiPictureH264        *RefPicList1[32];
473     guint                       RefPicList1_count;
474     guint                       nal_length_size;
475     guint                       mb_width;
476     guint                       mb_height;
477     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
478     gint32                      poc_msb;                // PicOrderCntMsb
479     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
480     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
481     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
482     gint32                      frame_num_offset;       // FrameNumOffset
483     gint32                      frame_num;              // frame_num (from slice_header())
484     gint32                      prev_frame_num;         // prevFrameNum
485     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
486     gboolean                    prev_pic_structure;     // previous picture structure
487     guint                       is_opened               : 1;
488     guint                       is_avcC                 : 1;
489     guint                       has_context             : 1;
490     guint                       progressive_sequence    : 1;
491 };
492
493 /**
494  * GstVaapiDecoderH264:
495  *
496  * A decoder based on H.264.
497  */
498 struct _GstVaapiDecoderH264 {
499     /*< private >*/
500     GstVaapiDecoder             parent_instance;
501     GstVaapiDecoderH264Private  priv;
502 };
503
504 /**
505  * GstVaapiDecoderH264Class:
506  *
507  * A decoder class based on H.264.
508  */
509 struct _GstVaapiDecoderH264Class {
510     /*< private >*/
511     GstVaapiDecoderClass parent_class;
512 };
513
514 static gboolean
515 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
516
517 static gboolean
518 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
519     GstVaapiPictureH264 *picture);
520
521 static inline gboolean
522 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
523     GstVaapiFrameStore *fs)
524 {
525     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
526 }
527
528 /* Determines if the supplied profile is one of the MVC set */
529 static gboolean
530 is_mvc_profile(GstH264Profile profile)
531 {
532     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
533         profile == GST_H264_PROFILE_STEREO_HIGH;
534 }
535
536 /* Determines the view_id from the supplied NAL unit */
537 static inline guint
538 get_view_id(GstH264NalUnit *nalu)
539 {
540     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
541 }
542
543 /* Determines the view order index (VOIdx) from the supplied view_id */
544 static gint
545 get_view_order_index(GstH264SPS *sps, guint16 view_id)
546 {
547     GstH264SPSExtMVC *mvc;
548     gint i;
549
550     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
551         return 0;
552
553     mvc = &sps->extension.mvc;
554     for (i = 0; i <= mvc->num_views_minus1; i++) {
555         if (mvc->view[i].view_id == view_id)
556             return i;
557     }
558     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
559     return -1;
560 }
561
562 /* Determines NumViews */
563 static guint
564 get_num_views(GstH264SPS *sps)
565 {
566     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
567         sps->extension.mvc.num_views_minus1 : 0);
568 }
569
570 /* Determines the DPB size in frames (max_dec_frame_buffering), also used as the number of reference frames for the decode context */
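/* For example, a 1920x1088 stream at level 4.1 (MaxDpbMbs = 32768 per
   Table A-1) yields PicSizeMbs = 120 * 68 = 8160, hence
   max_dec_frame_buffering = 32768 / 8160 = 4 frames, further bounded
   below by num_ref_frames and above by the 16-frame DPB cap */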
571 static guint
572 get_max_dec_frame_buffering(GstH264SPS *sps)
573 {
574     guint num_views, max_dpb_frames;
575     guint max_dec_frame_buffering, PicSizeMbs;
576     GstVaapiLevelH264 level;
577     const GstVaapiH264LevelLimits *level_limits;
578
579     /* Table A-1 - Level limits */
580     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
581         level = GST_VAAPI_LEVEL_H264_L1b;
582     else
583         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
584     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
585     if (G_UNLIKELY(!level_limits)) {
586         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
587         max_dec_frame_buffering = 16;
588     }
589     else {
590         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
591                       (sps->pic_height_in_map_units_minus1 + 1) *
592                       (sps->frame_mbs_only_flag ? 1 : 2));
593         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
594     }
595     if (is_mvc_profile(sps->profile_idc))
596         max_dec_frame_buffering <<= 1;
597
598     /* VUI parameters */
599     if (sps->vui_parameters_present_flag) {
600         GstH264VUIParams * const vui_params = &sps->vui_parameters;
601         if (vui_params->bitstream_restriction_flag)
602             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
603         else {
604             switch (sps->profile_idc) {
605             case 44:  // CAVLC 4:4:4 Intra profile
606             case GST_H264_PROFILE_SCALABLE_HIGH:
607             case GST_H264_PROFILE_HIGH:
608             case GST_H264_PROFILE_HIGH10:
609             case GST_H264_PROFILE_HIGH_422:
610             case GST_H264_PROFILE_HIGH_444:
611                 if (sps->constraint_set3_flag)
612                     max_dec_frame_buffering = 0;
613                 break;
614             }
615         }
616     }
617
618     num_views = get_num_views(sps);
619     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
620     if (max_dec_frame_buffering > max_dpb_frames)
621         max_dec_frame_buffering = max_dpb_frames;
622     else if (max_dec_frame_buffering < sps->num_ref_frames)
623         max_dec_frame_buffering = sps->num_ref_frames;
624     return MAX(1, max_dec_frame_buffering);
625 }
626
627 static void
628 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
629 {
630     gpointer * const entries = array;
631     guint num_entries = *array_length_ptr;
632
633     g_return_if_fail(index < num_entries);
634
635     if (index != --num_entries)
636         entries[index] = entries[num_entries];
637     entries[num_entries] = NULL;
638     *array_length_ptr = num_entries;
639 }
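
/* array_remove_index_fast() removes an entry in O(1) by moving the last
   element into the freed slot, so the array order is not preserved; the
   disabled variant below keeps the order by shifting the tail down */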
640
641 #if 1
642 static inline void
643 array_remove_index(void *array, guint *array_length_ptr, guint index)
644 {
645     array_remove_index_fast(array, array_length_ptr, index);
646 }
647 #else
648 static void
649 array_remove_index(void *array, guint *array_length_ptr, guint index)
650 {
651     gpointer * const entries = array;
652     const guint num_entries = *array_length_ptr - 1;
653     guint i;
654
655     g_return_if_fail(index <= num_entries);
656
657     for (i = index; i < num_entries; i++)
658         entries[i] = entries[i + 1];
659     entries[num_entries] = NULL;
660     *array_length_ptr = num_entries;
661 }
662 #endif
663
664 #define ARRAY_REMOVE_INDEX(array, index) \
665     array_remove_index(array, &array##_count, index)
666
667 static void
668 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
669 {
670     GstVaapiDecoderH264Private * const priv = &decoder->priv;
671     guint i, num_frames = --priv->dpb_count;
672
673     if (USE_STRICT_DPB_ORDERING) {
674         for (i = index; i < num_frames; i++)
675             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
676     }
677     else if (index != num_frames)
678         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
679     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
680 }
681
682 static gboolean
683 dpb_output(
684     GstVaapiDecoderH264 *decoder,
685     GstVaapiFrameStore  *fs,
686     GstVaapiPictureH264 *picture
687 )
688 {
689     picture->output_needed = FALSE;
690
691     if (fs) {
692         if (--fs->output_needed > 0)
693             return TRUE;
694         picture = fs->buffers[0];
695     }
696     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
697 }
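
/* When a frame store is supplied, the surface is pushed downstream only
   once all of its pictures have been output (fs->output_needed reaches
   zero), and then through buffers[0] */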
698
699 static inline void
700 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
701 {
702     GstVaapiDecoderH264Private * const priv = &decoder->priv;
703     GstVaapiFrameStore * const fs = priv->dpb[i];
704
705     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
706         dpb_remove_index(decoder, i);
707 }
708
709 /* Finds the frame store holding the supplied picture */
710 static gint
711 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
712 {
713     GstVaapiDecoderH264Private * const priv = &decoder->priv;
714     gint i, j;
715
716     for (i = 0; i < priv->dpb_count; i++) {
717         GstVaapiFrameStore * const fs = priv->dpb[i];
718         for (j = 0; j < fs->num_buffers; j++) {
719             if (fs->buffers[j] == picture)
720                 return i;
721         }
722     }
723     return -1;
724 }
725
726 /* Finds the picture with the lowest POC that needs to be output */
727 static gint
728 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
729     GstVaapiPictureH264 **found_picture_ptr)
730 {
731     GstVaapiDecoderH264Private * const priv = &decoder->priv;
732     GstVaapiPictureH264 *found_picture = NULL;
733     guint i, j, found_index;
734
735     for (i = 0; i < priv->dpb_count; i++) {
736         GstVaapiFrameStore * const fs = priv->dpb[i];
737         if (!fs->output_needed)
738             continue;
739         if (picture && picture->base.view_id != fs->view_id)
740             continue;
741         for (j = 0; j < fs->num_buffers; j++) {
742             GstVaapiPictureH264 * const pic = fs->buffers[j];
743             if (!pic->output_needed)
744                 continue;
745             if (!found_picture || found_picture->base.poc > pic->base.poc ||
746                 (found_picture->base.poc == pic->base.poc &&
747                  found_picture->base.voc > pic->base.voc))
748                 found_picture = pic, found_index = i;
749         }
750     }
751
752     if (found_picture_ptr)
753         *found_picture_ptr = found_picture;
754     return found_picture ? found_index : -1;
755 }
756
757 /* Finds the picture from another view, with the same POC as the supplied picture, that has the lowest VOC and still needs to be output */
758 static gint
759 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
760     GstVaapiPictureH264 **found_picture_ptr)
761 {
762     GstVaapiDecoderH264Private * const priv = &decoder->priv;
763     GstVaapiPictureH264 *found_picture = NULL;
764     guint i, j, found_index;
765
766     for (i = 0; i < priv->dpb_count; i++) {
767         GstVaapiFrameStore * const fs = priv->dpb[i];
768         if (!fs->output_needed || fs->view_id == picture->base.view_id)
769             continue;
770         for (j = 0; j < fs->num_buffers; j++) {
771             GstVaapiPictureH264 * const pic = fs->buffers[j];
772             if (!pic->output_needed || pic->base.poc != picture->base.poc)
773                 continue;
774             if (!found_picture || found_picture->base.voc > pic->base.voc)
775                 found_picture = pic, found_index = i;
776         }
777     }
778
779     if (found_picture_ptr)
780         *found_picture_ptr = found_picture;
781     return found_picture ? found_index : -1;
782 }
783
784 static gboolean
785 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
786     GstVaapiPictureH264 *picture, guint voc)
787 {
788     GstVaapiDecoderH264Private * const priv = &decoder->priv;
789     GstVaapiPictureH264 *found_picture;
790     gint found_index;
791     gboolean success;
792
793     if (priv->max_views == 1)
794         return TRUE;
795
796     /* Emit all other view components that were in the same access
797        unit as the picture we have just found */
798     found_picture = picture;
799     for (;;) {
800         found_index = dpb_find_lowest_voc(decoder, found_picture,
801             &found_picture);
802         if (found_index < 0 || found_picture->base.voc >= voc)
803             break;
804         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
805         dpb_evict(decoder, found_picture, found_index);
806         if (!success)
807             return FALSE;
808     }
809     return TRUE;
810 }
811
812 static gboolean
813 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
814 {
815     GstVaapiDecoderH264Private * const priv = &decoder->priv;
816     GstVaapiPictureH264 *found_picture;
817     gint found_index;
818     gboolean success;
819
820     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
821     if (found_index < 0)
822         return FALSE;
823
824     if (picture && picture->base.poc != found_picture->base.poc)
825         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
826
827     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
828     dpb_evict(decoder, found_picture, found_index);
829     if (priv->max_views == 1)
830         return success;
831
832     if (picture && picture->base.poc != found_picture->base.poc)
833         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
834     return success;
835 }
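
/* dpb_bump() implements the DPB "bumping" process (C.4.5.3): the picture
   with the smallest POC that still needs to be output is emitted, its frame
   store is evicted if no longer needed, and for MVC the matching view
   components of the same access unit are output along with it */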
836
837 static void
838 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
839 {
840     GstVaapiDecoderH264Private * const priv = &decoder->priv;
841     guint i, n;
842
843     for (i = 0; i < priv->dpb_count; i++) {
844         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
845             continue;
846         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
847     }
848
849     for (i = 0, n = 0; i < priv->dpb_count; i++) {
850         if (priv->dpb[i])
851             priv->dpb[n++] = priv->dpb[i];
852     }
853     priv->dpb_count = n;
854
855     /* Clear previous frame buffers only if this is a "flush-all" operation,
856        or if the picture is the first one in the access unit */
857     if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
858             GST_VAAPI_PICTURE_FLAG_AU_START)) {
859         for (i = 0; i < priv->max_views; i++)
860             gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
861     }
862 }
863
864 static void
865 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
866 {
867     while (dpb_bump(decoder, picture))
868         ;
869     dpb_clear(decoder, picture);
870 }
871
872 static void
873 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
874 {
875     GstVaapiDecoderH264Private * const priv = &decoder->priv;
876     const gboolean is_last_picture = /* in the access unit */
877         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
878     guint i;
879
880     // Remove all unused inter-view only reference components of the current AU
881     i = 0;
882     while (i < priv->dpb_count) {
883         GstVaapiFrameStore * const fs = priv->dpb[i];
884         if (fs->view_id != picture->base.view_id &&
885             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
886             (is_last_picture ||
887              !is_inter_view_reference_for_next_frames(decoder, fs)))
888             dpb_remove_index(decoder, i);
889         else
890             i++;
891     }
892 }
893
894 static gboolean
895 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
896 {
897     GstVaapiDecoderH264Private * const priv = &decoder->priv;
898     GstVaapiFrameStore *fs;
899     guint i;
900
901     if (priv->max_views > 1)
902         dpb_prune_mvc(decoder, picture);
903
904     // Remove all unused pictures
905     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
906         i = 0;
907         while (i < priv->dpb_count) {
908             GstVaapiFrameStore * const fs = priv->dpb[i];
909             if (fs->view_id == picture->base.view_id &&
910                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
911                 dpb_remove_index(decoder, i);
912             else
913                 i++;
914         }
915     }
916
917     // Check if picture is the second field and the first field is still in DPB
918     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
919         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
920         const gint found_index = dpb_find_picture(decoder,
921             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
922         if (found_index >= 0)
923             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
924     }
925
926     // Create new frame store, and split fields if necessary
927     fs = gst_vaapi_frame_store_new(picture);
928     if (!fs)
929         return FALSE;
930     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
931     gst_vaapi_frame_store_unref(fs);
932
933     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
934         if (!gst_vaapi_frame_store_split_fields(fs))
935             return FALSE;
936     }
937
938     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
939     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
940         while (priv->dpb_count == priv->dpb_size) {
941             if (!dpb_bump(decoder, picture))
942                 return FALSE;
943         }
944     }
945
946     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
947     else {
948         const gboolean StoreInterViewOnlyRefFlag =
949             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
950                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
951             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
952                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
953         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
954             return TRUE;
955         while (priv->dpb_count == priv->dpb_size) {
956             if (!StoreInterViewOnlyRefFlag) {
957                 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
958                     return dpb_output(decoder, NULL, picture);
959             }
960             if (!dpb_bump(decoder, picture))
961                 return FALSE;
962         }
963     }
964
965     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
966     if (picture->output_flag) {
967         picture->output_needed = TRUE;
968         fs->output_needed++;
969     }
970     return TRUE;
971 }
972
973 static gboolean
974 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
975 {
976     GstVaapiDecoderH264Private * const priv = &decoder->priv;
977
978     if (dpb_size < priv->dpb_count)
979         return FALSE;
980
981     if (dpb_size > priv->dpb_size_max) {
982         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
983         if (!priv->dpb)
984             return FALSE;
985         memset(&priv->dpb[priv->dpb_size_max], 0,
986             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
987         priv->dpb_size_max = dpb_size;
988     }
989
990     if (priv->dpb_size < dpb_size)
991         priv->dpb_size = dpb_size;
992     else if (dpb_size < priv->dpb_count)
993         return FALSE;
994
995     GST_DEBUG("DPB size %u", priv->dpb_size);
996     return TRUE;
997 }
998
999 static void
1000 unref_inter_view(GstVaapiPictureH264 *picture)
1001 {
1002     if (!picture)
1003         return;
1004     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1005     gst_vaapi_picture_unref(picture);
1006 }
1007
1008 /* Resets MVC resources */
1009 static gboolean
1010 mvc_reset(GstVaapiDecoderH264 *decoder)
1011 {
1012     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1013     guint i;
1014
1015     // Resize array of inter-view references
1016     if (!priv->inter_views) {
1017         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1018             (GDestroyNotify)unref_inter_view);
1019         if (!priv->inter_views)
1020             return FALSE;
1021     }
1022
1023     // Resize array of previous frame buffers
1024     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1025         gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1026
1027     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1028         sizeof(*priv->prev_frames));
1029     if (!priv->prev_frames) {
1030         priv->prev_frames_alloc = 0;
1031         return FALSE;
1032     }
1033     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1034         priv->prev_frames[i] = NULL;
1035     priv->prev_frames_alloc = priv->max_views;
1036     return TRUE;
1037 }
1038
1039 static GstVaapiDecoderStatus
1040 get_status(GstH264ParserResult result)
1041 {
1042     GstVaapiDecoderStatus status;
1043
1044     switch (result) {
1045     case GST_H264_PARSER_OK:
1046         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1047         break;
1048     case GST_H264_PARSER_NO_NAL_END:
1049         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1050         break;
1051     case GST_H264_PARSER_ERROR:
1052         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1053         break;
1054     default:
1055         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1056         break;
1057     }
1058     return status;
1059 }
1060
1061 static void
1062 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1063 {
1064     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1065
1066     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1067     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1068     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1069
1070     dpb_clear(decoder, NULL);
1071
1072     if (priv->inter_views) {
1073         g_ptr_array_unref(priv->inter_views);
1074         priv->inter_views = NULL;
1075     }
1076
1077     if (priv->parser) {
1078         gst_h264_nal_parser_free(priv->parser);
1079         priv->parser = NULL;
1080     }
1081 }
1082
1083 static gboolean
1084 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1085 {
1086     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1087
1088     gst_vaapi_decoder_h264_close(decoder);
1089
1090     priv->parser = gst_h264_nal_parser_new();
1091     if (!priv->parser)
1092         return FALSE;
1093     return TRUE;
1094 }
1095
1096 static void
1097 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1098 {
1099     GstVaapiDecoderH264 * const decoder =
1100         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1101     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1102     guint i;
1103
1104     gst_vaapi_decoder_h264_close(decoder);
1105
1106     g_free(priv->dpb);
1107     priv->dpb = NULL;
1108     priv->dpb_size = 0;
1109
1110     g_free(priv->prev_frames);
1111     priv->prev_frames = NULL;
1112     priv->prev_frames_alloc = 0;
1113
1114     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1115         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1116     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1117
1118     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1119         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1120     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1121 }
1122
1123 static gboolean
1124 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1125 {
1126     GstVaapiDecoderH264 * const decoder =
1127         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1128     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1129
1130     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1131     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1132     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1133     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1134     priv->progressive_sequence  = TRUE;
1135     return TRUE;
1136 }
1137
1138 /* Activates the supplied PPS */
1139 static GstH264PPS *
1140 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1141 {
1142     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1143     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1144
1145     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1146     return pi ? &pi->data.pps : NULL;
1147 }
1148
1149 /* Returns the active PPS */
1150 static inline GstH264PPS *
1151 get_pps(GstVaapiDecoderH264 *decoder)
1152 {
1153     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1154
1155     return pi ? &pi->data.pps : NULL;
1156 }
1157
1158 /* Activates the supplied SPS */
1159 static GstH264SPS *
1160 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1161 {
1162     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1163     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1164
1165     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1166     return pi ? &pi->data.sps : NULL;
1167 }
1168
1169 /* Returns the active SPS */
1170 static inline GstH264SPS *
1171 get_sps(GstVaapiDecoderH264 *decoder)
1172 {
1173     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1174
1175     return pi ? &pi->data.sps : NULL;
1176 }
1177
1178 static void
1179 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1180     GstVaapiProfile profile)
1181 {
1182     guint n_profiles = *n_profiles_ptr;
1183
1184     profiles[n_profiles++] = profile;
1185     switch (profile) {
1186     case GST_VAAPI_PROFILE_H264_MAIN:
1187         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1188         break;
1189     default:
1190         break;
1191     }
1192     *n_profiles_ptr = n_profiles;
1193 }
1194
1195 /* Fills in compatible profiles for MVC decoding */
1196 static void
1197 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1198     guint *n_profiles_ptr, guint dpb_size)
1199 {
1200     const gchar * const vendor_string =
1201         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1202
1203     gboolean add_high_profile = FALSE;
1204     struct map {
1205         const gchar *str;
1206         guint str_len;
1207     };
1208     const struct map *m;
1209
1210     // Drivers that support slice level decoding
1211     if (vendor_string && dpb_size <= 16) {
1212         static const struct map drv_names[] = {
1213             { "Intel i965 driver", 17 },
1214             { NULL, 0 }
1215         };
1216         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1217             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1218                 add_high_profile = TRUE;
1219         }
1220     }
1221
1222     if (add_high_profile)
1223         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1224 }
1225
1226 static GstVaapiProfile
1227 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1228 {
1229     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1230     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1231     GstVaapiProfile profile, profiles[4];
1232     guint i, n_profiles = 0;
1233
1234     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1235     if (!profile)
1236         return GST_VAAPI_PROFILE_UNKNOWN;
1237
1238     fill_profiles(profiles, &n_profiles, profile);
1239     switch (profile) {
1240     case GST_VAAPI_PROFILE_H264_BASELINE:
1241         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1242             fill_profiles(profiles, &n_profiles,
1243                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1244             fill_profiles(profiles, &n_profiles,
1245                 GST_VAAPI_PROFILE_H264_MAIN);
1246         }
1247         break;
1248     case GST_VAAPI_PROFILE_H264_EXTENDED:
1249         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1250             fill_profiles(profiles, &n_profiles,
1251                 GST_VAAPI_PROFILE_H264_MAIN);
1252         }
1253         break;
1254     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1255         if (priv->max_views == 2) {
1256             fill_profiles(profiles, &n_profiles,
1257                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1258         }
1259         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1260         break;
1261     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1262         if (sps->frame_mbs_only_flag) {
1263             fill_profiles(profiles, &n_profiles,
1264                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1265         }
1266         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1267         break;
1268     default:
1269         break;
1270     }
1271
1272     /* If the preferred profile (profiles[0]) matches one that we already
1273        found, then just return it now instead of searching for it again */
1274     if (profiles[0] == priv->profile)
1275         return priv->profile;
1276
1277     for (i = 0; i < n_profiles; i++) {
1278         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1279             return profiles[i];
1280     }
1281     return GST_VAAPI_PROFILE_UNKNOWN;
1282 }
1283
1284 static GstVaapiDecoderStatus
1285 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1286 {
1287     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1288     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1289     GstVaapiContextInfo info;
1290     GstVaapiProfile profile;
1291     GstVaapiChromaType chroma_type;
1292     gboolean reset_context = FALSE;
1293     guint mb_width, mb_height, dpb_size;
1294
1295     dpb_size = get_max_dec_frame_buffering(sps);
1296     if (priv->dpb_size < dpb_size) {
1297         GST_DEBUG("DPB size increased");
1298         reset_context = TRUE;
1299     }
1300
1301     profile = get_profile(decoder, sps, dpb_size);
1302     if (!profile) {
1303         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1304         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1305     }
1306
1307     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1308         GST_DEBUG("profile changed");
1309         reset_context = TRUE;
1310         priv->profile = profile;
1311     }
1312
1313     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1314     if (!chroma_type) {
1315         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1316         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1317     }
1318
1319     if (priv->chroma_type != chroma_type) {
1320         GST_DEBUG("chroma format changed");
1321         reset_context     = TRUE;
1322         priv->chroma_type = chroma_type;
1323     }
1324
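    /* When frame_mbs_only_flag == 0, pic_height_in_map_units counts field
       macroblock rows, so the frame height in macroblocks is doubled below */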
1325     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1326     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1327         !sps->frame_mbs_only_flag;
1328     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1329         GST_DEBUG("size changed");
1330         reset_context   = TRUE;
1331         priv->mb_width  = mb_width;
1332         priv->mb_height = mb_height;
1333     }
1334
1335     priv->progressive_sequence = sps->frame_mbs_only_flag;
1336 #if 0
1337     /* XXX: we only output complete frames for now */
1338     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1339 #endif
1340
1341     gst_vaapi_decoder_set_pixel_aspect_ratio(
1342         base_decoder,
1343         sps->vui_parameters.par_n,
1344         sps->vui_parameters.par_d
1345     );
1346
1347     if (!reset_context && priv->has_context)
1348         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1349
1350     /* XXX: fix surface size when cropping is implemented */
1351     info.profile    = priv->profile;
1352     info.entrypoint = priv->entrypoint;
1353     info.chroma_type = priv->chroma_type;
1354     info.width      = sps->width;
1355     info.height     = sps->height;
1356     info.ref_frames = dpb_size;
1357
1358     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1359         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1360     priv->has_context = TRUE;
1361
1362     /* Reset DPB */
1363     if (!dpb_reset(decoder, dpb_size))
1364         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1365
1366     /* Reset MVC data */
1367     if (!mvc_reset(decoder))
1368         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1369     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1370 }
1371
1372 static void
1373 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1374     const GstH264SPS *sps)
1375 {
1376     guint i;
1377
1378     /* There are always 6 4x4 scaling lists */
1379     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1380     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1381
1382     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1383         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1384             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1385 }
1386
1387 static void
1388 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1389     const GstH264SPS *sps)
1390 {
1391     guint i, n;
1392
1393     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1394     if (!pps->transform_8x8_mode_flag)
1395         return;
1396
1397     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1398     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1399
1400     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1401     for (i = 0; i < n; i++) {
1402         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1403             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1404     }
1405 }
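
/* Both helpers above convert the scaling lists from the parser's zig-zag
   scan order into raster order before filling VAIQMatrixBufferH264 */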
1406
1407 static GstVaapiDecoderStatus
1408 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1409 {
1410     GstVaapiPicture * const base_picture = &picture->base;
1411     GstH264PPS * const pps = get_pps(decoder);
1412     GstH264SPS * const sps = get_sps(decoder);
1413     VAIQMatrixBufferH264 *iq_matrix;
1414
1415     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1416     if (!base_picture->iq_matrix) {
1417         GST_ERROR("failed to allocate IQ matrix");
1418         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1419     }
1420     iq_matrix = base_picture->iq_matrix->param;
1421
1422     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1423        is not large enough to hold lists for 4:4:4 */
1424     if (sps->chroma_format_idc == 3)
1425         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1426
1427     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1428     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1429
1430     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1431 }
1432
1433 static inline gboolean
1434 is_valid_state(guint state, guint ref_state)
1435 {
1436     return (state & ref_state) == ref_state;
1437 }
1438
1439 static GstVaapiDecoderStatus
1440 decode_current_picture(GstVaapiDecoderH264 *decoder)
1441 {
1442     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1443     GstVaapiPictureH264 * const picture = priv->current_picture;
1444
1445     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1446         goto drop_frame;
1447     priv->decoder_state = 0;
1448
1449     if (!picture)
1450         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1451
1452     if (!exec_ref_pic_marking(decoder, picture))
1453         goto error;
1454     if (!dpb_add(decoder, picture))
1455         goto error;
1456     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1457         goto error;
1458     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1459     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1460
1461 error:
1462     /* XXX: fix for cases where first field failed to be decoded */
1463     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1464     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1465
1466 drop_frame:
1467     priv->decoder_state = 0;
1468     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1469 }
1470
1471 static GstVaapiDecoderStatus
1472 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1473 {
1474     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1475     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1476     GstH264SPS * const sps = &pi->data.sps;
1477     GstH264ParserResult result;
1478
1479     GST_DEBUG("parse SPS");
1480
1481     priv->parser_state = 0;
1482
1483     /* Variables that don't have inferred values per the H.264
1484        standard but that should get a default value anyway */
1485     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1486
1487     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1488     if (result != GST_H264_PARSER_OK)
1489         return get_status(result);
1490
1491     /* Reset defaults */
1492     priv->max_views = 1;
1493
1494     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1495     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1496 }
1497
1498 static GstVaapiDecoderStatus
1499 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1500 {
1501     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1502     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1503     GstH264SPS * const sps = &pi->data.sps;
1504     GstH264ParserResult result;
1505
1506     GST_DEBUG("parse subset SPS");
1507
1508     /* Variables that don't have inferred values per the H.264
1509        standard but that should get a default value anyway */
1510     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1511
1512     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1513         TRUE);
1514     if (result != GST_H264_PARSER_OK)
1515         return get_status(result);
1516
1517     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1518     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1519 }
1520
1521 static GstVaapiDecoderStatus
1522 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1523 {
1524     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1525     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1526     GstH264PPS * const pps = &pi->data.pps;
1527     GstH264ParserResult result;
1528
1529     GST_DEBUG("parse PPS");
1530
1531     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1532
1533     /* Variables that don't have inferred values per the H.264
1534        standard but that should get a default value anyway */
1535     pps->slice_group_map_type = 0;
1536     pps->slice_group_change_rate_minus1 = 0;
1537
1538     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1539     if (result != GST_H264_PARSER_OK)
1540         return get_status(result);
1541
1542     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1543     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1544 }
1545
1546 static GstVaapiDecoderStatus
1547 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1548 {
1549     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1550     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1551     GArray ** const sei_ptr = &pi->data.sei;
1552     GstH264ParserResult result;
1553
1554     GST_DEBUG("parse SEI");
1555
1556     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1557     if (result != GST_H264_PARSER_OK) {
1558         GST_WARNING("failed to parse SEI messages");
1559         return get_status(result);
1560     }
1561     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1562 }
1563
1564 static GstVaapiDecoderStatus
1565 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1566 {
1567     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1568     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1569     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1570     GstH264NalUnit * const nalu = &pi->nalu;
1571     GstH264SPS *sps;
1572     GstH264ParserResult result;
1573     guint num_views;
1574
1575     GST_DEBUG("parse slice");
1576
1577     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1578                            GST_H264_VIDEO_STATE_GOT_PPS);
1579
1580     /* Propagate Prefix NAL unit info, if necessary */
1581     switch (nalu->type) {
1582     case GST_H264_NAL_SLICE:
1583     case GST_H264_NAL_SLICE_IDR: {
1584         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1585         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1586             /* MVC sequences shall have a Prefix NAL unit immediately
1587                preceding this NAL unit */
1588             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1589             pi->nalu.extension = prev_pi->nalu.extension;
1590         }
1591         else {
1592             /* In the very unlikely case there is no Prefix NAL unit
1593                immediately preceding this NAL unit, try to infer some
1594                defaults (H.7.4.1.1) */
1595             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1596             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1597             nalu->idr_pic_flag = !mvc->non_idr_flag;
1598             mvc->priority_id = 0;
1599             mvc->view_id = 0;
1600             mvc->temporal_id = 0;
1601             mvc->anchor_pic_flag = 0;
1602             mvc->inter_view_flag = 1;
1603         }
1604         break;
1605     }
1606     }
1607
1608     /* Variables that don't have inferred values per the H.264
1609        standard but that should get a default value anyway */
1610     slice_hdr->cabac_init_idc = 0;
1611     slice_hdr->direct_spatial_mv_pred_flag = 0;
1612
1613     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1614         slice_hdr, TRUE, TRUE);
1615     if (result != GST_H264_PARSER_OK)
1616         return get_status(result);
1617
1618     sps = slice_hdr->pps->sequence;
1619
1620     /* Update MVC data */
1621     num_views = get_num_views(sps);
1622     if (priv->max_views < num_views) {
1623         priv->max_views = num_views;
1624         GST_DEBUG("maximum number of views changed to %u", num_views);
1625     }
1626     pi->view_id = get_view_id(&pi->nalu);
1627     pi->voc = get_view_order_index(sps, pi->view_id);
1628
1629     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1630     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1631 }
1632
1633 static GstVaapiDecoderStatus
1634 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1635 {
1636     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1637     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1638     GstH264SPS * const sps = &pi->data.sps;
1639
1640     GST_DEBUG("decode SPS");
1641
1642     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1643     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1644 }
1645
1646 static GstVaapiDecoderStatus
1647 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1648 {
1649     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1650     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1651     GstH264SPS * const sps = &pi->data.sps;
1652
1653     GST_DEBUG("decode subset SPS");
1654
1655     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1656     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1657 }
1658
1659 static GstVaapiDecoderStatus
1660 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1661 {
1662     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1663     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1664     GstH264PPS * const pps = &pi->data.pps;
1665
1666     GST_DEBUG("decode PPS");
1667
1668     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1669     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1670 }
1671
1672 static GstVaapiDecoderStatus
1673 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1674 {
1675     GstVaapiDecoderStatus status;
1676
1677     GST_DEBUG("decode sequence-end");
1678
1679     status = decode_current_picture(decoder);
1680     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1681         return status;
1682
1683     dpb_flush(decoder, NULL);
1684     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1685 }
1686
1687 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1688 static void
1689 init_picture_poc_0(
1690     GstVaapiDecoderH264 *decoder,
1691     GstVaapiPictureH264 *picture,
1692     GstH264SliceHdr     *slice_hdr
1693 )
1694 {
1695     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1696     GstH264SPS * const sps = get_sps(decoder);
1697     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1698     gint32 temp_poc;
1699
1700     GST_DEBUG("decode picture order count type 0");
1701
1702     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1703         priv->prev_poc_msb = 0;
1704         priv->prev_poc_lsb = 0;
1705     }
1706     else if (priv->prev_pic_has_mmco5) {
1707         priv->prev_poc_msb = 0;
1708         priv->prev_poc_lsb =
1709             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1710              0 : priv->field_poc[TOP_FIELD]);
1711     }
1712     else {
1713         priv->prev_poc_msb = priv->poc_msb;
1714         priv->prev_poc_lsb = priv->poc_lsb;
1715     }
1716
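    /* Example of the wrap detection below: with MaxPicOrderCntLsb = 16,
       going from prev_poc_lsb = 14 to poc_lsb = 2 means the LSB counter
       wrapped around, so MaxPicOrderCntLsb is added to the MSB part */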
1717     // (8-3)
1718     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1719     if (priv->poc_lsb < priv->prev_poc_lsb &&
1720         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1721         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1722     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1723              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1724         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1725     else
1726         priv->poc_msb = priv->prev_poc_msb;
1727
1728     temp_poc = priv->poc_msb + priv->poc_lsb;
1729     switch (picture->structure) {
1730     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1731         // (8-4, 8-5)
1732         priv->field_poc[TOP_FIELD] = temp_poc;
1733         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1734             slice_hdr->delta_pic_order_cnt_bottom;
1735         break;
1736     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1737         // (8-4)
1738         priv->field_poc[TOP_FIELD] = temp_poc;
1739         break;
1740     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1741         // (8-5)
1742         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1743         break;
1744     }
1745 }
1746
1747 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1748 static void
1749 init_picture_poc_1(
1750     GstVaapiDecoderH264 *decoder,
1751     GstVaapiPictureH264 *picture,
1752     GstH264SliceHdr     *slice_hdr
1753 )
1754 {
1755     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1756     GstH264SPS * const sps = get_sps(decoder);
1757     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1758     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1759     guint i;
1760
1761     GST_DEBUG("decode picture order count type 1");
1762
1763     if (priv->prev_pic_has_mmco5)
1764         prev_frame_num_offset = 0;
1765     else
1766         prev_frame_num_offset = priv->frame_num_offset;
1767
1768     // (8-6)
1769     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1770         priv->frame_num_offset = 0;
1771     else if (priv->prev_frame_num > priv->frame_num)
1772         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1773     else
1774         priv->frame_num_offset = prev_frame_num_offset;
1775
1776     // (8-7)
1777     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1778         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1779     else
1780         abs_frame_num = 0;
1781     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1782         abs_frame_num = abs_frame_num - 1;
1783
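    /* expected_poc accumulates whole cycles of the per-SPS offset pattern
       (8-8) plus the partial cycle up to frame_num_in_poc_cycle (8-9) */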
1784     if (abs_frame_num > 0) {
1785         gint32 expected_delta_per_poc_cycle;
1786         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1787
1788         expected_delta_per_poc_cycle = 0;
1789         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1790             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1791
1792         // (8-8)
1793         poc_cycle_cnt = (abs_frame_num - 1) /
1794             sps->num_ref_frames_in_pic_order_cnt_cycle;
1795         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1796             sps->num_ref_frames_in_pic_order_cnt_cycle;
1797
1798         // (8-9)
1799         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1800         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1801             expected_poc += sps->offset_for_ref_frame[i];
1802     }
1803     else
1804         expected_poc = 0;
1805     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1806         expected_poc += sps->offset_for_non_ref_pic;
1807
1808     // (8-10)
1809     switch (picture->structure) {
1810     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1811         priv->field_poc[TOP_FIELD] = expected_poc +
1812             slice_hdr->delta_pic_order_cnt[0];
1813         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1814             sps->offset_for_top_to_bottom_field +
1815             slice_hdr->delta_pic_order_cnt[1];
1816         break;
1817     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1818         priv->field_poc[TOP_FIELD] = expected_poc +
1819             slice_hdr->delta_pic_order_cnt[0];
1820         break;
1821     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1822         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1823             sps->offset_for_top_to_bottom_field +
1824             slice_hdr->delta_pic_order_cnt[0];
1825         break;
1826     }
1827 }
1828
1829 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1830 static void
1831 init_picture_poc_2(
1832     GstVaapiDecoderH264 *decoder,
1833     GstVaapiPictureH264 *picture,
1834     GstH264SliceHdr     *slice_hdr
1835 )
1836 {
1837     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1838     GstH264SPS * const sps = get_sps(decoder);
1839     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1840     gint32 prev_frame_num_offset, temp_poc;
1841
1842     GST_DEBUG("decode picture order count type 2");
1843
1844     if (priv->prev_pic_has_mmco5)
1845         prev_frame_num_offset = 0;
1846     else
1847         prev_frame_num_offset = priv->frame_num_offset;
1848
1849     // (8-11)
1850     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1851         priv->frame_num_offset = 0;
1852     else if (priv->prev_frame_num > priv->frame_num)
1853         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1854     else
1855         priv->frame_num_offset = prev_frame_num_offset;
1856
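    /* With POC type 2, output order follows decoding order: reference
       pictures get an even POC and non-reference pictures the preceding
       odd value (8-12) */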
1857     // (8-12)
1858     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1859         temp_poc = 0;
1860     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1861         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1862     else
1863         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1864
1865     // (8-13)
1866     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1867         priv->field_poc[TOP_FIELD] = temp_poc;
1868     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1869         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1870 }
1871
1872 /* 8.2.1 - Decoding process for picture order count */
1873 static void
1874 init_picture_poc(
1875     GstVaapiDecoderH264 *decoder,
1876     GstVaapiPictureH264 *picture,
1877     GstH264SliceHdr     *slice_hdr
1878 )
1879 {
1880     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1881     GstH264SPS * const sps = get_sps(decoder);
1882
1883     switch (sps->pic_order_cnt_type) {
1884     case 0:
1885         init_picture_poc_0(decoder, picture, slice_hdr);
1886         break;
1887     case 1:
1888         init_picture_poc_1(decoder, picture, slice_hdr);
1889         break;
1890     case 2:
1891         init_picture_poc_2(decoder, picture, slice_hdr);
1892         break;
1893     }
1894
1895     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1896         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1897     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1898         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1899     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1900 }
1901
1902 static int
1903 compare_picture_pic_num_dec(const void *a, const void *b)
1904 {
1905     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1906     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1907
1908     return picB->pic_num - picA->pic_num;
1909 }
1910
1911 static int
1912 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1913 {
1914     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1916
1917     return picA->long_term_pic_num - picB->long_term_pic_num;
1918 }
1919
1920 static int
1921 compare_picture_poc_dec(const void *a, const void *b)
1922 {
1923     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1924     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1925
1926     return picB->base.poc - picA->base.poc;
1927 }
1928
1929 static int
1930 compare_picture_poc_inc(const void *a, const void *b)
1931 {
1932     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1933     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1934
1935     return picA->base.poc - picB->base.poc;
1936 }
1937
1938 static int
1939 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1940 {
1941     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1942     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1943
1944     return picB->frame_num_wrap - picA->frame_num_wrap;
1945 }
1946
1947 static int
1948 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1949 {
1950     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1951     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1952
1953     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1954 }
1955
1956 /* 8.2.4.1 - Decoding process for picture numbers */
1957 static void
1958 init_picture_refs_pic_num(
1959     GstVaapiDecoderH264 *decoder,
1960     GstVaapiPictureH264 *picture,
1961     GstH264SliceHdr     *slice_hdr
1962 )
1963 {
1964     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1965     GstH264SPS * const sps = get_sps(decoder);
1966     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1967     guint i;
1968
1969     GST_DEBUG("decode picture numbers");
1970
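    /* FrameNumWrap (8-27) makes frame numbers comparable across a
       MaxFrameNum wrap: short-term pictures with frame_num greater than
       the current frame_num are treated as belonging to the previous cycle */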
1971     for (i = 0; i < priv->short_ref_count; i++) {
1972         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1973
1974         // (H.8.2)
1975         if (pic->base.view_id != picture->base.view_id)
1976             continue;
1977
1978         // (8-27)
1979         if (pic->frame_num > priv->frame_num)
1980             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1981         else
1982             pic->frame_num_wrap = pic->frame_num;
1983
1984         // (8-28, 8-30, 8-31)
1985         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1986             pic->pic_num = pic->frame_num_wrap;
1987         else {
1988             if (pic->structure == picture->structure)
1989                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1990             else
1991                 pic->pic_num = 2 * pic->frame_num_wrap;
1992         }
1993     }
1994
1995     for (i = 0; i < priv->long_ref_count; i++) {
1996         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1997
1998         // (H.8.2)
1999         if (pic->base.view_id != picture->base.view_id)
2000             continue;
2001
2002         // (8-29, 8-32, 8-33)
2003         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2004             pic->long_term_pic_num = pic->long_term_frame_idx;
2005         else {
2006             if (pic->structure == picture->structure)
2007                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2008             else
2009                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2010         }
2011     }
2012 }
2013
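/* Sorts a reference picture list in place using one of the
   compare_picture_*() helpers defined above */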
2014 #define SORT_REF_LIST(list, n, compare_func) \
2015     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
2016
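/* 8.2.4.2.5: interleaves reference fields of the same parity as the current
   picture with fields of the opposite parity, starting with the same parity,
   while preserving the relative order within the input reference list */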
2017 static void
2018 init_picture_refs_fields_1(
2019     guint                picture_structure,
2020     GstVaapiPictureH264 *RefPicList[32],
2021     guint               *RefPicList_count,
2022     GstVaapiPictureH264 *ref_list[32],
2023     guint                ref_list_count
2024 )
2025 {
2026     guint i, j, n;
2027
2028     i = 0;
2029     j = 0;
2030     n = *RefPicList_count;
2031     do {
2032         g_assert(n < 32);
2033         for (; i < ref_list_count; i++) {
2034             if (ref_list[i]->structure == picture_structure) {
2035                 RefPicList[n++] = ref_list[i++];
2036                 break;
2037             }
2038         }
2039         for (; j < ref_list_count; j++) {
2040             if (ref_list[j]->structure != picture_structure) {
2041                 RefPicList[n++] = ref_list[j++];
2042                 break;
2043             }
2044         }
2045     } while (i < ref_list_count || j < ref_list_count);
2046     *RefPicList_count = n;
2047 }
2048
2049 static inline void
2050 init_picture_refs_fields(
2051     GstVaapiPictureH264 *picture,
2052     GstVaapiPictureH264 *RefPicList[32],
2053     guint               *RefPicList_count,
2054     GstVaapiPictureH264 *short_ref[32],
2055     guint                short_ref_count,
2056     GstVaapiPictureH264 *long_ref[32],
2057     guint                long_ref_count
2058 )
2059 {
2060     guint n = 0;
2061
2062     /* 8.2.4.2.5 - reference picture lists in fields */
2063     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2064         short_ref, short_ref_count);
2065     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2066         long_ref, long_ref_count);
2067     *RefPicList_count = n;
2068 }
2069
2070 /* Finds the inter-view reference picture with the supplied view id */
2071 static GstVaapiPictureH264 *
2072 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2073 {
2074     GPtrArray * const inter_views = decoder->priv.inter_views;
2075     guint i;
2076
2077     for (i = 0; i < inter_views->len; i++) {
2078         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2079         if (picture->base.view_id == view_id)
2080             return picture;
2081     }
2082
2083     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2084         view_id);
2085     return NULL;
2086 }
2087
2088 /* Checks whether the view id exists in the supplied list of view ids */
2089 static gboolean
2090 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2091 {
2092     guint i;
2093
2094     for (i = 0; i < num_view_ids; i++) {
2095         if (view_ids[i] == view_id)
2096             return TRUE;
2097     }
2098     return FALSE;
2099 }
2100
2101 static gboolean
2102 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2103     gboolean is_anchor)
2104 {
2105     if (is_anchor)
2106         return (find_view_id(view_id, view->anchor_ref_l0,
2107                     view->num_anchor_refs_l0) ||
2108                 find_view_id(view_id, view->anchor_ref_l1,
2109                     view->num_anchor_refs_l1));
2110
2111     return (find_view_id(view_id, view->non_anchor_ref_l0,
2112                 view->num_non_anchor_refs_l0) ||
2113             find_view_id(view_id, view->non_anchor_ref_l1,
2114                 view->num_non_anchor_refs_l1));
2115 }
2116
2117 /* Checks whether the inter-view reference picture with the supplied
2118    view id is used for decoding the current view component picture */
2119 static gboolean
2120 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2121     guint16 view_id, GstVaapiPictureH264 *picture)
2122 {
2123     const GstH264SPS * const sps = get_sps(decoder);
2124     gboolean is_anchor;
2125
2126     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2127         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2128         return FALSE;
2129
2130     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2131     return find_view_id_in_view(view_id,
2132         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2133 }
2134
2135 /* Checks whether the supplied inter-view reference picture is used
2136    for decoding the next view component pictures */
2137 static gboolean
2138 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2139     GstVaapiPictureH264 *picture)
2140 {
2141     const GstH264SPS * const sps = get_sps(decoder);
2142     gboolean is_anchor;
2143     guint i, num_views;
2144
2145     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2146         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2147         return FALSE;
2148
2149     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2150     num_views = sps->extension.mvc.num_views_minus1 + 1;
2151     for (i = picture->base.voc + 1; i < num_views; i++) {
2152         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2153         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2154             return TRUE;
2155     }
2156     return FALSE;
2157 }
2158
2159 /* H.8.2.1 - Initialization process for inter-view prediction references */
2160 static void
2161 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2162     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2163     const guint16 *view_ids, guint num_view_ids)
2164 {
2165     guint j, n;
2166
2167     n = *ref_list_count_ptr;
2168     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2169         GstVaapiPictureH264 * const pic =
2170             find_inter_view_reference(decoder, view_ids[j]);
2171         if (pic)
2172             ref_list[n++] = pic;
2173     }
2174     *ref_list_count_ptr = n;
2175 }
2176
2177 static inline void
2178 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2179     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2180 {
2181     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2182     const GstH264SPS * const sps = get_sps(decoder);
2183     const GstH264SPSExtMVCView *view;
2184
2185     GST_DEBUG("initialize reference picture list for inter-view prediction");
2186
2187     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2188         return;
2189     view = &sps->extension.mvc.view[picture->base.voc];
2190
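    /* Token-pasting helper: e.g. INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref)
       expands into a call that appends inter-view references to
       priv->RefPicList0 using view->anchor_ref_l0[] and
       view->num_anchor_refs_l0 */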
2191 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2192         init_picture_refs_mvc_1(decoder,                                \
2193             priv->RefPicList##ref_list,                                 \
2194             &priv->RefPicList##ref_list##_count,                        \
2195             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2196             view->view_list##_l##ref_list,                              \
2197             view->num_##view_list##s_l##ref_list);                      \
2198     } while (0)
2199
2200     if (list == 0) {
2201         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2202             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2203         else
2204             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2205     }
2206     else {
2207         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2208             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2209         else
2210             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2211     }
2212
2213 #undef INVOKE_INIT_PICTURE_REFS_MVC
2214 }
2215
2216 static void
2217 init_picture_refs_p_slice(
2218     GstVaapiDecoderH264 *decoder,
2219     GstVaapiPictureH264 *picture,
2220     GstH264SliceHdr     *slice_hdr
2221 )
2222 {
2223     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2224     GstVaapiPictureH264 **ref_list;
2225     guint i;
2226
2227     GST_DEBUG("decode reference picture list for P and SP slices");
2228
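    /* Initial RefPicList0 for P/SP slices: short-term references sorted by
       decreasing PicNum (decreasing FrameNumWrap for fields), followed by
       long-term references sorted by increasing LongTermPicNum
       (increasing LongTermFrameIdx for fields) */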
2229     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2230         /* 8.2.4.2.1 - P and SP slices in frames */
2231         if (priv->short_ref_count > 0) {
2232             ref_list = priv->RefPicList0;
2233             for (i = 0; i < priv->short_ref_count; i++)
2234                 ref_list[i] = priv->short_ref[i];
2235             SORT_REF_LIST(ref_list, i, pic_num_dec);
2236             priv->RefPicList0_count += i;
2237         }
2238
2239         if (priv->long_ref_count > 0) {
2240             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2241             for (i = 0; i < priv->long_ref_count; i++)
2242                 ref_list[i] = priv->long_ref[i];
2243             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2244             priv->RefPicList0_count += i;
2245         }
2246     }
2247     else {
2248         /* 8.2.4.2.2 - P and SP slices in fields */
2249         GstVaapiPictureH264 *short_ref[32];
2250         guint short_ref_count = 0;
2251         GstVaapiPictureH264 *long_ref[32];
2252         guint long_ref_count = 0;
2253
2254         if (priv->short_ref_count > 0) {
2255             for (i = 0; i < priv->short_ref_count; i++)
2256                 short_ref[i] = priv->short_ref[i];
2257             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2258             short_ref_count = i;
2259         }
2260
2261         if (priv->long_ref_count > 0) {
2262             for (i = 0; i < priv->long_ref_count; i++)
2263                 long_ref[i] = priv->long_ref[i];
2264             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2265             long_ref_count = i;
2266         }
2267
2268         init_picture_refs_fields(
2269             picture,
2270             priv->RefPicList0, &priv->RefPicList0_count,
2271             short_ref,          short_ref_count,
2272             long_ref,           long_ref_count
2273         );
2274     }
2275
2276     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2277         /* RefPicList0 */
2278         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2279     }
2280 }
2281
2282 static void
2283 init_picture_refs_b_slice(
2284     GstVaapiDecoderH264 *decoder,
2285     GstVaapiPictureH264 *picture,
2286     GstH264SliceHdr     *slice_hdr
2287 )
2288 {
2289     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2290     GstVaapiPictureH264 **ref_list;
2291     guint i, n;
2292
2293     GST_DEBUG("decode reference picture list for B slices");
2294
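    /* Initial RefPicList0 for B slices: short-term references with POC below
       the current POC in decreasing POC order, then the remaining short-term
       references in increasing POC order, then long-term references.
       RefPicList1 uses the mirrored short-term ordering. */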
2295     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2296         /* 8.2.4.2.3 - B slices in frames */
2297
2298         /* RefPicList0 */
2299         if (priv->short_ref_count > 0) {
2300             // 1. Short-term references
2301             ref_list = priv->RefPicList0;
2302             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2303                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2304                     ref_list[n++] = priv->short_ref[i];
2305             }
2306             SORT_REF_LIST(ref_list, n, poc_dec);
2307             priv->RefPicList0_count += n;
2308
2309             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2310             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2311                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2312                     ref_list[n++] = priv->short_ref[i];
2313             }
2314             SORT_REF_LIST(ref_list, n, poc_inc);
2315             priv->RefPicList0_count += n;
2316         }
2317
2318         if (priv->long_ref_count > 0) {
2319             // 2. Long-term references
2320             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2321             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2322                 ref_list[n++] = priv->long_ref[i];
2323             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2324             priv->RefPicList0_count += n;
2325         }
2326
2327         /* RefPicList1 */
2328         if (priv->short_ref_count > 0) {
2329             // 1. Short-term references
2330             ref_list = priv->RefPicList1;
2331             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2332                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2333                     ref_list[n++] = priv->short_ref[i];
2334             }
2335             SORT_REF_LIST(ref_list, n, poc_inc);
2336             priv->RefPicList1_count += n;
2337
2338             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2339             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2340                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2341                     ref_list[n++] = priv->short_ref[i];
2342             }
2343             SORT_REF_LIST(ref_list, n, poc_dec);
2344             priv->RefPicList1_count += n;
2345         }
2346
2347         if (priv->long_ref_count > 0) {
2348             // 2. Long-term references
2349             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2350             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2351                 ref_list[n++] = priv->long_ref[i];
2352             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2353             priv->RefPicList1_count += n;
2354         }
2355     }
2356     else {
2357         /* 8.2.4.2.4 - B slices in fields */
2358         GstVaapiPictureH264 *short_ref0[32];
2359         guint short_ref0_count = 0;
2360         GstVaapiPictureH264 *short_ref1[32];
2361         guint short_ref1_count = 0;
2362         GstVaapiPictureH264 *long_ref[32];
2363         guint long_ref_count = 0;
2364
2365         /* refFrameList0ShortTerm */
2366         if (priv->short_ref_count > 0) {
2367             ref_list = short_ref0;
2368             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2369                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2370                     ref_list[n++] = priv->short_ref[i];
2371             }
2372             SORT_REF_LIST(ref_list, n, poc_dec);
2373             short_ref0_count += n;
2374
2375             ref_list = &short_ref0[short_ref0_count];
2376             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2377                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2378                     ref_list[n++] = priv->short_ref[i];
2379             }
2380             SORT_REF_LIST(ref_list, n, poc_inc);
2381             short_ref0_count += n;
2382         }
2383
2384         /* refFrameList1ShortTerm */
2385         if (priv->short_ref_count > 0) {
2386             ref_list = short_ref1;
2387             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2388                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2389                     ref_list[n++] = priv->short_ref[i];
2390             }
2391             SORT_REF_LIST(ref_list, n, poc_inc);
2392             short_ref1_count += n;
2393
2394             ref_list = &short_ref1[short_ref1_count];
2395             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2396                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2397                     ref_list[n++] = priv->short_ref[i];
2398             }
2399             SORT_REF_LIST(ref_list, n, poc_dec);
2400             short_ref1_count += n;
2401         }
2402
2403         /* refFrameListLongTerm */
2404         if (priv->long_ref_count > 0) {
2405             for (i = 0; i < priv->long_ref_count; i++)
2406                 long_ref[i] = priv->long_ref[i];
2407             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2408             long_ref_count = i;
2409         }
2410
2411         init_picture_refs_fields(
2412             picture,
2413             priv->RefPicList0, &priv->RefPicList0_count,
2414             short_ref0,         short_ref0_count,
2415             long_ref,           long_ref_count
2416         );
2417
2418         init_picture_refs_fields(
2419             picture,
2420             priv->RefPicList1, &priv->RefPicList1_count,
2421             short_ref1,         short_ref1_count,
2422             long_ref,           long_ref_count
2423         );
2424     }
2425
2426     /* If RefPicList1 is identical to RefPicList0, swap its first two
2427        entries, as the specification requires */
2428     if (priv->RefPicList1_count > 1 &&
2429         priv->RefPicList1_count == priv->RefPicList0_count &&
2430         memcmp(priv->RefPicList0, priv->RefPicList1,
2431                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2432         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2433         priv->RefPicList1[0] = priv->RefPicList1[1];
2434         priv->RefPicList1[1] = tmp;
2435     }
2436
2437     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2438         /* RefPicList0 */
2439         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2440
2441         /* RefPicList1 */
2442         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2443     }
2444 }
2445
2446 #undef SORT_REF_LIST
2447
2448 static gint
2449 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2450 {
2451     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2452     guint i;
2453
2454     for (i = 0; i < priv->short_ref_count; i++) {
2455         if (priv->short_ref[i]->pic_num == pic_num)
2456             return i;
2457     }
2458     GST_ERROR("found no short-term reference picture with PicNum = %d",
2459               pic_num);
2460     return -1;
2461 }
2462
2463 static gint
2464 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2465 {
2466     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2467     guint i;
2468
2469     for (i = 0; i < priv->long_ref_count; i++) {
2470         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2471             return i;
2472     }
2473     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2474               long_term_pic_num);
2475     return -1;
2476 }
2477
2478 static void
2479 exec_picture_refs_modification_1(
2480     GstVaapiDecoderH264           *decoder,
2481     GstVaapiPictureH264           *picture,
2482     GstH264SliceHdr               *slice_hdr,
2483     guint                          list
2484 )
2485 {
2486     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2487     GstH264SPS * const sps = get_sps(decoder);
2488     GstH264RefPicListModification *ref_pic_list_modification;
2489     guint num_ref_pic_list_modifications;
2490     GstVaapiPictureH264 **ref_list;
2491     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2492     const guint16 *view_ids = NULL;
2493     guint i, j, n, num_refs, num_view_ids = 0;
2494     gint found_ref_idx;
2495     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2496
2497     GST_DEBUG("modification process of reference picture list %u", list);
2498
2499     if (list == 0) {
2500         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2501         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2502         ref_list                       = priv->RefPicList0;
2503         ref_list_count_ptr             = &priv->RefPicList0_count;
2504         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2505
2506         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2507             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2508             const GstH264SPSExtMVCView * const view =
2509                 &sps->extension.mvc.view[picture->base.voc];
2510             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2511                 view_ids = view->anchor_ref_l0;
2512                 num_view_ids = view->num_anchor_refs_l0;
2513             }
2514             else {
2515                 view_ids = view->non_anchor_ref_l0;
2516                 num_view_ids = view->num_non_anchor_refs_l0;
2517             }
2518         }
2519     }
2520     else {
2521         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2522         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2523         ref_list                       = priv->RefPicList1;
2524         ref_list_count_ptr             = &priv->RefPicList1_count;
2525         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2526
2527         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2528             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2529             const GstH264SPSExtMVCView * const view =
2530                 &sps->extension.mvc.view[picture->base.voc];
2531             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2532                 view_ids = view->anchor_ref_l1;
2533                 num_view_ids = view->num_anchor_refs_l1;
2534             }
2535             else {
2536                 view_ids = view->non_anchor_ref_l1;
2537                 num_view_ids = view->num_non_anchor_refs_l1;
2538             }
2539         }
2540     }
2541     ref_list_count = *ref_list_count_ptr;
2542
2543     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2544         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2545         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2546     }
2547     else {
2548         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2549         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2550     }
2551
2552     picNumPred = CurrPicNum;
2553     picViewIdxPred = -1;
2554
2555     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2556         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2557         if (l->modification_of_pic_nums_idc == 3)
2558             break;
2559
2560         /* 8.2.4.3.1 - Short-term reference pictures */
2561         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2562             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2563             gint32 picNum, picNumNoWrap;
2564
2565             // (8-34)
2566             if (l->modification_of_pic_nums_idc == 0) {
2567                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2568                 if (picNumNoWrap < 0)
2569                     picNumNoWrap += MaxPicNum;
2570             }
2571
2572             // (8-35)
2573             else {
2574                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2575                 if (picNumNoWrap >= MaxPicNum)
2576                     picNumNoWrap -= MaxPicNum;
2577             }
2578             picNumPred = picNumNoWrap;
2579
2580             // (8-36)
2581             picNum = picNumNoWrap;
2582             if (picNum > CurrPicNum)
2583                 picNum -= MaxPicNum;
2584
2585             // (8-37)
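            /* Shift later entries up, place the selected short-term picture
               at ref_list_idx, then drop any subsequent entry that refers to
               the same picture */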
2586             for (j = num_refs; j > ref_list_idx; j--)
2587                 ref_list[j] = ref_list[j - 1];
2588             found_ref_idx = find_short_term_reference(decoder, picNum);
2589             ref_list[ref_list_idx++] =
2590                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2591             n = ref_list_idx;
2592             for (j = ref_list_idx; j <= num_refs; j++) {
2593                 gint32 PicNumF;
2594                 if (!ref_list[j])
2595                     continue;
2596                 PicNumF =
2597                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2598                     ref_list[j]->pic_num : MaxPicNum;
2599                 if (PicNumF != picNum ||
2600                     ref_list[j]->base.view_id != picture->base.view_id)
2601                     ref_list[n++] = ref_list[j];
2602             }
2603         }
2604
2605         /* 8.2.4.3.2 - Long-term reference pictures */
2606         else if (l->modification_of_pic_nums_idc == 2) {
2607
2608             for (j = num_refs; j > ref_list_idx; j--)
2609                 ref_list[j] = ref_list[j - 1];
2610             found_ref_idx =
2611                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2612             ref_list[ref_list_idx++] =
2613                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2614             n = ref_list_idx;
2615             for (j = ref_list_idx; j <= num_refs; j++) {
2616                 gint32 LongTermPicNumF;
2617                 if (!ref_list[j])
2618                     continue;
2619                 LongTermPicNumF =
2620                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2621                     ref_list[j]->long_term_pic_num : INT_MAX;
2622                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2623                     ref_list[j]->base.view_id != picture->base.view_id)
2624                     ref_list[n++] = ref_list[j];
2625             }
2626         }
2627
2628         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2629         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2630                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2631                  (l->modification_of_pic_nums_idc == 4 ||
2632                   l->modification_of_pic_nums_idc == 5)) {
2633             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2634             gint32 picViewIdx, targetViewId;
2635
2636             // (H-6)
2637             if (l->modification_of_pic_nums_idc == 4) {
2638                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2639                 if (picViewIdx < 0)
2640                     picViewIdx += num_view_ids;
2641             }
2642
2643             // (H-7)
2644             else {
2645                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2646                 if (picViewIdx >= num_view_ids)
2647                     picViewIdx -= num_view_ids;
2648             }
2649             picViewIdxPred = picViewIdx;
2650
2651             // (H-8, H-9)
2652             targetViewId = view_ids[picViewIdx];
2653
2654             // (H-10)
2655             for (j = num_refs; j > ref_list_idx; j--)
2656                 ref_list[j] = ref_list[j - 1];
2657             ref_list[ref_list_idx++] =
2658                 find_inter_view_reference(decoder, targetViewId);
2659             n = ref_list_idx;
2660             for (j = ref_list_idx; j <= num_refs; j++) {
2661                 if (!ref_list[j])
2662                     continue;
2663                 if (ref_list[j]->base.view_id != targetViewId ||
2664                     ref_list[j]->base.poc != picture->base.poc)
2665                     ref_list[n++] = ref_list[j];
2666             }
2667         }
2668     }
2669
2670 #if DEBUG
2671     for (i = 0; i < num_refs; i++)
2672         if (!ref_list[i])
2673             GST_ERROR("list %u entry %u is empty", list, i);
2674 #endif
2675     *ref_list_count_ptr = num_refs;
2676 }
2677
2678 /* 8.2.4.3 - Modification process for reference picture lists */
2679 static void
2680 exec_picture_refs_modification(
2681     GstVaapiDecoderH264 *decoder,
2682     GstVaapiPictureH264 *picture,
2683     GstH264SliceHdr     *slice_hdr
2684 )
2685 {
2686     GST_DEBUG("execute ref_pic_list_modification()");
2687
2688     /* RefPicList0 */
2689     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2690         slice_hdr->ref_pic_list_modification_flag_l0)
2691         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2692
2693     /* RefPicList1 */
2694     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2695         slice_hdr->ref_pic_list_modification_flag_l1)
2696         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2697 }
2698
2699 static void
2700 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2701     GstVaapiPictureH264 *picture)
2702 {
2703     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2704     guint i, j, short_ref_count, long_ref_count;
2705
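    /* Rebuild the short-term and long-term reference arrays from the DPB,
       keeping only pictures belonging to the same view as the current
       picture (H.8.2), as whole frames or as individual fields depending
       on the current picture structure */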
2706     short_ref_count = 0;
2707     long_ref_count  = 0;
2708     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2709         for (i = 0; i < priv->dpb_count; i++) {
2710             GstVaapiFrameStore * const fs = priv->dpb[i];
2711             GstVaapiPictureH264 *pic;
2712             if (!gst_vaapi_frame_store_has_frame(fs))
2713                 continue;
2714             pic = fs->buffers[0];
2715             if (pic->base.view_id != picture->base.view_id)
2716                 continue;
2717             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2718                 priv->short_ref[short_ref_count++] = pic;
2719             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2720                 priv->long_ref[long_ref_count++] = pic;
2721             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2722             pic->other_field = fs->buffers[1];
2723         }
2724     }
2725     else {
2726         for (i = 0; i < priv->dpb_count; i++) {
2727             GstVaapiFrameStore * const fs = priv->dpb[i];
2728             for (j = 0; j < fs->num_buffers; j++) {
2729                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2730                 if (pic->base.view_id != picture->base.view_id)
2731                     continue;
2732                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2733                     priv->short_ref[short_ref_count++] = pic;
2734                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2735                     priv->long_ref[long_ref_count++] = pic;
2736                 pic->structure = pic->base.structure;
2737                 pic->other_field = fs->buffers[j ^ 1];
2738             }
2739         }
2740     }
2741
2742     for (i = short_ref_count; i < priv->short_ref_count; i++)
2743         priv->short_ref[i] = NULL;
2744     priv->short_ref_count = short_ref_count;
2745
2746     for (i = long_ref_count; i < priv->long_ref_count; i++)
2747         priv->long_ref[i] = NULL;
2748     priv->long_ref_count = long_ref_count;
2749 }
2750
2751 static void
2752 init_picture_refs(
2753     GstVaapiDecoderH264 *decoder,
2754     GstVaapiPictureH264 *picture,
2755     GstH264SliceHdr     *slice_hdr
2756 )
2757 {
2758     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2759     guint i, num_refs;
2760
2761     init_picture_ref_lists(decoder, picture);
2762     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2763
2764     priv->RefPicList0_count = 0;
2765     priv->RefPicList1_count = 0;
2766
2767     switch (slice_hdr->type % 5) {
2768     case GST_H264_P_SLICE:
2769     case GST_H264_SP_SLICE:
2770         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2771         break;
2772     case GST_H264_B_SLICE:
2773         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2774         break;
2775     default:
2776         break;
2777     }
2778
2779     exec_picture_refs_modification(decoder, picture, slice_hdr);
2780
2781     switch (slice_hdr->type % 5) {
2782     case GST_H264_B_SLICE:
2783         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2784         for (i = priv->RefPicList1_count; i < num_refs; i++)
2785             priv->RefPicList1[i] = NULL;
2786         priv->RefPicList1_count = num_refs;
2787
2788         // fall-through
2789     case GST_H264_P_SLICE:
2790     case GST_H264_SP_SLICE:
2791         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2792         for (i = priv->RefPicList0_count; i < num_refs; i++)
2793             priv->RefPicList0[i] = NULL;
2794         priv->RefPicList0_count = num_refs;
2795         break;
2796     default:
2797         break;
2798     }
2799 }
2800
2801 static gboolean
2802 init_picture(
2803     GstVaapiDecoderH264 *decoder,
2804     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2805 {
2806     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2807     GstVaapiPicture * const base_picture = &picture->base;
2808     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2809
2810     priv->prev_frame_num        = priv->frame_num;
2811     priv->frame_num             = slice_hdr->frame_num;
2812     picture->frame_num          = priv->frame_num;
2813     picture->frame_num_wrap     = priv->frame_num;
2814     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2815     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2816     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2817     base_picture->view_id       = pi->view_id;
2818     base_picture->voc           = pi->voc;
2819
2820     /* Initialize extensions */
2821     switch (pi->nalu.extension_type) {
2822     case GST_H264_NAL_EXTENSION_MVC: {
2823         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2824
2825         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2826         if (mvc->inter_view_flag)
2827             GST_VAAPI_PICTURE_FLAG_SET(picture,
2828                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2829         if (mvc->anchor_pic_flag)
2830             GST_VAAPI_PICTURE_FLAG_SET(picture,
2831                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2832         break;
2833     }
2834     }
2835
2836     /* Reset decoder state for IDR pictures */
2837     if (pi->nalu.idr_pic_flag) {
2838         GST_DEBUG("<IDR>");
2839         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2840         dpb_flush(decoder, picture);
2841     }
2842
2843     /* Initialize picture structure */
2844     if (!slice_hdr->field_pic_flag)
2845         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2846     else {
2847         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2848         if (!slice_hdr->bottom_field_flag)
2849             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2850         else
2851             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2852     }
2853     picture->structure = base_picture->structure;
2854
2855     /* Initialize reference flags */
2856     if (pi->nalu.ref_idc) {
2857         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2858             &slice_hdr->dec_ref_pic_marking;
2859
2860         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2861             dec_ref_pic_marking->long_term_reference_flag)
2862             GST_VAAPI_PICTURE_FLAG_SET(picture,
2863                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2864         else
2865             GST_VAAPI_PICTURE_FLAG_SET(picture,
2866                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2867     }
2868
2869     init_picture_poc(decoder, picture, slice_hdr);
2870     return TRUE;
2871 }
2872
2873 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2874 static gboolean
2875 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2876 {
2877     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2878     GstH264SPS * const sps = get_sps(decoder);
2879     GstVaapiPictureH264 *ref_picture;
2880     guint i, m, max_num_ref_frames;
2881
2882     GST_DEBUG("reference picture marking process (sliding window)");
2883
2884     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
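    /* Once the number of references reaches max_num_ref_frames, the
       short-term reference with the smallest FrameNumWrap (the oldest one)
       is marked as "unused for reference"; the limit is doubled for field
       pictures since each frame contributes two fields */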
2885         return TRUE;
2886
2887     max_num_ref_frames = sps->num_ref_frames;
2888     if (max_num_ref_frames == 0)
2889         max_num_ref_frames = 1;
2890     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2891         max_num_ref_frames <<= 1;
2892
2893     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2894         return TRUE;
2895     if (priv->short_ref_count < 1)
2896         return FALSE;
2897
2898     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2899         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2900         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2901             m = i;
2902     }
2903
2904     ref_picture = priv->short_ref[m];
2905     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2906     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2907
2908     /* Both fields need to be marked as "unused for reference", so
2909        remove the other field from the short_ref[] list as well */
2910     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2911         for (i = 0; i < priv->short_ref_count; i++) {
2912             if (priv->short_ref[i] == ref_picture->other_field) {
2913                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2914                 break;
2915             }
2916         }
2917     }
2918     return TRUE;
2919 }
2920
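/* Derives picNumX, the picture number of the short-term reference to operate
   on, as CurrPicNum minus (difference_of_pic_nums_minus1 + 1) per 8.2.5.4.1 */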
2921 static inline gint32
2922 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2923 {
2924     gint32 pic_num;
2925
2926     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2927         pic_num = picture->frame_num_wrap;
2928     else
2929         pic_num = 2 * picture->frame_num_wrap + 1;
2930     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2931     return pic_num;
2932 }
2933
2934 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2935 static void
2936 exec_ref_pic_marking_adaptive_mmco_1(
2937     GstVaapiDecoderH264  *decoder,
2938     GstVaapiPictureH264  *picture,
2939     GstH264RefPicMarking *ref_pic_marking
2940 )
2941 {
2942     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2943     gint32 i, picNumX;
2944
2945     picNumX = get_picNumX(picture, ref_pic_marking);
2946     i = find_short_term_reference(decoder, picNumX);
2947     if (i < 0)
2948         return;
2949
2950     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2951         GST_VAAPI_PICTURE_IS_FRAME(picture));
2952     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2953 }
2954
2955 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2956 static void
2957 exec_ref_pic_marking_adaptive_mmco_2(
2958     GstVaapiDecoderH264  *decoder,
2959     GstVaapiPictureH264  *picture,
2960     GstH264RefPicMarking *ref_pic_marking
2961 )
2962 {
2963     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2964     gint32 i;
2965
2966     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2967     if (i < 0)
2968         return;
2969
2970     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2971         GST_VAAPI_PICTURE_IS_FRAME(picture));
2972     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2973 }
2974
2975 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2976 static void
2977 exec_ref_pic_marking_adaptive_mmco_3(
2978     GstVaapiDecoderH264  *decoder,
2979     GstVaapiPictureH264  *picture,
2980     GstH264RefPicMarking *ref_pic_marking
2981 )
2982 {
2983     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2984     GstVaapiPictureH264 *ref_picture, *other_field;
2985     gint32 i, picNumX;
2986
2987     for (i = 0; i < priv->long_ref_count; i++) {
2988         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2989             break;
2990     }
2991     if (i != priv->long_ref_count) {
2992         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2993         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2994     }
2995
2996     picNumX = get_picNumX(picture, ref_pic_marking);
2997     i = find_short_term_reference(decoder, picNumX);
2998     if (i < 0)
2999         return;
3000
3001     ref_picture = priv->short_ref[i];
3002     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3003     priv->long_ref[priv->long_ref_count++] = ref_picture;
3004
3005     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3006     gst_vaapi_picture_h264_set_reference(ref_picture,
3007         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3008         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3009
3010     /* Assign LongTermFrameIdx to the other field if it was also
3011        marked as "used for long-term reference" */
3012     other_field = ref_picture->other_field;
3013     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3014         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3015 }
3016
3017 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3018  * as "unused for reference" */
3019 static void
3020 exec_ref_pic_marking_adaptive_mmco_4(
3021     GstVaapiDecoderH264  *decoder,
3022     GstVaapiPictureH264  *picture,
3023     GstH264RefPicMarking *ref_pic_marking
3024 )
3025 {
3026     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3027     gint32 i, long_term_frame_idx;
3028
3029     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3030
3031     for (i = 0; i < priv->long_ref_count; i++) {
3032         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3033             continue;
3034         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3035         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3036         i--;
3037     }
3038 }
3039
3040 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3041 static void
3042 exec_ref_pic_marking_adaptive_mmco_5(
3043     GstVaapiDecoderH264  *decoder,
3044     GstVaapiPictureH264  *picture,
3045     GstH264RefPicMarking *ref_pic_marking
3046 )
3047 {
3048     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3049
3050     dpb_flush(decoder, picture);
3051
3052     priv->prev_pic_has_mmco5 = TRUE;
3053
3054     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3055     priv->frame_num = 0;
3056     priv->frame_num_offset = 0;
3057     picture->frame_num = 0;
3058
3059     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
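    /* base.poc holds the picture's PicOrderCnt, so subtracting it
       re-bases both field POCs relative to the current picture, which
       then becomes the new POC origin (base.poc = 0) */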
3060     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3061         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3062     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3063         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3064     picture->base.poc = 0;
3065 }
3066
3067 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3068 static void
3069 exec_ref_pic_marking_adaptive_mmco_6(
3070     GstVaapiDecoderH264  *decoder,
3071     GstVaapiPictureH264  *picture,
3072     GstH264RefPicMarking *ref_pic_marking
3073 )
3074 {
3075     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3076     GstVaapiPictureH264 *other_field;
3077     guint i;
3078
3079     for (i = 0; i < priv->long_ref_count; i++) {
3080         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3081             break;
3082     }
3083     if (i != priv->long_ref_count) {
3084         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3085         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3086     }
3087
3088     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3089     gst_vaapi_picture_h264_set_reference(picture,
3090         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3091         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3092
3093     /* Assign LongTermFrameIdx to the other field if it was also
3094        marked as "used for long-term reference" */
3095     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3096     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3097         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3098 }
3099
3100 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3101 static gboolean
3102 exec_ref_pic_marking_adaptive(
3103     GstVaapiDecoderH264     *decoder,
3104     GstVaapiPictureH264     *picture,
3105     GstH264DecRefPicMarking *dec_ref_pic_marking
3106 )
3107 {
3108     guint i;
3109
3110     GST_DEBUG("reference picture marking process (adaptive memory control)");
3111
3112     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3113         GstVaapiDecoderH264  *decoder,
3114         GstVaapiPictureH264  *picture,
3115         GstH264RefPicMarking *ref_pic_marking
3116     );
3117
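    /* Handlers indexed by memory_management_control_operation; MMCO 0
       only terminates the marking list in the bitstream and thus has
       no handler */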
3118     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3119         NULL,
3120         exec_ref_pic_marking_adaptive_mmco_1,
3121         exec_ref_pic_marking_adaptive_mmco_2,
3122         exec_ref_pic_marking_adaptive_mmco_3,
3123         exec_ref_pic_marking_adaptive_mmco_4,
3124         exec_ref_pic_marking_adaptive_mmco_5,
3125         exec_ref_pic_marking_adaptive_mmco_6,
3126     };
3127
3128     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3129         GstH264RefPicMarking * const ref_pic_marking =
3130             &dec_ref_pic_marking->ref_pic_marking[i];
3131
3132         const guint mmco = ref_pic_marking->memory_management_control_operation;
3133         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3134             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3135         else {
3136             GST_ERROR("unhandled MMCO %u", mmco);
3137             return FALSE;
3138         }
3139     }
3140     return TRUE;
3141 }
3142
3143 /* 8.2.5 - Execute reference picture marking process */
3144 static gboolean
3145 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3146 {
3147     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3148
3149     priv->prev_pic_has_mmco5 = FALSE;
3150     priv->prev_pic_structure = picture->structure;
3151
3152     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3153         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3154
3155     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3156         return TRUE;
3157
3158     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3159         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3160             &picture->last_slice_hdr->dec_ref_pic_marking;
3161         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3162             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3163                 return FALSE;
3164         }
3165         else {
3166             if (!exec_ref_pic_marking_sliding_window(decoder))
3167                 return FALSE;
3168         }
3169     }
3170     return TRUE;
3171 }
3172
3173 static void
3174 vaapi_init_picture(VAPictureH264 *pic)
3175 {
3176     pic->picture_id           = VA_INVALID_ID;
3177     pic->frame_idx            = 0;
3178     pic->flags                = VA_PICTURE_H264_INVALID;
3179     pic->TopFieldOrderCnt     = 0;
3180     pic->BottomFieldOrderCnt  = 0;
3181 }
3182
3183 static void
3184 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3185     guint picture_structure)
3186 {
3187     if (!picture_structure)
3188         picture_structure = picture->structure;
3189
3190     pic->picture_id = picture->base.surface_id;
3191     pic->flags = 0;
3192
3193     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3194         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3195         pic->frame_idx = picture->long_term_frame_idx;
3196     }
3197     else {
3198         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3199             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3200         pic->frame_idx = picture->frame_num;
3201     }
3202
3203     switch (picture_structure) {
3204     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3205         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3206         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3207         break;
3208     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3209         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3210         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3211         pic->BottomFieldOrderCnt = 0;
3212         break;
3213     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3214         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3215         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3216         pic->TopFieldOrderCnt = 0;
3217         break;
3218     }
3219 }
3220
3221 static void
3222 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3223     GstVaapiPictureH264 *picture)
3224 {
3225     vaapi_fill_picture(pic, picture, 0);
3226
3227     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3228     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3229         /* The inter-view reference components and inter-view only
3230            reference components that are included in the reference
3231            picture lists are considered as not being marked as "used for
3232            short-term reference" or "used for long-term reference" */
3233         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3234                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3235     }
3236 }
3237
3238 static gboolean
3239 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3240 {
3241     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3242     GstVaapiPicture * const base_picture = &picture->base;
3243     GstH264PPS * const pps = get_pps(decoder);
3244     GstH264SPS * const sps = get_sps(decoder);
3245     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3246     guint i, n;
3247
3248     /* Fill in VAPictureParameterBufferH264 */
3249     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3250
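    /* Collect the reference frames of the current view, plus any
       inter-view references usable by this picture, up to the size of
       the ReferenceFrames[] array; the remaining entries are marked
       invalid below */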
3251     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3252         GstVaapiFrameStore * const fs = priv->dpb[i];
3253         if ((gst_vaapi_frame_store_has_reference(fs) &&
3254              fs->view_id == picture->base.view_id) ||
3255             (gst_vaapi_frame_store_has_inter_view(fs) &&
3256              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3257             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3258                 fs->buffers[0], fs->structure);
3259         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3260             break;
3261     }
3262     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3263         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3264
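/* Helpers to copy identically-named SPS/PPS fields into the VA picture
   parameter buffer, either as plain fields or as bitfield members */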
3265 #define COPY_FIELD(s, f) \
3266     pic_param->f = (s)->f
3267
3268 #define COPY_BFM(a, s, f) \
3269     pic_param->a.bits.f = (s)->f
3270
3271     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3272     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3273     pic_param->frame_num                    = priv->frame_num;
3274
3275     COPY_FIELD(sps, bit_depth_luma_minus8);
3276     COPY_FIELD(sps, bit_depth_chroma_minus8);
3277     COPY_FIELD(sps, num_ref_frames);
3278     COPY_FIELD(pps, num_slice_groups_minus1);
3279     COPY_FIELD(pps, slice_group_map_type);
3280     COPY_FIELD(pps, slice_group_change_rate_minus1);
3281     COPY_FIELD(pps, pic_init_qp_minus26);
3282     COPY_FIELD(pps, pic_init_qs_minus26);
3283     COPY_FIELD(pps, chroma_qp_index_offset);
3284     COPY_FIELD(pps, second_chroma_qp_index_offset);
3285
3286     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3287     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3288     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3289
3290     COPY_BFM(seq_fields, sps, chroma_format_idc);
3291     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3292     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3293     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3294     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3295     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3296     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3297     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3298     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3299
3300     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3301     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3302     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3303
3304     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3305     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3306     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3307     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3308     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3309     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3310     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3311     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3312     return TRUE;
3313 }
3314
3315 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3316 static gboolean
3317 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3318 {
3319     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3320     GstH264PPS * const pps = slice_hdr->pps;
3321     GstH264SPS * const sps = pps->sequence;
3322     GstH264SliceHdr *prev_slice_hdr;
3323
3324     if (!prev_pi)
3325         return TRUE;
3326     prev_slice_hdr = &prev_pi->data.slice_hdr;
3327
3328 #define CHECK_EXPR(expr, field_name) do {              \
3329         if (!(expr)) {                                 \
3330             GST_DEBUG(field_name " differs in value"); \
3331             return TRUE;                               \
3332         }                                              \
3333     } while (0)
3334
3335 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3336     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3337
3338     /* view_id differs in value and VOIdx of current slice_hdr is less
3339        than the VOIdx of the prev_slice_hdr */
3340     CHECK_VALUE(pi, prev_pi, view_id);
3341
3342     /* frame_num differs in value, regardless of inferred values to 0 */
3343     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3344
3345     /* pic_parameter_set_id differs in value */
3346     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3347
3348     /* field_pic_flag differs in value */
3349     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3350
3351     /* bottom_field_flag is present in both and differs in value */
3352     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3353         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3354
3355     /* nal_ref_idc differs in value, with one of the nal_ref_idc values being equal to 0 */
3356     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3357                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3358
3359     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3360        value or delta_pic_order_cnt_bottom differs in value */
3361     if (sps->pic_order_cnt_type == 0) {
3362         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3363         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3364             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3365     }
3366
3367     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3368        differs in value or delta_pic_order_cnt[1] differs in value */
3369     else if (sps->pic_order_cnt_type == 1) {
3370         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3371         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3372     }
3373
3374     /* IdrPicFlag differs in value */
3375     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3376
3377     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3378     if (pi->nalu.idr_pic_flag)
3379         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3380
3381 #undef CHECK_EXPR
3382 #undef CHECK_VALUE
3383     return FALSE;
3384 }
3385
3386 /* Detection of a new access unit, assuming we are already in the
3387    presence of a new picture */
3388 static inline gboolean
3389 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3390 {
3391     if (!prev_pi || prev_pi->view_id == pi->view_id)
3392         return TRUE;
3393     return pi->voc < prev_pi->voc;
3394 }
3395
3396 /* Finds the first field picture corresponding to the supplied picture */
3397 static GstVaapiPictureH264 *
3398 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3399 {
3400     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3401     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3402     GstVaapiFrameStore *fs;
3403
3404     if (!slice_hdr->field_pic_flag)
3405         return NULL;
3406
3407     fs = priv->prev_frames[pi->voc];
3408     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3409         return NULL;
3410
3411     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3412         return fs->buffers[0];
3413     return NULL;
3414 }
3415
3416 static GstVaapiDecoderStatus
3417 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3418 {
3419     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3420     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3421     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3422     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3423     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3424     GstVaapiPictureH264 *picture, *first_field;
3425     GstVaapiDecoderStatus status;
3426
3427     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3428     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3429
3430     /* Only decode base stream for MVC */
3431     switch (sps->profile_idc) {
3432     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3433     case GST_H264_PROFILE_STEREO_HIGH:
3434         if (0) {
3435             GST_DEBUG("drop picture from substream");
3436             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3437         }
3438         break;
3439     }
3440
3441     status = ensure_context(decoder, sps);
3442     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3443         return status;
3444
3445     priv->decoder_state = 0;
3446
3447     first_field = find_first_field(decoder, pi);
3448     if (first_field) {
3449         /* Re-use current picture where the first field was decoded */
3450         picture = gst_vaapi_picture_h264_new_field(first_field);
3451         if (!picture) {
3452             GST_ERROR("failed to allocate field picture");
3453             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3454         }
3455     }
3456     else {
3457         /* Create new picture */
3458         picture = gst_vaapi_picture_h264_new(decoder);
3459         if (!picture) {
3460             GST_ERROR("failed to allocate picture");
3461             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3462         }
3463     }
3464     gst_vaapi_picture_replace(&priv->current_picture, picture);
3465     gst_vaapi_picture_unref(picture);
3466
3467     /* Clear inter-view references list if this is the primary coded
3468        picture of the current access unit */
3469     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3470         g_ptr_array_set_size(priv->inter_views, 0);
3471
3472     /* Update cropping rectangle */
3473     if (sps->frame_cropping_flag) {
3474         GstVaapiRectangle crop_rect;
3475         crop_rect.x = sps->crop_rect_x;
3476         crop_rect.y = sps->crop_rect_y;
3477         crop_rect.width = sps->crop_rect_width;
3478         crop_rect.height = sps->crop_rect_height;
3479         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3480     }
3481
3482     status = ensure_quant_matrix(decoder, picture);
3483     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3484         GST_ERROR("failed to reset quantizer matrix");
3485         return status;
3486     }
3487
3488     if (!init_picture(decoder, picture, pi))
3489         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3490     if (!fill_picture(decoder, picture))
3491         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3492
3493     priv->decoder_state = pi->state;
3494     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3495 }
3496
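/* Bit offset to the first bit of slice data within the NAL unit:
   8 bits per NAL header byte plus the parsed slice header size in bits,
   minus 8 bits for each emulation prevention byte counted inside the
   header */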
3497 static inline guint
3498 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3499 {
3500     guint epb_count;
3501
3502     epb_count = slice_hdr->n_emulation_prevention_bytes;
3503     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3504 }
3505
3506 static gboolean
3507 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3508     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3509 {
3510     VASliceParameterBufferH264 * const slice_param = slice->param;
3511     GstH264PPS * const pps = get_pps(decoder);
3512     GstH264SPS * const sps = get_sps(decoder);
3513     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3514     guint num_weight_tables = 0;
3515     gint i, j;
3516
3517     if (pps->weighted_pred_flag &&
3518         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3519         num_weight_tables = 1;
3520     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3521         num_weight_tables = 2;
3522     else
3523         num_weight_tables = 0;
3524
3525     slice_param->luma_log2_weight_denom   = 0;
3526     slice_param->chroma_log2_weight_denom = 0;
3527     slice_param->luma_weight_l0_flag      = 0;
3528     slice_param->chroma_weight_l0_flag    = 0;
3529     slice_param->luma_weight_l1_flag      = 0;
3530     slice_param->chroma_weight_l1_flag    = 0;
3531
3532     if (num_weight_tables < 1)
3533         return TRUE;
3534
3535     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3536     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3537
3538     slice_param->luma_weight_l0_flag = 1;
3539     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3540         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3541         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3542     }
3543
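    /* Chroma weights are only coded when ChromaArrayType is not 0,
       i.e. the stream is neither monochrome nor using separate colour
       planes (7.3.3.2) */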
3544     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3545     if (slice_param->chroma_weight_l0_flag) {
3546         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3547             for (j = 0; j < 2; j++) {
3548                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3549                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3550             }
3551         }
3552     }
3553
3554     if (num_weight_tables < 2)
3555         return TRUE;
3556
3557     slice_param->luma_weight_l1_flag = 1;
3558     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3559         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3560         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3561     }
3562
3563     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3564     if (slice_param->chroma_weight_l1_flag) {
3565         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3566             for (j = 0; j < 2; j++) {
3567                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3568                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3569             }
3570         }
3571     }
3572     return TRUE;
3573 }
3574
3575 static gboolean
3576 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3577     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3578 {
3579     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3580     VASliceParameterBufferH264 * const slice_param = slice->param;
3581     guint i, num_ref_lists = 0;
3582
3583     slice_param->num_ref_idx_l0_active_minus1 = 0;
3584     slice_param->num_ref_idx_l1_active_minus1 = 0;
3585
3586     if (GST_H264_IS_B_SLICE(slice_hdr))
3587         num_ref_lists = 2;
3588     else if (GST_H264_IS_I_SLICE(slice_hdr))
3589         num_ref_lists = 0;
3590     else
3591         num_ref_lists = 1;
3592
3593     if (num_ref_lists < 1)
3594         return TRUE;
3595
3596     slice_param->num_ref_idx_l0_active_minus1 =
3597         slice_hdr->num_ref_idx_l0_active_minus1;
3598
3599     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3600         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3601             priv->RefPicList0[i]);
3602     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3603         vaapi_init_picture(&slice_param->RefPicList0[i]);
3604
3605     if (num_ref_lists < 2)
3606         return TRUE;
3607
3608     slice_param->num_ref_idx_l1_active_minus1 =
3609         slice_hdr->num_ref_idx_l1_active_minus1;
3610
3611     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3612         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3613             priv->RefPicList1[i]);
3614     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3615         vaapi_init_picture(&slice_param->RefPicList1[i]);
3616     return TRUE;
3617 }
3618
3619 static gboolean
3620 fill_slice(GstVaapiDecoderH264 *decoder,
3621     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3622 {
3623     VASliceParameterBufferH264 * const slice_param = slice->param;
3624     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3625
3626     /* Fill in VASliceParameterBufferH264 */
3627     slice_param->slice_data_bit_offset =
3628         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3629     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3630     slice_param->slice_type                     = slice_hdr->type % 5;
3631     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3632     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3633     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3634     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3635     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3636     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3637
3638     if (!fill_RefPicList(decoder, slice, slice_hdr))
3639         return FALSE;
3640     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3641         return FALSE;
3642     return TRUE;
3643 }
3644
3645 static GstVaapiDecoderStatus
3646 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3647 {
3648     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3649     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3650     GstVaapiPictureH264 * const picture = priv->current_picture;
3651     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3652     GstVaapiSlice *slice;
3653     GstBuffer * const buffer =
3654         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3655     GstMapInfo map_info;
3656
3657     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3658
3659     if (!is_valid_state(pi->state,
3660             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3661         GST_WARNING("failed to receive enough headers to decode slice");
3662         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3663     }
3664
3665     if (!ensure_pps(decoder, slice_hdr->pps)) {
3666         GST_ERROR("failed to activate PPS");
3667         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3668     }
3669
3670     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3671         GST_ERROR("failed to activate SPS");
3672         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3673     }
3674
3675     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3676         GST_ERROR("failed to map buffer");
3677         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3678     }
3679
3680     /* Check whether this is the first/last slice in the current access unit */
3681     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3682         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3683     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3684         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3685
3686     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3687         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3688     gst_buffer_unmap(buffer, &map_info);
3689     if (!slice) {
3690         GST_ERROR("failed to allocate slice");
3691         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3692     }
3693
3694     init_picture_refs(decoder, picture, slice_hdr);
3695     if (!fill_slice(decoder, slice, pi)) {
3696         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3697         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3698     }
3699
3700     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3701     picture->last_slice_hdr = slice_hdr;
3702     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3703     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3704 }
3705
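/* Scans for a 00 00 01 start code prefix: with mask 0xffffff00 and
   pattern 0x00000100, any 32-bit window whose first three bytes are
   00 00 01 matches, regardless of the fourth byte */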
3706 static inline gint
3707 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3708 {
3709     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3710                                                      0xffffff00, 0x00000100,
3711                                                      ofs, size,
3712                                                      scp);
3713 }
3714
3715 static GstVaapiDecoderStatus
3716 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3717 {
3718     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3719     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3720     GstVaapiDecoderStatus status;
3721
3722     priv->decoder_state |= pi->state;
3723     switch (pi->nalu.type) {
3724     case GST_H264_NAL_SPS:
3725         status = decode_sps(decoder, unit);
3726         break;
3727     case GST_H264_NAL_SUBSET_SPS:
3728         status = decode_subset_sps(decoder, unit);
3729         break;
3730     case GST_H264_NAL_PPS:
3731         status = decode_pps(decoder, unit);
3732         break;
3733     case GST_H264_NAL_SLICE_EXT:
3734     case GST_H264_NAL_SLICE_IDR:
3735         /* fall-through. IDR specifics are handled in init_picture() */
3736     case GST_H264_NAL_SLICE:
3737         status = decode_slice(decoder, unit);
3738         break;
3739     case GST_H264_NAL_SEQ_END:
3740     case GST_H264_NAL_STREAM_END:
3741         status = decode_sequence_end(decoder);
3742         break;
3743     case GST_H264_NAL_SEI:
3744         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3745         break;
3746     default:
3747         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3748         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3749         break;
3750     }
3751     return status;
3752 }
3753
3754 static GstVaapiDecoderStatus
3755 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3756     const guchar *buf, guint buf_size)
3757 {
3758     GstVaapiDecoderH264 * const decoder =
3759         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3760     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3761     GstVaapiDecoderStatus status;
3762     GstVaapiDecoderUnit unit;
3763     GstVaapiParserInfoH264 *pi = NULL;
3764     GstH264ParserResult result;
3765     guint i, ofs, num_sps, num_pps;
3766
3767     unit.parsed_info = NULL;
3768
3769     if (buf_size < 8)
3770         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3771
3772     if (buf[0] != 1) {
3773         GST_ERROR("failed to decode codec-data, not in avcC format");
3774         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3775     }
3776
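    /* avcC layout (ISO/IEC 14496-15): buf[0] = configurationVersion,
       buf[4] & 0x03 = lengthSizeMinusOne, buf[5] & 0x1f = numOfSPS,
       then each SPS/PPS NAL unit is prefixed with a 2-byte length,
       with a 1-byte PPS count in between */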
3777     priv->nal_length_size = (buf[4] & 0x03) + 1;
3778
3779     num_sps = buf[5] & 0x1f;
3780     ofs = 6;
3781
3782     for (i = 0; i < num_sps; i++) {
3783         pi = gst_vaapi_parser_info_h264_new();
3784         if (!pi)
3785             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3786         unit.parsed_info = pi;
3787
3788         result = gst_h264_parser_identify_nalu_avc(
3789             priv->parser,
3790             buf, ofs, buf_size, 2,
3791             &pi->nalu
3792         );
3793         if (result != GST_H264_PARSER_OK) {
3794             status = get_status(result);
3795             goto cleanup;
3796         }
3797
3798         status = parse_sps(decoder, &unit);
3799         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3800             goto cleanup;
3801         ofs = pi->nalu.offset + pi->nalu.size;
3802
3803         status = decode_sps(decoder, &unit);
3804         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3805             goto cleanup;
3806         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3807     }
3808
3809     num_pps = buf[ofs];
3810     ofs++;
3811
3812     for (i = 0; i < num_pps; i++) {
3813         pi = gst_vaapi_parser_info_h264_new();
3814         if (!pi)
3815             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3816         unit.parsed_info = pi;
3817
3818         result = gst_h264_parser_identify_nalu_avc(
3819             priv->parser,
3820             buf, ofs, buf_size, 2,
3821             &pi->nalu
3822         );
3823         if (result != GST_H264_PARSER_OK) {
3824             status = get_status(result);
3825             goto cleanup;
3826         }
3827
3828         status = parse_pps(decoder, &unit);
3829         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3830             goto cleanup;
3831         ofs = pi->nalu.offset + pi->nalu.size;
3832
3833         status = decode_pps(decoder, &unit);
3834         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3835             goto cleanup;
3836         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3837     }
3838
3839     priv->is_avcC = TRUE;
3840     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3841
3842 cleanup:
3843     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3844     return status;
3845 }
3846
3847 static GstVaapiDecoderStatus
3848 ensure_decoder(GstVaapiDecoderH264 *decoder)
3849 {
3850     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3851     GstVaapiDecoderStatus status;
3852
3853     if (!priv->is_opened) {
3854         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3855         if (!priv->is_opened)
3856             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3857
3858         status = gst_vaapi_decoder_decode_codec_data(
3859             GST_VAAPI_DECODER_CAST(decoder));
3860         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3861             return status;
3862     }
3863     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3864 }
3865
3866 static GstVaapiDecoderStatus
3867 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3868     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3869 {
3870     GstVaapiDecoderH264 * const decoder =
3871         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3872     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3873     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3874     GstVaapiParserInfoH264 *pi;
3875     GstVaapiDecoderStatus status;
3876     GstH264ParserResult result;
3877     guchar *buf;
3878     guint i, size, buf_size, nalu_size, flags;
3879     guint32 start_code;
3880     gint ofs, ofs2;
3881
3882     status = ensure_decoder(decoder);
3883     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3884         return status;
3885
3886     size = gst_adapter_available(adapter);
3887
3888     if (priv->is_avcC) {
3889         if (size < priv->nal_length_size)
3890             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3891
3892         buf = (guchar *)&start_code;
3893         g_assert(priv->nal_length_size <= sizeof(start_code));
3894         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3895
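        /* The NAL unit size is stored big-endian in the first
           nal_length_size bytes of the access unit data */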
3896         nalu_size = 0;
3897         for (i = 0; i < priv->nal_length_size; i++)
3898             nalu_size = (nalu_size << 8) | buf[i];
3899
3900         buf_size = priv->nal_length_size + nalu_size;
3901         if (size < buf_size)
3902             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3903     }
3904     else {
3905         if (size < 4)
3906             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3907
3908         ofs = scan_for_start_code(adapter, 0, size, NULL);
3909         if (ofs < 0)
3910             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3911
3912         if (ofs > 0) {
3913             gst_adapter_flush(adapter, ofs);
3914             size -= ofs;
3915         }
3916
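        /* input_offset2 caches how far the previous parse call scanned
           without finding the next start code, so scanning resumes from
           there instead of rescanning the whole adapter */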
3917         ofs2 = ps->input_offset2 - ofs - 4;
3918         if (ofs2 < 4)
3919             ofs2 = 4;
3920
3921         ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3922             scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3923         if (ofs < 0) {
3924             // Assume the whole NAL unit is present if end-of-stream
3925             if (!at_eos) {
3926                 ps->input_offset2 = size;
3927                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3928             }
3929             ofs = size;
3930         }
3931         buf_size = ofs;
3932     }
3933     ps->input_offset2 = 0;
3934
3935     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3936     if (!buf)
3937         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3938
3939     unit->size = buf_size;
3940
3941     pi = gst_vaapi_parser_info_h264_new();
3942     if (!pi)
3943         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3944
3945     gst_vaapi_decoder_unit_set_parsed_info(unit,
3946         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3947
3948     if (priv->is_avcC)
3949         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3950             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3951     else
3952         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3953             buf, 0, buf_size, &pi->nalu);
3954     status = get_status(result);
3955     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3956         return status;
3957
3958     switch (pi->nalu.type) {
3959     case GST_H264_NAL_SPS:
3960         status = parse_sps(decoder, unit);
3961         break;
3962     case GST_H264_NAL_SUBSET_SPS:
3963         status = parse_subset_sps(decoder, unit);
3964         break;
3965     case GST_H264_NAL_PPS:
3966         status = parse_pps(decoder, unit);
3967         break;
3968     case GST_H264_NAL_SEI:
3969         status = parse_sei(decoder, unit);
3970         break;
3971     case GST_H264_NAL_SLICE_EXT:
3972         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3973             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3974             break;
3975         }
3976         /* fall-through */
3977     case GST_H264_NAL_SLICE_IDR:
3978     case GST_H264_NAL_SLICE:
3979         status = parse_slice(decoder, unit);
3980         break;
3981     default:
3982         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3983         break;
3984     }
3985     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3986         return status;
3987
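    /* Classify the NAL unit per 7.4.1.2.3: units that may only appear
       before the first VCL NAL unit of a primary coded picture (AUD,
       SPS, PPS, SEI, prefix, NAL types 14..18) mark an AU/frame start,
       while end-of-sequence and end-of-stream units mark an AU end */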
3988     flags = 0;
3989     switch (pi->nalu.type) {
3990     case GST_H264_NAL_AU_DELIMITER:
3991         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3992         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3993         /* fall-through */
3994     case GST_H264_NAL_FILLER_DATA:
3995         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3996         break;
3997     case GST_H264_NAL_STREAM_END:
3998         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3999         /* fall-through */
4000     case GST_H264_NAL_SEQ_END:
4001         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4002         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4003         break;
4004     case GST_H264_NAL_SPS:
4005     case GST_H264_NAL_SUBSET_SPS:
4006     case GST_H264_NAL_PPS:
4007     case GST_H264_NAL_SEI:
4008         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4009         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4010         break;
4011     case GST_H264_NAL_SLICE_EXT:
4012         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4013             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4014             break;
4015         }
4016         /* fall-through */
4017     case GST_H264_NAL_SLICE_IDR:
4018     case GST_H264_NAL_SLICE:
4019         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4020         if (is_new_picture(pi, priv->prev_slice_pi)) {
4021             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4022             if (is_new_access_unit(pi, priv->prev_slice_pi))
4023                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4024         }
4025         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4026         break;
4027     case GST_H264_NAL_SPS_EXT:
4028     case GST_H264_NAL_SLICE_AUX:
4029         /* skip SPS extension and auxiliary slice for now */
4030         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4031         break;
4032     case GST_H264_NAL_PREFIX_UNIT:
4033         /* skip Prefix NAL units for now */
4034         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4035             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4036             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4037         break;
4038     default:
4039         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4040             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4041                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4042         break;
4043     }
4044     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4045         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4046     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4047
4048     pi->nalu.data = NULL;
4049     pi->state = priv->parser_state;
4050     pi->flags = flags;
4051     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4052     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4053 }
4054
4055 static GstVaapiDecoderStatus
4056 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4057     GstVaapiDecoderUnit *unit)
4058 {
4059     GstVaapiDecoderH264 * const decoder =
4060         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4061     GstVaapiDecoderStatus status;
4062
4063     status = ensure_decoder(decoder);
4064     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4065         return status;
4066     return decode_unit(decoder, unit);
4067 }
4068
4069 static GstVaapiDecoderStatus
4070 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4071     GstVaapiDecoderUnit *unit)
4072 {
4073     GstVaapiDecoderH264 * const decoder =
4074         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4075
4076     return decode_picture(decoder, unit);
4077 }
4078
4079 static GstVaapiDecoderStatus
4080 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4081 {
4082     GstVaapiDecoderH264 * const decoder =
4083         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4084
4085     return decode_current_picture(decoder);
4086 }
4087
4088 static GstVaapiDecoderStatus
4089 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4090 {
4091     GstVaapiDecoderH264 * const decoder =
4092         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4093
4094     dpb_flush(decoder, NULL);
4095     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4096 }
4097
4098 static void
4099 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4100 {
4101     GstVaapiMiniObjectClass * const object_class =
4102         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4103     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4104
4105     object_class->size          = sizeof(GstVaapiDecoderH264);
4106     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4107
4108     decoder_class->create       = gst_vaapi_decoder_h264_create;
4109     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4110     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4111     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4112     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4113     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4114     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4115
4116     decoder_class->decode_codec_data =
4117         gst_vaapi_decoder_h264_decode_codec_data;
4118 }
4119
4120 static inline const GstVaapiDecoderClass *
4121 gst_vaapi_decoder_h264_class(void)
4122 {
4123     static GstVaapiDecoderH264Class g_class;
4124     static gsize g_class_init = FALSE;
4125
4126     if (g_once_init_enter(&g_class_init)) {
4127         gst_vaapi_decoder_h264_class_init(&g_class);
4128         g_once_init_leave(&g_class_init, TRUE);
4129     }
4130     return GST_VAAPI_DECODER_CLASS(&g_class);
4131 }
4132
4133 /**
4134  * gst_vaapi_decoder_h264_new:
4135  * @display: a #GstVaapiDisplay
4136  * @caps: a #GstCaps holding codec information
4137  *
4138  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4139  * hold extra information like codec-data and picture coded size.
4140  *
4141  * Return value: the newly allocated #GstVaapiDecoder object
4142  */
4143 GstVaapiDecoder *
4144 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4145 {
4146     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4147 }