decoder: h264: fix inter-view references array growth.
[platform/upstream/gstreamer-vaapi.git] / gst-libs / gst / vaapi / gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* These flags do not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit; we are only
69        interested in knowing whether the current slice is the first or
70        the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
183
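/* Note: "used for short-term reference" is encoded as FLAG_REFERENCE alone,
   while "used for long-term reference" is FLAG_REFERENCE | FLAG_REFERENCE2.
   The IS_SHORT_TERM / IS_LONG_TERM macros below therefore compare against the
   whole FLAGS_REFERENCE mask, so the two states are mutually exclusive. */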
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_INTER_VIEW)
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_ANCHOR)
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
355
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiPictureH264        *current_picture;
449     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
450     GstVaapiParserInfoH264     *active_sps;
451     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
452     GstVaapiParserInfoH264     *active_pps;
453     GstVaapiParserInfoH264     *prev_pi;
454     GstVaapiParserInfoH264     *prev_slice_pi;
455     GstVaapiFrameStore        **prev_frames;
456     guint                       prev_frames_alloc;
457     GstVaapiFrameStore        **dpb;
458     guint                       dpb_count;
459     guint                       dpb_size;
460     guint                       dpb_size_max;
461     guint                       max_views;
462     GstVaapiProfile             profile;
463     GstVaapiEntrypoint          entrypoint;
464     GstVaapiChromaType          chroma_type;
465     GPtrArray                  *inter_views;
466     GstVaapiPictureH264        *short_ref[32];
467     guint                       short_ref_count;
468     GstVaapiPictureH264        *long_ref[32];
469     guint                       long_ref_count;
470     GstVaapiPictureH264        *RefPicList0[32];
471     guint                       RefPicList0_count;
472     GstVaapiPictureH264        *RefPicList1[32];
473     guint                       RefPicList1_count;
474     guint                       nal_length_size;
475     guint                       mb_width;
476     guint                       mb_height;
477     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
478     gint32                      poc_msb;                // PicOrderCntMsb
479     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
480     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
481     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
482     gint32                      frame_num_offset;       // FrameNumOffset
483     gint32                      frame_num;              // frame_num (from slice_header())
484     gint32                      prev_frame_num;         // prevFrameNum
485     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
486     gboolean                    prev_pic_structure;     // previous picture structure
487     guint                       is_opened               : 1;
488     guint                       is_avcC                 : 1;
489     guint                       has_context             : 1;
490     guint                       progressive_sequence    : 1;
491 };
492
493 /**
494  * GstVaapiDecoderH264:
495  *
496  * A decoder based on H264.
497  */
498 struct _GstVaapiDecoderH264 {
499     /*< private >*/
500     GstVaapiDecoder             parent_instance;
501     GstVaapiDecoderH264Private  priv;
502 };
503
504 /**
505  * GstVaapiDecoderH264Class:
506  *
507  * A decoder class based on H264.
508  */
509 struct _GstVaapiDecoderH264Class {
510     /*< private >*/
511     GstVaapiDecoderClass parent_class;
512 };
513
514 static gboolean
515 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
516
517 /* Determines if the supplied profile is one of the MVC set */
518 static gboolean
519 is_mvc_profile(GstH264Profile profile)
520 {
521     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
522         profile == GST_H264_PROFILE_STEREO_HIGH;
523 }
524
525 /* Determines the view_id from the supplied NAL unit */
526 static inline guint
527 get_view_id(GstH264NalUnit *nalu)
528 {
529     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
530 }
531
532 /* Determines the view order index (VOIdx) from the supplied view_id */
533 static gint
534 get_view_order_index(GstH264SPS *sps, guint16 view_id)
535 {
536     GstH264SPSExtMVC *mvc;
537     gint i;
538
539     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
540         return 0;
541
542     mvc = &sps->extension.mvc;
543     for (i = 0; i <= mvc->num_views_minus1; i++) {
544         if (mvc->view[i].view_id == view_id)
545             return i;
546     }
547     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
548     return -1;
549 }
550
551 /* Determines NumViews */
552 static guint
553 get_num_views(GstH264SPS *sps)
554 {
555     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
556         sps->extension.mvc.num_views_minus1 : 0);
557 }
558
559 /* Gets the maximum DPB size (max_dec_frame_buffering) to use */
560 static guint
561 get_max_dec_frame_buffering(GstH264SPS *sps)
562 {
563     guint num_views, max_dpb_frames;
564     guint max_dec_frame_buffering, PicSizeMbs;
565     GstVaapiLevelH264 level;
566     const GstVaapiH264LevelLimits *level_limits;
567
568     /* Table A-1 - Level limits */
569     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
570         level = GST_VAAPI_LEVEL_H264_L1b;
571     else
572         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
573     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
574     if (G_UNLIKELY(!level_limits)) {
575         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
576         max_dec_frame_buffering = 16;
577     }
578     else {
579         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
580                       (sps->pic_height_in_map_units_minus1 + 1) *
581                       (sps->frame_mbs_only_flag ? 1 : 2));
582         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
583     }
584     if (is_mvc_profile(sps->profile_idc))
585         max_dec_frame_buffering <<= 1;
586
587     /* VUI parameters */
588     if (sps->vui_parameters_present_flag) {
589         GstH264VUIParams * const vui_params = &sps->vui_parameters;
590         if (vui_params->bitstream_restriction_flag)
591             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
592         else {
593             switch (sps->profile_idc) {
594             case 44:  // CAVLC 4:4:4 Intra profile
595             case GST_H264_PROFILE_SCALABLE_HIGH:
596             case GST_H264_PROFILE_HIGH:
597             case GST_H264_PROFILE_HIGH10:
598             case GST_H264_PROFILE_HIGH_422:
599             case GST_H264_PROFILE_HIGH_444:
600                 if (sps->constraint_set3_flag)
601                     max_dec_frame_buffering = 0;
602                 break;
603             }
604         }
605     }
606
607     num_views = get_num_views(sps);
608     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
609     if (max_dec_frame_buffering > max_dpb_frames)
610         max_dec_frame_buffering = max_dpb_frames;
611     else if (max_dec_frame_buffering < sps->num_ref_frames)
612         max_dec_frame_buffering = sps->num_ref_frames;
613     return MAX(1, max_dec_frame_buffering);
614 }
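/* For illustration (values from Table A-1, assuming no VUI override and a
   non-MVC profile): a 1920x1088 progressive stream at level 4.1 gives
   PicSizeMbs = 120 * 68 = 8160 and MaxDpbMbs = 32768, hence
   max_dec_frame_buffering = 32768 / 8160 = 4 frames, before the final
   max_dpb_frames / num_ref_frames clamps. */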
615
616 static void
617 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
618 {
619     gpointer * const entries = array;
620     guint num_entries = *array_length_ptr;
621
622     g_return_if_fail(index < num_entries);
623
624     if (index != --num_entries)
625         entries[index] = entries[num_entries];
626     entries[num_entries] = NULL;
627     *array_length_ptr = num_entries;
628 }
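/* Example: removing index 1 from { A, B, C, D } (length 4) moves the last
   entry into the hole and yields { A, D, C } (length 3). Element order is
   not preserved. */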
629
630 #if 1
631 static inline void
632 array_remove_index(void *array, guint *array_length_ptr, guint index)
633 {
634     array_remove_index_fast(array, array_length_ptr, index);
635 }
636 #else
637 static void
638 array_remove_index(void *array, guint *array_length_ptr, guint index)
639 {
640     gpointer * const entries = array;
641     const guint num_entries = *array_length_ptr - 1;
642     guint i;
643
644     g_return_if_fail(index <= num_entries);
645
646     for (i = index; i < num_entries; i++)
647         entries[i] = entries[i + 1];
648     entries[num_entries] = NULL;
649     *array_length_ptr = num_entries;
650 }
651 #endif
652
653 #define ARRAY_REMOVE_INDEX(array, index) \
654     array_remove_index(array, &array##_count, index)
655
656 static void
657 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
658 {
659     GstVaapiDecoderH264Private * const priv = &decoder->priv;
660     guint i, num_frames = --priv->dpb_count;
661
662     if (USE_STRICT_DPB_ORDERING) {
663         for (i = index; i < num_frames; i++)
664             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
665     }
666     else if (index != num_frames)
667         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
668     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
669 }
670
671 static gboolean
672 dpb_output(
673     GstVaapiDecoderH264 *decoder,
674     GstVaapiFrameStore  *fs,
675     GstVaapiPictureH264 *picture
676 )
677 {
678     picture->output_needed = FALSE;
679
680     if (fs) {
681         if (--fs->output_needed > 0)
682             return TRUE;
683         picture = fs->buffers[0];
684     }
685     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
686 }
687
688 static inline void
689 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
690 {
691     GstVaapiDecoderH264Private * const priv = &decoder->priv;
692     GstVaapiFrameStore * const fs = priv->dpb[i];
693
694     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
695         dpb_remove_index(decoder, i);
696 }
697
698 /* Finds the frame store holding the supplied picture */
699 static gint
700 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
701 {
702     GstVaapiDecoderH264Private * const priv = &decoder->priv;
703     gint i, j;
704
705     for (i = 0; i < priv->dpb_count; i++) {
706         GstVaapiFrameStore * const fs = priv->dpb[i];
707         for (j = 0; j < fs->num_buffers; j++) {
708             if (fs->buffers[j] == picture)
709                 return i;
710         }
711     }
712     return -1;
713 }
714
715 /* Finds the picture with the lowest POC that needs to be output */
716 static gint
717 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
718     GstVaapiPictureH264 **found_picture_ptr)
719 {
720     GstVaapiDecoderH264Private * const priv = &decoder->priv;
721     GstVaapiPictureH264 *found_picture = NULL;
722     guint i, j, found_index;
723
724     for (i = 0; i < priv->dpb_count; i++) {
725         GstVaapiFrameStore * const fs = priv->dpb[i];
726         if (!fs->output_needed)
727             continue;
728         if (picture && picture->base.view_id != fs->view_id)
729             continue;
730         for (j = 0; j < fs->num_buffers; j++) {
731             GstVaapiPictureH264 * const pic = fs->buffers[j];
732             if (!pic->output_needed)
733                 continue;
734             if (!found_picture || found_picture->base.poc > pic->base.poc ||
735                 (found_picture->base.poc == pic->base.poc &&
736                  found_picture->base.voc > pic->base.voc))
737                 found_picture = pic, found_index = i;
738         }
739     }
740
741     if (found_picture_ptr)
742         *found_picture_ptr = found_picture;
743     return found_picture ? found_index : -1;
744 }
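/* Note: ties on POC are broken by taking the lowest view order index (VOC),
   so for MVC streams the base view component of an access unit is output
   before dependent view components with the same POC. */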
745
746 /* Finds the picture with the lowest VOC that needs to be output */
747 static gint
748 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
749     GstVaapiPictureH264 **found_picture_ptr)
750 {
751     GstVaapiDecoderH264Private * const priv = &decoder->priv;
752     GstVaapiPictureH264 *found_picture = NULL;
753     guint i, j, found_index;
754
755     for (i = 0; i < priv->dpb_count; i++) {
756         GstVaapiFrameStore * const fs = priv->dpb[i];
757         if (!fs->output_needed || fs->view_id == picture->base.view_id)
758             continue;
759         for (j = 0; j < fs->num_buffers; j++) {
760             GstVaapiPictureH264 * const pic = fs->buffers[j];
761             if (!pic->output_needed || pic->base.poc != picture->base.poc)
762                 continue;
763             if (!found_picture || found_picture->base.voc > pic->base.voc)
764                 found_picture = pic, found_index = i;
765         }
766     }
767
768     if (found_picture_ptr)
769         *found_picture_ptr = found_picture;
770     return found_picture ? found_index : -1;
771 }
772
773 static gboolean
774 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
775 {
776     GstVaapiDecoderH264Private * const priv = &decoder->priv;
777     GstVaapiPictureH264 *found_picture;
778     gint found_index;
779     gboolean success;
780
781     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
782     if (found_index < 0)
783         return FALSE;
784
785     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
786     dpb_evict(decoder, found_picture, found_index);
787     if (priv->max_views == 1)
788         return success;
789
790     /* Emit all other view components that were in the same access
791        unit as the picture we have just found */
792     for (;;) {
793         found_index = dpb_find_lowest_voc(decoder, found_picture,
794             &found_picture);
795         if (found_index < 0)
796             break;
797         dpb_output(decoder, priv->dpb[found_index], found_picture);
798         dpb_evict(decoder, found_picture, found_index);
799     }
800     return success;
801 }
802
803 static void
804 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
805 {
806     GstVaapiDecoderH264Private * const priv = &decoder->priv;
807     guint i, n;
808
809     for (i = 0; i < priv->dpb_count; i++) {
810         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
811             continue;
812         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
813     }
814
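    /* Compact the DPB, i.e. remove the holes left by the entries cleared above */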
815     for (i = 0, n = 0; i < priv->dpb_count; i++) {
816         if (priv->dpb[i])
817             priv->dpb[n++] = priv->dpb[i];
818     }
819     priv->dpb_count = n;
820
821     /* Clear previous frame buffers only if this is a "flush-all" operation,
822        or if the picture is the first one in the access unit */
823     if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
824             GST_VAAPI_PICTURE_FLAG_AU_START)) {
825         for (i = 0; i < priv->max_views; i++)
826             gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
827     }
828 }
829
830 static void
831 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
832 {
833     while (dpb_bump(decoder, picture))
834         ;
835     dpb_clear(decoder, picture);
836 }
837
838 static void
839 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 {
841     GstVaapiDecoderH264Private * const priv = &decoder->priv;
842     guint i;
843
844     // Remove all unused inter-view pictures
845     if (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END)) {
846         i = 0;
847         while (i < priv->dpb_count) {
848             GstVaapiFrameStore * const fs = priv->dpb[i];
849             if (fs->view_id != picture->base.view_id &&
850                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
851                 dpb_remove_index(decoder, i);
852             else
853                 i++;
854         }
855     }
856 }
857
858 static gboolean
859 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
860 {
861     GstVaapiDecoderH264Private * const priv = &decoder->priv;
862     GstVaapiFrameStore *fs;
863     guint i;
864
865     if (priv->max_views > 1)
866         dpb_prune_mvc(decoder, picture);
867
868     // Remove all unused pictures
869     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
870         i = 0;
871         while (i < priv->dpb_count) {
872             GstVaapiFrameStore * const fs = priv->dpb[i];
873             if (fs->view_id == picture->base.view_id &&
874                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
875                 dpb_remove_index(decoder, i);
876             else
877                 i++;
878         }
879     }
880
881     // Check if picture is the second field and the first field is still in DPB
882     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
883         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
884         const gint found_index = dpb_find_picture(decoder,
885             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
886         if (found_index >= 0)
887             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
888     }
889
890     // Create new frame store, and split fields if necessary
891     fs = gst_vaapi_frame_store_new(picture);
892     if (!fs)
893         return FALSE;
894     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
895     gst_vaapi_frame_store_unref(fs);
896
897     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
898         if (!gst_vaapi_frame_store_split_fields(fs))
899             return FALSE;
900     }
901
902     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
903     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
904         while (priv->dpb_count == priv->dpb_size) {
905             if (!dpb_bump(decoder, picture))
906                 return FALSE;
907         }
908     }
909
910     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
911     else {
912         const gboolean StoreInterViewOnlyRefFlag =
913             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
914                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
915             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
916                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
917         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
918             return TRUE;
919         while (priv->dpb_count == priv->dpb_size) {
920             if (!StoreInterViewOnlyRefFlag) {
921                 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
922                     return dpb_output(decoder, NULL, picture);
923             }
924             if (!dpb_bump(decoder, picture))
925                 return FALSE;
926         }
927     }
928
929     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
930     if (picture->output_flag) {
931         picture->output_needed = TRUE;
932         fs->output_needed++;
933     }
934     return TRUE;
935 }
936
937 static gboolean
938 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
939 {
940     GstVaapiDecoderH264Private * const priv = &decoder->priv;
941
945     if (dpb_size > priv->dpb_size_max) {
946         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
947         if (!priv->dpb)
948             return FALSE;
949         memset(&priv->dpb[priv->dpb_size_max], 0,
950             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
951         priv->dpb_size_max = dpb_size;
952     }
953
954     if (priv->dpb_size < dpb_size)
955         priv->dpb_size = dpb_size;
956     else if (dpb_size < priv->dpb_count)
957         return FALSE;
958
959     GST_DEBUG("DPB size %u", priv->dpb_size);
960     return TRUE;
961 }
962
963 static void
964 unref_inter_view(GstVaapiPictureH264 *picture)
965 {
966     if (!picture)
967         return;
968     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
969     gst_vaapi_picture_unref(picture);
970 }
971
972 /* Resets MVC resources */
973 static gboolean
974 mvc_reset(GstVaapiDecoderH264 *decoder)
975 {
976     GstVaapiDecoderH264Private * const priv = &decoder->priv;
977     guint i;
978
979     // Create the array of inter-view references (a GPtrArray, grown on demand)
980     if (!priv->inter_views) {
981         priv->inter_views = g_ptr_array_new_full(priv->max_views,
982             (GDestroyNotify)unref_inter_view);
983         if (!priv->inter_views)
984             return FALSE;
985     }
986
987     // Resize array of previous frame buffers
988     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
989         gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
990
991     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
992         sizeof(*priv->prev_frames));
993     if (!priv->prev_frames) {
994         priv->prev_frames_alloc = 0;
995         return FALSE;
996     }
997     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
998         priv->prev_frames[i] = NULL;
999     priv->prev_frames_alloc = priv->max_views;
1000     return TRUE;
1001 }
1002
1003 static GstVaapiDecoderStatus
1004 get_status(GstH264ParserResult result)
1005 {
1006     GstVaapiDecoderStatus status;
1007
1008     switch (result) {
1009     case GST_H264_PARSER_OK:
1010         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1011         break;
1012     case GST_H264_PARSER_NO_NAL_END:
1013         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1014         break;
1015     case GST_H264_PARSER_ERROR:
1016         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1017         break;
1018     default:
1019         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1020         break;
1021     }
1022     return status;
1023 }
1024
1025 static void
1026 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1027 {
1028     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1029
1030     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1031     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1032     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1033
1034     dpb_clear(decoder, NULL);
1035
1036     if (priv->inter_views) {
1037         g_ptr_array_unref(priv->inter_views);
1038         priv->inter_views = NULL;
1039     }
1040
1041     if (priv->parser) {
1042         gst_h264_nal_parser_free(priv->parser);
1043         priv->parser = NULL;
1044     }
1045 }
1046
1047 static gboolean
1048 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1049 {
1050     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1051
1052     gst_vaapi_decoder_h264_close(decoder);
1053
1054     priv->parser = gst_h264_nal_parser_new();
1055     if (!priv->parser)
1056         return FALSE;
1057     return TRUE;
1058 }
1059
1060 static void
1061 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1062 {
1063     GstVaapiDecoderH264 * const decoder =
1064         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1065     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1066     guint i;
1067
1068     gst_vaapi_decoder_h264_close(decoder);
1069
1070     g_free(priv->dpb);
1071     priv->dpb = NULL;
1072     priv->dpb_size = 0;
1073
1074     g_free(priv->prev_frames);
1075     priv->prev_frames = NULL;
1076     priv->prev_frames_alloc = 0;
1077
1078     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1079         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1080     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1081
1082     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1083         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1084     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1085 }
1086
1087 static gboolean
1088 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1089 {
1090     GstVaapiDecoderH264 * const decoder =
1091         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1092     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1093
1094     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1095     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1096     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1097     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1098     priv->progressive_sequence  = TRUE;
1099     return TRUE;
1100 }
1101
1102 /* Activates the supplied PPS */
1103 static GstH264PPS *
1104 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1105 {
1106     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1107     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1108
1109     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1110     return pi ? &pi->data.pps : NULL;
1111 }
1112
1113 /* Returns the active PPS */
1114 static inline GstH264PPS *
1115 get_pps(GstVaapiDecoderH264 *decoder)
1116 {
1117     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1118
1119     return pi ? &pi->data.pps : NULL;
1120 }
1121
1122 /* Activates the supplied SPS */
1123 static GstH264SPS *
1124 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1125 {
1126     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1127     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1128
1129     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1130     return pi ? &pi->data.sps : NULL;
1131 }
1132
1133 /* Returns the active SPS */
1134 static inline GstH264SPS *
1135 get_sps(GstVaapiDecoderH264 *decoder)
1136 {
1137     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1138
1139     return pi ? &pi->data.sps : NULL;
1140 }
1141
1142 static void
1143 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1144     GstVaapiProfile profile)
1145 {
1146     guint n_profiles = *n_profiles_ptr;
1147
1148     profiles[n_profiles++] = profile;
1149     switch (profile) {
1150     case GST_VAAPI_PROFILE_H264_MAIN:
1151         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1152         break;
1153     default:
1154         break;
1155     }
1156     *n_profiles_ptr = n_profiles;
1157 }
1158
1159 /* Fills in compatible profiles for MVC decoding */
1160 static void
1161 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1162     guint *n_profiles_ptr, guint dpb_size)
1163 {
1164     const gchar * const vendor_string =
1165         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1166
1167     gboolean add_high_profile = FALSE;
1168     struct map {
1169         const gchar *str;
1170         guint str_len;
1171     };
1172     const struct map *m;
1173
1174     // Drivers that support slice level decoding
1175     if (vendor_string && dpb_size <= 16) {
1176         static const struct map drv_names[] = {
1177             { "Intel i965 driver", 17 },
1178             { NULL, 0 }
1179         };
1180         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1181             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1182                 add_high_profile = TRUE;
1183         }
1184     }
1185
1186     if (add_high_profile)
1187         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1188 }
1189
1190 static GstVaapiProfile
1191 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1192 {
1193     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1194     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1195     GstVaapiProfile profile, profiles[4];
1196     guint i, n_profiles = 0;
1197
1198     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1199     if (!profile)
1200         return GST_VAAPI_PROFILE_UNKNOWN;
1201
1202     fill_profiles(profiles, &n_profiles, profile);
1203     switch (profile) {
1204     case GST_VAAPI_PROFILE_H264_BASELINE:
1205         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1206             fill_profiles(profiles, &n_profiles,
1207                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1208             fill_profiles(profiles, &n_profiles,
1209                 GST_VAAPI_PROFILE_H264_MAIN);
1210         }
1211         break;
1212     case GST_VAAPI_PROFILE_H264_EXTENDED:
1213         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1214             fill_profiles(profiles, &n_profiles,
1215                 GST_VAAPI_PROFILE_H264_MAIN);
1216         }
1217         break;
1218     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1219         if (priv->max_views == 2) {
1220             fill_profiles(profiles, &n_profiles,
1221                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1222         }
1223         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1224         break;
1225     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1226         if (sps->frame_mbs_only_flag) {
1227             fill_profiles(profiles, &n_profiles,
1228                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1229         }
1230         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1231         break;
1232     default:
1233         break;
1234     }
1235
1236     /* If the preferred profile (profiles[0]) matches one that we already
1237        found, then just return it now instead of searching for it again */
1238     if (profiles[0] == priv->profile)
1239         return priv->profile;
1240
1241     for (i = 0; i < n_profiles; i++) {
1242         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1243             return profiles[i];
1244     }
1245     return GST_VAAPI_PROFILE_UNKNOWN;
1246 }
1247
1248 static GstVaapiDecoderStatus
1249 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1250 {
1251     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1252     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1253     GstVaapiContextInfo info;
1254     GstVaapiProfile profile;
1255     GstVaapiChromaType chroma_type;
1256     gboolean reset_context = FALSE;
1257     guint mb_width, mb_height, dpb_size;
1258
1259     dpb_size = get_max_dec_frame_buffering(sps);
1260     if (priv->dpb_size < dpb_size) {
1261         GST_DEBUG("DPB size increased");
1262         reset_context = TRUE;
1263     }
1264
1265     profile = get_profile(decoder, sps, dpb_size);
1266     if (!profile) {
1267         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1268         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1269     }
1270
1271     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1272         GST_DEBUG("profile changed");
1273         reset_context = TRUE;
1274         priv->profile = profile;
1275     }
1276
1277     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1278     if (!chroma_type) {
1279         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1280         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1281     }
1282
1283     if (priv->chroma_type != chroma_type) {
1284         GST_DEBUG("chroma format changed");
1285         reset_context     = TRUE;
1286         priv->chroma_type = chroma_type;
1287     }
1288
1289     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1290     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1291         !sps->frame_mbs_only_flag;
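    /* e.g. 1088-line interlaced content (frame_mbs_only_flag = 0) has
       pic_height_in_map_units_minus1 + 1 = 34 map units, hence
       mb_height = 34 << 1 = 68 frame macroblock rows */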
1292     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1293         GST_DEBUG("size changed");
1294         reset_context   = TRUE;
1295         priv->mb_width  = mb_width;
1296         priv->mb_height = mb_height;
1297     }
1298
1299     priv->progressive_sequence = sps->frame_mbs_only_flag;
1300 #if 0
1301     /* XXX: we only output complete frames for now */
1302     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1303 #endif
1304
1305     gst_vaapi_decoder_set_pixel_aspect_ratio(
1306         base_decoder,
1307         sps->vui_parameters.par_n,
1308         sps->vui_parameters.par_d
1309     );
1310
1311     if (!reset_context && priv->has_context)
1312         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1313
1314     /* XXX: fix surface size when cropping is implemented */
1315     info.profile    = priv->profile;
1316     info.entrypoint = priv->entrypoint;
1317     info.chroma_type = priv->chroma_type;
1318     info.width      = sps->width;
1319     info.height     = sps->height;
1320     info.ref_frames = dpb_size;
1321
1322     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1323         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1324     priv->has_context = TRUE;
1325
1326     /* Reset DPB */
1327     if (!dpb_reset(decoder, dpb_size))
1328         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1329
1330     /* Reset MVC data */
1331     if (!mvc_reset(decoder))
1332         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1333     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1334 }
1335
1336 static void
1337 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1338     const GstH264SPS *sps)
1339 {
1340     guint i;
1341
1342     /* There are always 6 4x4 scaling lists */
1343     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1344     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1345
1346     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1347         gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
1348             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1349 }
1350
1351 static void
1352 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1353     const GstH264SPS *sps)
1354 {
1355     guint i, n;
1356
1357     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1358     if (!pps->transform_8x8_mode_flag)
1359         return;
1360
1361     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1362     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1363
1364     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1365     for (i = 0; i < n; i++) {
1366         gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
1367             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1368     }
1369 }
1370
1371 static GstVaapiDecoderStatus
1372 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1373 {
1374     GstVaapiPicture * const base_picture = &picture->base;
1375     GstH264PPS * const pps = get_pps(decoder);
1376     GstH264SPS * const sps = get_sps(decoder);
1377     VAIQMatrixBufferH264 *iq_matrix;
1378
1379     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1380     if (!base_picture->iq_matrix) {
1381         GST_ERROR("failed to allocate IQ matrix");
1382         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1383     }
1384     iq_matrix = base_picture->iq_matrix->param;
1385
1386     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1387        is not large enough to hold lists for 4:4:4 */
1388     if (sps->chroma_format_idc == 3)
1389         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1390
1391     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1392     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1393
1394     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1395 }
1396
1397 static inline gboolean
1398 is_valid_state(guint state, guint ref_state)
1399 {
1400     return (state & ref_state) == ref_state;
1401 }
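/* e.g. is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE)
   only holds once an SPS, a PPS and at least one slice have been seen for the
   current picture. */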
1402
1403 static GstVaapiDecoderStatus
1404 decode_current_picture(GstVaapiDecoderH264 *decoder)
1405 {
1406     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1407     GstVaapiPictureH264 * const picture = priv->current_picture;
1408
1409     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1410         goto drop_frame;
1411     priv->decoder_state = 0;
1412
1413     if (!picture)
1414         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1415
1416     if (!exec_ref_pic_marking(decoder, picture))
1417         goto error;
1418     if (!dpb_add(decoder, picture))
1419         goto error;
1420     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1421         goto error;
1422     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1423     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1424
1425 error:
1426     /* XXX: fix for cases where first field failed to be decoded */
1427     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1428     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1429
1430 drop_frame:
1431     priv->decoder_state = 0;
1432     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1433 }
1434
1435 static GstVaapiDecoderStatus
1436 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1437 {
1438     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1439     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1440     GstH264SPS * const sps = &pi->data.sps;
1441     GstH264ParserResult result;
1442
1443     GST_DEBUG("parse SPS");
1444
1445     priv->parser_state = 0;
1446
1447     /* Variables that don't have inferred values per the H.264
1448        standard but that should get a default value anyway */
1449     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1450
1451     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1452     if (result != GST_H264_PARSER_OK)
1453         return get_status(result);
1454
1455     /* Reset defaults */
1456     priv->max_views = 1;
1457
1458     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1459     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1460 }
1461
1462 static GstVaapiDecoderStatus
1463 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1464 {
1465     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1466     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1467     GstH264SPS * const sps = &pi->data.sps;
1468     GstH264ParserResult result;
1469
1470     GST_DEBUG("parse subset SPS");
1471
1472     /* Variables that don't have inferred values per the H.264
1473        standard but that should get a default value anyway */
1474     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1475
1476     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1477         TRUE);
1478     if (result != GST_H264_PARSER_OK)
1479         return get_status(result);
1480
1481     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1482     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1483 }
1484
1485 static GstVaapiDecoderStatus
1486 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1487 {
1488     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1489     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1490     GstH264PPS * const pps = &pi->data.pps;
1491     GstH264ParserResult result;
1492
1493     GST_DEBUG("parse PPS");
1494
1495     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1496
1497     /* Variables that don't have inferred values per the H.264
1498        standard but that should get a default value anyway */
1499     pps->slice_group_map_type = 0;
1500     pps->slice_group_change_rate_minus1 = 0;
1501
1502     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1503     if (result != GST_H264_PARSER_OK)
1504         return get_status(result);
1505
1506     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1507     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1508 }
1509
1510 static GstVaapiDecoderStatus
1511 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1512 {
1513     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1514     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1515     GArray ** const sei_ptr = &pi->data.sei;
1516     GstH264ParserResult result;
1517
1518     GST_DEBUG("parse SEI");
1519
1520     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1521     if (result != GST_H264_PARSER_OK) {
1522         GST_WARNING("failed to parse SEI messages");
1523         return get_status(result);
1524     }
1525     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1526 }
1527
1528 static GstVaapiDecoderStatus
1529 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1530 {
1531     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1532     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1533     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1534     GstH264NalUnit * const nalu = &pi->nalu;
1535     GstH264SPS *sps;
1536     GstH264ParserResult result;
1537     guint num_views;
1538
1539     GST_DEBUG("parse slice");
1540
1541     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1542                            GST_H264_VIDEO_STATE_GOT_PPS);
1543
1544     /* Propagate Prefix NAL unit info, if necessary */
1545     switch (nalu->type) {
1546     case GST_H264_NAL_SLICE:
1547     case GST_H264_NAL_SLICE_IDR: {
1548         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1549         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1550             /* MVC sequences shall have a Prefix NAL unit immediately
1551                preceding this NAL unit */
1552             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1553             pi->nalu.extension = prev_pi->nalu.extension;
1554         }
1555         else {
1556             /* In the very unlikely case there is no Prefix NAL unit
1557                immediately preceding this NAL unit, try to infer some
1558                defaults (H.7.4.1.1) */
1559             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1560             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1561             nalu->idr_pic_flag = !mvc->non_idr_flag;
1562             mvc->priority_id = 0;
1563             mvc->view_id = 0;
1564             mvc->temporal_id = 0;
1565             mvc->anchor_pic_flag = 0;
1566             mvc->inter_view_flag = 1;
1567         }
1568         break;
1569     }
1570     }
1571
1572     /* Variables that don't have inferred values per the H.264
1573        standard but that should get a default value anyway */
1574     slice_hdr->cabac_init_idc = 0;
1575     slice_hdr->direct_spatial_mv_pred_flag = 0;
1576
1577     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1578         slice_hdr, TRUE, TRUE);
1579     if (result != GST_H264_PARSER_OK)
1580         return get_status(result);
1581
1582     sps = slice_hdr->pps->sequence;
1583
1584     /* Update MVC data */
1585     num_views = get_num_views(sps);
1586     if (priv->max_views < num_views) {
1587         priv->max_views = num_views;
1588         GST_DEBUG("maximum number of views changed to %u", num_views);
1589     }
1590     pi->view_id = get_view_id(&pi->nalu);
1591     pi->voc = get_view_order_index(sps, pi->view_id);
1592
1593     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1594     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1595 }
1596
1597 static GstVaapiDecoderStatus
1598 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1599 {
1600     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1601     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1602     GstH264SPS * const sps = &pi->data.sps;
1603
1604     GST_DEBUG("decode SPS");
1605
1606     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1607     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1608 }
1609
1610 static GstVaapiDecoderStatus
1611 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1612 {
1613     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1614     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1615     GstH264SPS * const sps = &pi->data.sps;
1616
1617     GST_DEBUG("decode subset SPS");
1618
1619     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1620     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1621 }
1622
1623 static GstVaapiDecoderStatus
1624 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1625 {
1626     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1627     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1628     GstH264PPS * const pps = &pi->data.pps;
1629
1630     GST_DEBUG("decode PPS");
1631
1632     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1633     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1634 }
1635
1636 static GstVaapiDecoderStatus
1637 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1638 {
1639     GstVaapiDecoderStatus status;
1640
1641     GST_DEBUG("decode sequence-end");
1642
1643     status = decode_current_picture(decoder);
1644     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1645         return status;
1646
1647     dpb_flush(decoder, NULL);
1648     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1649 }
1650
1651 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1652 static void
1653 init_picture_poc_0(
1654     GstVaapiDecoderH264 *decoder,
1655     GstVaapiPictureH264 *picture,
1656     GstH264SliceHdr     *slice_hdr
1657 )
1658 {
1659     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660     GstH264SPS * const sps = get_sps(decoder);
1661     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1662     gint32 temp_poc;
1663
1664     GST_DEBUG("decode picture order count type 0");
1665
1666     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1667         priv->prev_poc_msb = 0;
1668         priv->prev_poc_lsb = 0;
1669     }
1670     else if (priv->prev_pic_has_mmco5) {
1671         priv->prev_poc_msb = 0;
1672         priv->prev_poc_lsb =
1673             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1674              0 : priv->field_poc[TOP_FIELD]);
1675     }
1676     else {
1677         priv->prev_poc_msb = priv->poc_msb;
1678         priv->prev_poc_lsb = priv->poc_lsb;
1679     }
1680
1681     // (8-3)
1682     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1683     if (priv->poc_lsb < priv->prev_poc_lsb &&
1684         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1685         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1686     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1687              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1688         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1689     else
1690         priv->poc_msb = priv->prev_poc_msb;
1691
1692     temp_poc = priv->poc_msb + priv->poc_lsb;
1693     switch (picture->structure) {
1694     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1695         // (8-4, 8-5)
1696         priv->field_poc[TOP_FIELD] = temp_poc;
1697         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1698             slice_hdr->delta_pic_order_cnt_bottom;
1699         break;
1700     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1701         // (8-4)
1702         priv->field_poc[TOP_FIELD] = temp_poc;
1703         break;
1704     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1705         // (8-5)
1706         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1707         break;
1708     }
1709 }
1710
1711 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1712 static void
1713 init_picture_poc_1(
1714     GstVaapiDecoderH264 *decoder,
1715     GstVaapiPictureH264 *picture,
1716     GstH264SliceHdr     *slice_hdr
1717 )
1718 {
1719     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1720     GstH264SPS * const sps = get_sps(decoder);
1721     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1722     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1723     guint i;
1724
1725     GST_DEBUG("decode picture order count type 1");
1726
1727     if (priv->prev_pic_has_mmco5)
1728         prev_frame_num_offset = 0;
1729     else
1730         prev_frame_num_offset = priv->frame_num_offset;
1731
1732     // (8-6)
1733     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1734         priv->frame_num_offset = 0;
1735     else if (priv->prev_frame_num > priv->frame_num)
1736         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1737     else
1738         priv->frame_num_offset = prev_frame_num_offset;
1739
1740     // (8-7)
1741     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1742         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1743     else
1744         abs_frame_num = 0;
1745     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1746         abs_frame_num = abs_frame_num - 1;
1747
1748     if (abs_frame_num > 0) {
1749         gint32 expected_delta_per_poc_cycle;
1750         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1751
1752         expected_delta_per_poc_cycle = 0;
1753         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1754             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1755
1756         // (8-8)
1757         poc_cycle_cnt = (abs_frame_num - 1) /
1758             sps->num_ref_frames_in_pic_order_cnt_cycle;
1759         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1760             sps->num_ref_frames_in_pic_order_cnt_cycle;
1761
1762         // (8-9)
1763         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1764         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1765             expected_poc += sps->offset_for_ref_frame[i];
1766     }
1767     else
1768         expected_poc = 0;
1769     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1770         expected_poc += sps->offset_for_non_ref_pic;
1771
1772     // (8-10)
1773     switch (picture->structure) {
1774     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1775         priv->field_poc[TOP_FIELD] = expected_poc +
1776             slice_hdr->delta_pic_order_cnt[0];
1777         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1778             sps->offset_for_top_to_bottom_field +
1779             slice_hdr->delta_pic_order_cnt[1];
1780         break;
1781     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1782         priv->field_poc[TOP_FIELD] = expected_poc +
1783             slice_hdr->delta_pic_order_cnt[0];
1784         break;
1785     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1786         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1787             sps->offset_for_top_to_bottom_field +
1788             slice_hdr->delta_pic_order_cnt[0];
1789         break;
1790     }
1791 }
1792
1793 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1794 static void
1795 init_picture_poc_2(
1796     GstVaapiDecoderH264 *decoder,
1797     GstVaapiPictureH264 *picture,
1798     GstH264SliceHdr     *slice_hdr
1799 )
1800 {
1801     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1802     GstH264SPS * const sps = get_sps(decoder);
1803     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1804     gint32 prev_frame_num_offset, temp_poc;
1805
1806     GST_DEBUG("decode picture order count type 2");
1807
1808     if (priv->prev_pic_has_mmco5)
1809         prev_frame_num_offset = 0;
1810     else
1811         prev_frame_num_offset = priv->frame_num_offset;
1812
1813     // (8-11)
1814     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1815         priv->frame_num_offset = 0;
1816     else if (priv->prev_frame_num > priv->frame_num)
1817         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1818     else
1819         priv->frame_num_offset = prev_frame_num_offset;
1820
1821     // (8-12)
1822     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1823         temp_poc = 0;
1824     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1825         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1826     else
1827         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1828
1829     // (8-13)
1830     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1831         priv->field_poc[TOP_FIELD] = temp_poc;
1832     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1833         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1834 }
1835
1836 /* 8.2.1 - Decoding process for picture order count */
1837 static void
1838 init_picture_poc(
1839     GstVaapiDecoderH264 *decoder,
1840     GstVaapiPictureH264 *picture,
1841     GstH264SliceHdr     *slice_hdr
1842 )
1843 {
1844     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1845     GstH264SPS * const sps = get_sps(decoder);
1846
1847     switch (sps->pic_order_cnt_type) {
1848     case 0:
1849         init_picture_poc_0(decoder, picture, slice_hdr);
1850         break;
1851     case 1:
1852         init_picture_poc_1(decoder, picture, slice_hdr);
1853         break;
1854     case 2:
1855         init_picture_poc_2(decoder, picture, slice_hdr);
1856         break;
1857     }
1858
1859     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1860         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1861     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1862         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
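    /* 8.2.1: the picture order count is the minimum of the available
       field order counts */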
1863     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1864 }
1865
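/* qsort() comparison helpers providing the orderings used to build the
   initial reference picture lists (8.2.4.2) */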
1866 static int
1867 compare_picture_pic_num_dec(const void *a, const void *b)
1868 {
1869     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1870     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1871
1872     return picB->pic_num - picA->pic_num;
1873 }
1874
1875 static int
1876 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1877 {
1878     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1879     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1880
1881     return picA->long_term_pic_num - picB->long_term_pic_num;
1882 }
1883
1884 static int
1885 compare_picture_poc_dec(const void *a, const void *b)
1886 {
1887     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1888     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1889
1890     return picB->base.poc - picA->base.poc;
1891 }
1892
1893 static int
1894 compare_picture_poc_inc(const void *a, const void *b)
1895 {
1896     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1897     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1898
1899     return picA->base.poc - picB->base.poc;
1900 }
1901
1902 static int
1903 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1904 {
1905     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1906     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1907
1908     return picB->frame_num_wrap - picA->frame_num_wrap;
1909 }
1910
1911 static int
1912 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1913 {
1914     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1916
1917     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1918 }
1919
1920 /* 8.2.4.1 - Decoding process for picture numbers */
1921 static void
1922 init_picture_refs_pic_num(
1923     GstVaapiDecoderH264 *decoder,
1924     GstVaapiPictureH264 *picture,
1925     GstH264SliceHdr     *slice_hdr
1926 )
1927 {
1928     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1929     GstH264SPS * const sps = get_sps(decoder);
1930     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1931     guint i;
1932
1933     GST_DEBUG("decode picture numbers");
1934
1935     for (i = 0; i < priv->short_ref_count; i++) {
1936         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1937
1938         // (H.8.2)
1939         if (pic->base.view_id != picture->base.view_id)
1940             continue;
1941
1942         // (8-27)
1943         if (pic->frame_num > priv->frame_num)
1944             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1945         else
1946             pic->frame_num_wrap = pic->frame_num;
1947
1948         // (8-28, 8-30, 8-31)
1949         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1950             pic->pic_num = pic->frame_num_wrap;
1951         else {
1952             if (pic->structure == picture->structure)
1953                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1954             else
1955                 pic->pic_num = 2 * pic->frame_num_wrap;
1956         }
1957     }
1958
1959     for (i = 0; i < priv->long_ref_count; i++) {
1960         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1961
1962         // (H.8.2)
1963         if (pic->base.view_id != picture->base.view_id)
1964             continue;
1965
1966         // (8-29, 8-32, 8-33)
1967         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1968             pic->long_term_pic_num = pic->long_term_frame_idx;
1969         else {
1970             if (pic->structure == picture->structure)
1971                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
1972             else
1973                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
1974         }
1975     }
1976 }
1977
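/* Sorts the first n entries of a reference picture list in place, using
   the selected compare_picture_*() ordering */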
1978 #define SORT_REF_LIST(list, n, compare_func) \
1979     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
1980
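/* 8.2.4.2.5: builds a reference picture list for fields by alternately
   picking the next available field of the current parity and the next
   field of the opposite parity from the frame-ordered ref_list[] */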
1981 static void
1982 init_picture_refs_fields_1(
1983     guint                picture_structure,
1984     GstVaapiPictureH264 *RefPicList[32],
1985     guint               *RefPicList_count,
1986     GstVaapiPictureH264 *ref_list[32],
1987     guint                ref_list_count
1988 )
1989 {
1990     guint i, j, n;
1991
1992     i = 0;
1993     j = 0;
1994     n = *RefPicList_count;
1995     do {
1996         g_assert(n < 32);
1997         for (; i < ref_list_count; i++) {
1998             if (ref_list[i]->structure == picture_structure) {
1999                 RefPicList[n++] = ref_list[i++];
2000                 break;
2001             }
2002         }
2003         for (; j < ref_list_count; j++) {
2004             if (ref_list[j]->structure != picture_structure) {
2005                 RefPicList[n++] = ref_list[j++];
2006                 break;
2007             }
2008         }
2009     } while (i < ref_list_count || j < ref_list_count);
2010     *RefPicList_count = n;
2011 }
2012
2013 static inline void
2014 init_picture_refs_fields(
2015     GstVaapiPictureH264 *picture,
2016     GstVaapiPictureH264 *RefPicList[32],
2017     guint               *RefPicList_count,
2018     GstVaapiPictureH264 *short_ref[32],
2019     guint                short_ref_count,
2020     GstVaapiPictureH264 *long_ref[32],
2021     guint                long_ref_count
2022 )
2023 {
2024     guint n = 0;
2025
2026     /* 8.2.4.2.5 - reference picture lists in fields */
2027     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2028         short_ref, short_ref_count);
2029     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2030         long_ref, long_ref_count);
2031     *RefPicList_count = n;
2032 }
2033
2034 /* Finds the inter-view reference picture with the supplied view id */
2035 static GstVaapiPictureH264 *
2036 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2037 {
2038     GPtrArray * const inter_views = decoder->priv.inter_views;
2039     guint i;
2040
2041     for (i = 0; i < inter_views->len; i++) {
2042         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2043         if (picture->base.view_id == view_id)
2044             return picture;
2045     }
2046
2047     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2048         view_id);
2049     return NULL;
2050 }
2051
2052 /* Checks whether the view id exists in the supplied list of view ids */
2053 static gboolean
2054 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2055 {
2056     guint i;
2057
2058     for (i = 0; i < num_view_ids; i++) {
2059         if (view_ids[i] == view_id)
2060             return TRUE;
2061     }
2062     return FALSE;
2063 }
2064
2065 /* Checks whether the inter-view reference picture with the supplied
2066    view id is used for decoding the current view component picture */
2067 static gboolean
2068 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2069     guint16 view_id, GstVaapiPictureH264 *picture)
2070 {
2071     const GstH264SPS * const sps = get_sps(decoder);
2072     const GstH264SPSExtMVCView *view;
2073
2074     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2075         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2076         return FALSE;
2077
2078     view = &sps->extension.mvc.view[picture->base.voc];
2079     if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2080         return (find_view_id(view_id, view->anchor_ref_l0,
2081                     view->num_anchor_refs_l0) ||
2082                 find_view_id(view_id, view->anchor_ref_l1,
2083                     view->num_anchor_refs_l1));
2084
2085     return (find_view_id(view_id, view->non_anchor_ref_l0,
2086                 view->num_non_anchor_refs_l0) ||
2087             find_view_id(view_id, view->non_anchor_ref_l1,
2088                 view->num_non_anchor_refs_l1));
2089 }
2090
2091 /* H.8.2.1 - Initialization process for inter-view prediction references */
2092 static void
2093 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2094     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2095     const guint16 *view_ids, guint num_view_ids)
2096 {
2097     guint j, n;
2098
2099     n = *ref_list_count_ptr;
2100     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2101         GstVaapiPictureH264 * const pic =
2102             find_inter_view_reference(decoder, view_ids[j]);
2103         if (pic)
2104             ref_list[n++] = pic;
2105     }
2106     *ref_list_count_ptr = n;
2107 }
2108
2109 static inline void
2110 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2111     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2112 {
2113     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2114     const GstH264SPS * const sps = get_sps(decoder);
2115     const GstH264SPSExtMVCView *view;
2116
2117     GST_DEBUG("initialize reference picture list for inter-view prediction");
2118
2119     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2120         return;
2121     view = &sps->extension.mvc.view[picture->base.voc];
2122
2123 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2124         init_picture_refs_mvc_1(decoder,                                \
2125             priv->RefPicList##ref_list,                                 \
2126             &priv->RefPicList##ref_list##_count,                        \
2127             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2128             view->view_list##_l##ref_list,                              \
2129             view->num_##view_list##s_l##ref_list);                      \
2130     } while (0)
2131
2132     if (list == 0) {
2133         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2134             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2135         else
2136             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2137     }
2138     else {
2139         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2140             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2141         else
2142             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2143     }
2144
2145 #undef INVOKE_INIT_PICTURE_REFS_MVC
2146 }
2147
2148 static void
2149 init_picture_refs_p_slice(
2150     GstVaapiDecoderH264 *decoder,
2151     GstVaapiPictureH264 *picture,
2152     GstH264SliceHdr     *slice_hdr
2153 )
2154 {
2155     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2156     GstVaapiPictureH264 **ref_list;
2157     guint i;
2158
2159     GST_DEBUG("decode reference picture list for P and SP slices");
2160
2161     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2162         /* 8.2.4.2.1 - P and SP slices in frames */
2163         if (priv->short_ref_count > 0) {
2164             ref_list = priv->RefPicList0;
2165             for (i = 0; i < priv->short_ref_count; i++)
2166                 ref_list[i] = priv->short_ref[i];
2167             SORT_REF_LIST(ref_list, i, pic_num_dec);
2168             priv->RefPicList0_count += i;
2169         }
2170
2171         if (priv->long_ref_count > 0) {
2172             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2173             for (i = 0; i < priv->long_ref_count; i++)
2174                 ref_list[i] = priv->long_ref[i];
2175             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2176             priv->RefPicList0_count += i;
2177         }
2178     }
2179     else {
2180         /* 8.2.4.2.2 - P and SP slices in fields */
2181         GstVaapiPictureH264 *short_ref[32];
2182         guint short_ref_count = 0;
2183         GstVaapiPictureH264 *long_ref[32];
2184         guint long_ref_count = 0;
2185
2186         if (priv->short_ref_count > 0) {
2187             for (i = 0; i < priv->short_ref_count; i++)
2188                 short_ref[i] = priv->short_ref[i];
2189             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2190             short_ref_count = i;
2191         }
2192
2193         if (priv->long_ref_count > 0) {
2194             for (i = 0; i < priv->long_ref_count; i++)
2195                 long_ref[i] = priv->long_ref[i];
2196             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2197             long_ref_count = i;
2198         }
2199
2200         init_picture_refs_fields(
2201             picture,
2202             priv->RefPicList0, &priv->RefPicList0_count,
2203             short_ref,          short_ref_count,
2204             long_ref,           long_ref_count
2205         );
2206     }
2207
2208     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2209         /* RefPicList0 */
2210         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2211     }
2212 }
2213
2214 static void
2215 init_picture_refs_b_slice(
2216     GstVaapiDecoderH264 *decoder,
2217     GstVaapiPictureH264 *picture,
2218     GstH264SliceHdr     *slice_hdr
2219 )
2220 {
2221     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2222     GstVaapiPictureH264 **ref_list;
2223     guint i, n;
2224
2225     GST_DEBUG("decode reference picture list for B slices");
2226
2227     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2228         /* 8.2.4.2.3 - B slices in frames */
2229
2230         /* RefPicList0 */
2231         if (priv->short_ref_count > 0) {
2232             // 1. Short-term references
2233             ref_list = priv->RefPicList0;
2234             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2235                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2236                     ref_list[n++] = priv->short_ref[i];
2237             }
2238             SORT_REF_LIST(ref_list, n, poc_dec);
2239             priv->RefPicList0_count += n;
2240
2241             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2242             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2243                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2244                     ref_list[n++] = priv->short_ref[i];
2245             }
2246             SORT_REF_LIST(ref_list, n, poc_inc);
2247             priv->RefPicList0_count += n;
2248         }
2249
2250         if (priv->long_ref_count > 0) {
2251             // 2. Long-term references
2252             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2253             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2254                 ref_list[n++] = priv->long_ref[i];
2255             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2256             priv->RefPicList0_count += n;
2257         }
2258
2259         /* RefPicList1 */
2260         if (priv->short_ref_count > 0) {
2261             // 1. Short-term references
2262             ref_list = priv->RefPicList1;
2263             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2264                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2265                     ref_list[n++] = priv->short_ref[i];
2266             }
2267             SORT_REF_LIST(ref_list, n, poc_inc);
2268             priv->RefPicList1_count += n;
2269
2270             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2271             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2272                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2273                     ref_list[n++] = priv->short_ref[i];
2274             }
2275             SORT_REF_LIST(ref_list, n, poc_dec);
2276             priv->RefPicList1_count += n;
2277         }
2278
2279         if (priv->long_ref_count > 0) {
2280             // 2. Long-term references
2281             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2282             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2283                 ref_list[n++] = priv->long_ref[i];
2284             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2285             priv->RefPicList1_count += n;
2286         }
2287     }
2288     else {
2289         /* 8.2.4.2.4 - B slices in fields */
2290         GstVaapiPictureH264 *short_ref0[32];
2291         guint short_ref0_count = 0;
2292         GstVaapiPictureH264 *short_ref1[32];
2293         guint short_ref1_count = 0;
2294         GstVaapiPictureH264 *long_ref[32];
2295         guint long_ref_count = 0;
2296
2297         /* refFrameList0ShortTerm */
2298         if (priv->short_ref_count > 0) {
2299             ref_list = short_ref0;
2300             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2301                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2302                     ref_list[n++] = priv->short_ref[i];
2303             }
2304             SORT_REF_LIST(ref_list, n, poc_dec);
2305             short_ref0_count += n;
2306
2307             ref_list = &short_ref0[short_ref0_count];
2308             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2309                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2310                     ref_list[n++] = priv->short_ref[i];
2311             }
2312             SORT_REF_LIST(ref_list, n, poc_inc);
2313             short_ref0_count += n;
2314         }
2315
2316         /* refFrameList1ShortTerm */
2317         if (priv->short_ref_count > 0) {
2318             ref_list = short_ref1;
2319             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2320                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2321                     ref_list[n++] = priv->short_ref[i];
2322             }
2323             SORT_REF_LIST(ref_list, n, poc_inc);
2324             short_ref1_count += n;
2325
2326             ref_list = &short_ref1[short_ref1_count];
2327             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2328                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2329                     ref_list[n++] = priv->short_ref[i];
2330             }
2331             SORT_REF_LIST(ref_list, n, poc_dec);
2332             short_ref1_count += n;
2333         }
2334
2335         /* refFrameListLongTerm */
2336         if (priv->long_ref_count > 0) {
2337             for (i = 0; i < priv->long_ref_count; i++)
2338                 long_ref[i] = priv->long_ref[i];
2339             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2340             long_ref_count = i;
2341         }
2342
2343         init_picture_refs_fields(
2344             picture,
2345             priv->RefPicList0, &priv->RefPicList0_count,
2346             short_ref0,         short_ref0_count,
2347             long_ref,           long_ref_count
2348         );
2349
2350         init_picture_refs_fields(
2351             picture,
2352             priv->RefPicList1, &priv->RefPicList1_count,
2353             short_ref1,         short_ref1_count,
2354             long_ref,           long_ref_count
2355         );
2356     }
2357
2358     /* Check whether RefPicList1 is identical to RefPicList0; if so,
2359        swap its first two entries */
2360     if (priv->RefPicList1_count > 1 &&
2361         priv->RefPicList1_count == priv->RefPicList0_count &&
2362         memcmp(priv->RefPicList0, priv->RefPicList1,
2363                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2364         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2365         priv->RefPicList1[0] = priv->RefPicList1[1];
2366         priv->RefPicList1[1] = tmp;
2367     }
2368
2369     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2370         /* RefPicList0 */
2371         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2372
2373         /* RefPicList1 */
2374         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2375     }
2376 }
2377
2378 #undef SORT_REF_LIST
2379
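/* Returns the index into short_ref[] of the short-term reference picture
   with the supplied PicNum, or -1 if there is none */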
2380 static gint
2381 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2382 {
2383     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2384     guint i;
2385
2386     for (i = 0; i < priv->short_ref_count; i++) {
2387         if (priv->short_ref[i]->pic_num == pic_num)
2388             return i;
2389     }
2390     GST_ERROR("found no short-term reference picture with PicNum = %d",
2391               pic_num);
2392     return -1;
2393 }
2394
2395 static gint
2396 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2397 {
2398     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2399     guint i;
2400
2401     for (i = 0; i < priv->long_ref_count; i++) {
2402         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2403             return i;
2404     }
2405     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2406               long_term_pic_num);
2407     return -1;
2408 }
2409
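/* Applies the ref_pic_list_modification() commands to a single reference
   picture list (8.2.4.3.1, 8.2.4.3.2 and H.8.2.2.3) */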
2410 static void
2411 exec_picture_refs_modification_1(
2412     GstVaapiDecoderH264           *decoder,
2413     GstVaapiPictureH264           *picture,
2414     GstH264SliceHdr               *slice_hdr,
2415     guint                          list
2416 )
2417 {
2418     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2419     GstH264SPS * const sps = get_sps(decoder);
2420     GstH264RefPicListModification *ref_pic_list_modification;
2421     guint num_ref_pic_list_modifications;
2422     GstVaapiPictureH264 **ref_list;
2423     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2424     const guint16 *view_ids = NULL;
2425     guint i, j, n, num_refs, num_view_ids = 0;
2426     gint found_ref_idx;
2427     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2428
2429     GST_DEBUG("modification process of reference picture list %u", list);
2430
2431     if (list == 0) {
2432         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2433         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2434         ref_list                       = priv->RefPicList0;
2435         ref_list_count_ptr             = &priv->RefPicList0_count;
2436         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2437
2438         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2439             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2440             const GstH264SPSExtMVCView * const view =
2441                 &sps->extension.mvc.view[picture->base.voc];
2442             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2443                 view_ids = view->anchor_ref_l0;
2444                 num_view_ids = view->num_anchor_refs_l0;
2445             }
2446             else {
2447                 view_ids = view->non_anchor_ref_l0;
2448                 num_view_ids = view->num_non_anchor_refs_l0;
2449             }
2450         }
2451     }
2452     else {
2453         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2454         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2455         ref_list                       = priv->RefPicList1;
2456         ref_list_count_ptr             = &priv->RefPicList1_count;
2457         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2458
2459         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2460             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2461             const GstH264SPSExtMVCView * const view =
2462                 &sps->extension.mvc.view[picture->base.voc];
2463             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2464                 view_ids = view->anchor_ref_l1;
2465                 num_view_ids = view->num_anchor_refs_l1;
2466             }
2467             else {
2468                 view_ids = view->non_anchor_ref_l1;
2469                 num_view_ids = view->num_non_anchor_refs_l1;
2470             }
2471         }
2472     }
2473     ref_list_count = *ref_list_count_ptr;
2474
2475     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2476         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2477         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2478     }
2479     else {
2480         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2481         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2482     }
2483
2484     picNumPred = CurrPicNum;
2485     picViewIdxPred = -1;
2486
2487     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2488         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2489         if (l->modification_of_pic_nums_idc == 3)
2490             break;
2491
2492         /* 8.2.4.3.1 - Short-term reference pictures */
2493         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2494             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2495             gint32 picNum, picNumNoWrap;
2496
2497             // (8-34)
2498             if (l->modification_of_pic_nums_idc == 0) {
2499                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2500                 if (picNumNoWrap < 0)
2501                     picNumNoWrap += MaxPicNum;
2502             }
2503
2504             // (8-35)
2505             else {
2506                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2507                 if (picNumNoWrap >= MaxPicNum)
2508                     picNumNoWrap -= MaxPicNum;
2509             }
2510             picNumPred = picNumNoWrap;
2511
2512             // (8-36)
2513             picNum = picNumNoWrap;
2514             if (picNum > CurrPicNum)
2515                 picNum -= MaxPicNum;
2516
2517             // (8-37)
2518             for (j = num_refs; j > ref_list_idx; j--)
2519                 ref_list[j] = ref_list[j - 1];
2520             found_ref_idx = find_short_term_reference(decoder, picNum);
2521             ref_list[ref_list_idx++] =
2522                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2523             n = ref_list_idx;
2524             for (j = ref_list_idx; j <= num_refs; j++) {
2525                 gint32 PicNumF;
2526                 if (!ref_list[j])
2527                     continue;
2528                 PicNumF =
2529                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2530                     ref_list[j]->pic_num : MaxPicNum;
2531                 if (PicNumF != picNum ||
2532                     ref_list[j]->base.view_id != picture->base.view_id)
2533                     ref_list[n++] = ref_list[j];
2534             }
2535         }
2536
2537         /* 8.2.4.3.2 - Long-term reference pictures */
2538         else if (l->modification_of_pic_nums_idc == 2) {
2539
2540             for (j = num_refs; j > ref_list_idx; j--)
2541                 ref_list[j] = ref_list[j - 1];
2542             found_ref_idx =
2543                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2544             ref_list[ref_list_idx++] =
2545                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2546             n = ref_list_idx;
2547             for (j = ref_list_idx; j <= num_refs; j++) {
2548                 gint32 LongTermPicNumF;
2549                 if (!ref_list[j])
2550                     continue;
2551                 LongTermPicNumF =
2552                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2553                     ref_list[j]->long_term_pic_num : INT_MAX;
2554                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2555                     ref_list[j]->base.view_id != picture->base.view_id)
2556                     ref_list[n++] = ref_list[j];
2557             }
2558         }
2559
2560         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2561         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2562                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2563                  (l->modification_of_pic_nums_idc == 4 ||
2564                   l->modification_of_pic_nums_idc == 5)) {
2565             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2566             gint32 picViewIdx, targetViewId;
2567
2568             // (H-6)
2569             if (l->modification_of_pic_nums_idc == 4) {
2570                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2571                 if (picViewIdx < 0)
2572                     picViewIdx += num_view_ids;
2573             }
2574
2575             // (H-7)
2576             else {
2577                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2578                 if (picViewIdx >= num_view_ids)
2579                     picViewIdx -= num_view_ids;
2580             }
2581             picViewIdxPred = picViewIdx;
2582
2583             // (H-8, H-9)
2584             targetViewId = view_ids[picViewIdx];
2585
2586             // (H-10)
2587             for (j = num_refs; j > ref_list_idx; j--)
2588                 ref_list[j] = ref_list[j - 1];
2589             ref_list[ref_list_idx++] =
2590                 find_inter_view_reference(decoder, targetViewId);
2591             n = ref_list_idx;
2592             for (j = ref_list_idx; j <= num_refs; j++) {
2593                 if (!ref_list[j])
2594                     continue;
2595                 if (ref_list[j]->base.view_id != targetViewId ||
2596                     ref_list[j]->base.poc != picture->base.poc)
2597                     ref_list[n++] = ref_list[j];
2598             }
2599         }
2600     }
2601
2602 #if DEBUG
2603     for (i = 0; i < num_refs; i++)
2604         if (!ref_list[i])
2605             GST_ERROR("list %u entry %u is empty", list, i);
2606 #endif
2607     *ref_list_count_ptr = num_refs;
2608 }
2609
2610 /* 8.2.4.3 - Modification process for reference picture lists */
2611 static void
2612 exec_picture_refs_modification(
2613     GstVaapiDecoderH264 *decoder,
2614     GstVaapiPictureH264 *picture,
2615     GstH264SliceHdr     *slice_hdr
2616 )
2617 {
2618     GST_DEBUG("execute ref_pic_list_modification()");
2619
2620     /* RefPicList0 */
2621     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2622         slice_hdr->ref_pic_list_modification_flag_l0)
2623         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2624
2625     /* RefPicList1 */
2626     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2627         slice_hdr->ref_pic_list_modification_flag_l1)
2628         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2629 }
2630
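/* Collects the short-term and long-term reference pictures of the current
   view from the DPB into short_ref[] and long_ref[], as whole frames or as
   individual fields depending on the current picture structure */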
2631 static void
2632 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2633     GstVaapiPictureH264 *picture)
2634 {
2635     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2636     guint i, j, short_ref_count, long_ref_count;
2637
2638     short_ref_count = 0;
2639     long_ref_count  = 0;
2640     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2641         for (i = 0; i < priv->dpb_count; i++) {
2642             GstVaapiFrameStore * const fs = priv->dpb[i];
2643             GstVaapiPictureH264 *pic;
2644             if (!gst_vaapi_frame_store_has_frame(fs))
2645                 continue;
2646             pic = fs->buffers[0];
2647             if (pic->base.view_id != picture->base.view_id)
2648                 continue;
2649             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2650                 priv->short_ref[short_ref_count++] = pic;
2651             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2652                 priv->long_ref[long_ref_count++] = pic;
2653             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2654             pic->other_field = fs->buffers[1];
2655         }
2656     }
2657     else {
2658         for (i = 0; i < priv->dpb_count; i++) {
2659             GstVaapiFrameStore * const fs = priv->dpb[i];
2660             for (j = 0; j < fs->num_buffers; j++) {
2661                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2662                 if (pic->base.view_id != picture->base.view_id)
2663                     continue;
2664                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2665                     priv->short_ref[short_ref_count++] = pic;
2666                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2667                     priv->long_ref[long_ref_count++] = pic;
2668                 pic->structure = pic->base.structure;
2669                 pic->other_field = fs->buffers[j ^ 1];
2670             }
2671         }
2672     }
2673
2674     for (i = short_ref_count; i < priv->short_ref_count; i++)
2675         priv->short_ref[i] = NULL;
2676     priv->short_ref_count = short_ref_count;
2677
2678     for (i = long_ref_count; i < priv->long_ref_count; i++)
2679         priv->long_ref[i] = NULL;
2680     priv->long_ref_count = long_ref_count;
2681 }
2682
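/* 8.2.4 - builds the initial reference picture lists for the current slice,
   then applies the list modification commands, if any */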
2683 static void
2684 init_picture_refs(
2685     GstVaapiDecoderH264 *decoder,
2686     GstVaapiPictureH264 *picture,
2687     GstH264SliceHdr     *slice_hdr
2688 )
2689 {
2690     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2691     guint i, num_refs;
2692
2693     init_picture_ref_lists(decoder, picture);
2694     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2695
2696     priv->RefPicList0_count = 0;
2697     priv->RefPicList1_count = 0;
2698
2699     switch (slice_hdr->type % 5) {
2700     case GST_H264_P_SLICE:
2701     case GST_H264_SP_SLICE:
2702         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2703         break;
2704     case GST_H264_B_SLICE:
2705         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2706         break;
2707     default:
2708         break;
2709     }
2710
2711     exec_picture_refs_modification(decoder, picture, slice_hdr);
2712
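    /* Force the final list sizes to num_ref_idx_lX_active_minus1 + 1,
       padding with NULL entries when the constructed lists are shorter */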
2713     switch (slice_hdr->type % 5) {
2714     case GST_H264_B_SLICE:
2715         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2716         for (i = priv->RefPicList1_count; i < num_refs; i++)
2717             priv->RefPicList1[i] = NULL;
2718         priv->RefPicList1_count = num_refs;
2719
2720         // fall-through
2721     case GST_H264_P_SLICE:
2722     case GST_H264_SP_SLICE:
2723         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2724         for (i = priv->RefPicList0_count; i < num_refs; i++)
2725             priv->RefPicList0[i] = NULL;
2726         priv->RefPicList0_count = num_refs;
2727         break;
2728     default:
2729         break;
2730     }
2731 }
2732
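/* Initializes the current picture from the parsed slice header and NAL unit
   info: frame_num, MVC flags, picture structure, reference flags and POC */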
2733 static gboolean
2734 init_picture(
2735     GstVaapiDecoderH264 *decoder,
2736     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2737 {
2738     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2739     GstVaapiPicture * const base_picture = &picture->base;
2740     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2741
2742     priv->prev_frame_num        = priv->frame_num;
2743     priv->frame_num             = slice_hdr->frame_num;
2744     picture->frame_num          = priv->frame_num;
2745     picture->frame_num_wrap     = priv->frame_num;
2746     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2747     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2748     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2749     base_picture->view_id       = pi->view_id;
2750     base_picture->voc           = pi->voc;
2751
2752     /* Initialize extensions */
2753     switch (pi->nalu.extension_type) {
2754     case GST_H264_NAL_EXTENSION_MVC: {
2755         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2756
2757         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2758         if (mvc->inter_view_flag)
2759             GST_VAAPI_PICTURE_FLAG_SET(picture,
2760                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2761         if (mvc->anchor_pic_flag)
2762             GST_VAAPI_PICTURE_FLAG_SET(picture,
2763                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2764         break;
2765     }
2766     }
2767
2768     /* Reset decoder state for IDR pictures */
2769     if (pi->nalu.idr_pic_flag) {
2770         GST_DEBUG("<IDR>");
2771         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2772         dpb_flush(decoder, picture);
2773     }
2774
2775     /* Initialize picture structure */
2776     if (!slice_hdr->field_pic_flag)
2777         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2778     else {
2779         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2780         if (!slice_hdr->bottom_field_flag)
2781             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2782         else
2783             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2784     }
2785     picture->structure = base_picture->structure;
2786
2787     /* Initialize reference flags */
2788     if (pi->nalu.ref_idc) {
2789         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2790             &slice_hdr->dec_ref_pic_marking;
2791
2792         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2793             dec_ref_pic_marking->long_term_reference_flag)
2794             GST_VAAPI_PICTURE_FLAG_SET(picture,
2795                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2796         else
2797             GST_VAAPI_PICTURE_FLAG_SET(picture,
2798                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2799     }
2800
2801     init_picture_poc(decoder, picture, slice_hdr);
2802     return TRUE;
2803 }
2804
2805 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2806 static gboolean
2807 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2808 {
2809     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2810     GstH264SPS * const sps = get_sps(decoder);
2811     GstVaapiPictureH264 *ref_picture;
2812     guint i, m, max_num_ref_frames;
2813
2814     GST_DEBUG("reference picture marking process (sliding window)");
2815
2816     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2817         return TRUE;
2818
2819     max_num_ref_frames = sps->num_ref_frames;
2820     if (max_num_ref_frames == 0)
2821         max_num_ref_frames = 1;
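    /* Fields are tracked individually in short_ref[]/long_ref[], so the
       sliding window holds twice as many entries for field pictures */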
2822     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2823         max_num_ref_frames <<= 1;
2824
2825     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2826         return TRUE;
2827     if (priv->short_ref_count < 1)
2828         return FALSE;
2829
2830     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2831         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2832         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2833             m = i;
2834     }
2835
2836     ref_picture = priv->short_ref[m];
2837     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2838     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2839
2840     /* Both fields need to be marked as "unused for reference", so
2841        remove the other field from the short_ref[] list as well */
2842     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2843         for (i = 0; i < priv->short_ref_count; i++) {
2844             if (priv->short_ref[i] == ref_picture->other_field) {
2845                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2846                 break;
2847             }
2848         }
2849     }
2850     return TRUE;
2851 }
2852
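/* 8.2.5.4.1: derives picNumX for the current picture from the
   difference_of_pic_nums_minus1 value of the MMCO command */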
2853 static inline gint32
2854 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2855 {
2856     gint32 pic_num;
2857
2858     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2859         pic_num = picture->frame_num_wrap;
2860     else
2861         pic_num = 2 * picture->frame_num_wrap + 1;
2862     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2863     return pic_num;
2864 }
2865
2866 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2867 static void
2868 exec_ref_pic_marking_adaptive_mmco_1(
2869     GstVaapiDecoderH264  *decoder,
2870     GstVaapiPictureH264  *picture,
2871     GstH264RefPicMarking *ref_pic_marking
2872 )
2873 {
2874     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2875     gint32 i, picNumX;
2876
2877     picNumX = get_picNumX(picture, ref_pic_marking);
2878     i = find_short_term_reference(decoder, picNumX);
2879     if (i < 0)
2880         return;
2881
2882     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2883         GST_VAAPI_PICTURE_IS_FRAME(picture));
2884     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2885 }
2886
2887 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2888 static void
2889 exec_ref_pic_marking_adaptive_mmco_2(
2890     GstVaapiDecoderH264  *decoder,
2891     GstVaapiPictureH264  *picture,
2892     GstH264RefPicMarking *ref_pic_marking
2893 )
2894 {
2895     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2896     gint32 i;
2897
2898     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2899     if (i < 0)
2900         return;
2901
2902     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2903         GST_VAAPI_PICTURE_IS_FRAME(picture));
2904     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2905 }
2906
2907 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2908 static void
2909 exec_ref_pic_marking_adaptive_mmco_3(
2910     GstVaapiDecoderH264  *decoder,
2911     GstVaapiPictureH264  *picture,
2912     GstH264RefPicMarking *ref_pic_marking
2913 )
2914 {
2915     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2916     GstVaapiPictureH264 *ref_picture, *other_field;
2917     gint32 i, picNumX;
2918
2919     for (i = 0; i < priv->long_ref_count; i++) {
2920         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2921             break;
2922     }
2923     if (i != priv->long_ref_count) {
2924         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2925         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2926     }
2927
2928     picNumX = get_picNumX(picture, ref_pic_marking);
2929     i = find_short_term_reference(decoder, picNumX);
2930     if (i < 0)
2931         return;
2932
2933     ref_picture = priv->short_ref[i];
2934     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2935     priv->long_ref[priv->long_ref_count++] = ref_picture;
2936
2937     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2938     gst_vaapi_picture_h264_set_reference(ref_picture,
2939         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
2940         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
2941
2942     /* Assign LongTermFrameIdx to the other field if it was also
2943        marked as "used for long-term reference" */
2944     other_field = ref_picture->other_field;
2945     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
2946         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
2947 }
2948
2949 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
2950  * as "unused for reference" */
2951 static void
2952 exec_ref_pic_marking_adaptive_mmco_4(
2953     GstVaapiDecoderH264  *decoder,
2954     GstVaapiPictureH264  *picture,
2955     GstH264RefPicMarking *ref_pic_marking
2956 )
2957 {
2958     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2959     gint32 i, long_term_frame_idx;
2960
2961     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
2962
2963     for (i = 0; i < priv->long_ref_count; i++) {
2964         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
2965             continue;
2966         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
2967         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2968         i--;
2969     }
2970 }
2971
2972 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
2973 static void
2974 exec_ref_pic_marking_adaptive_mmco_5(
2975     GstVaapiDecoderH264  *decoder,
2976     GstVaapiPictureH264  *picture,
2977     GstH264RefPicMarking *ref_pic_marking
2978 )
2979 {
2980     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2981
2982     dpb_flush(decoder, picture);
2983
2984     priv->prev_pic_has_mmco5 = TRUE;
2985
2986     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
2987     priv->frame_num = 0;
2988     priv->frame_num_offset = 0;
2989     picture->frame_num = 0;
2990
2991     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
2992     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
2993         picture->field_poc[TOP_FIELD] -= picture->base.poc;
2994     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
2995         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
2996     picture->base.poc = 0;
2997 }
2998
2999 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3000 static void
3001 exec_ref_pic_marking_adaptive_mmco_6(
3002     GstVaapiDecoderH264  *decoder,
3003     GstVaapiPictureH264  *picture,
3004     GstH264RefPicMarking *ref_pic_marking
3005 )
3006 {
3007     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3008     GstVaapiPictureH264 *other_field;
3009     guint i;
3010
3011     for (i = 0; i < priv->long_ref_count; i++) {
3012         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3013             break;
3014     }
3015     if (i != priv->long_ref_count) {
3016         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3017         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3018     }
3019
3020     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3021     gst_vaapi_picture_h264_set_reference(picture,
3022         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3023         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3024
3025     /* Assign LongTermFrameIdx to the other field if it was also
3026        marked as "used for long-term reference" */
3027     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3028     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3029         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3030 }
3031
3032 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3033 static gboolean
3034 exec_ref_pic_marking_adaptive(
3035     GstVaapiDecoderH264     *decoder,
3036     GstVaapiPictureH264     *picture,
3037     GstH264DecRefPicMarking *dec_ref_pic_marking
3038 )
3039 {
3040     guint i;
3041
3042     GST_DEBUG("reference picture marking process (adaptive memory control)");
3043
3044     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3045         GstVaapiDecoderH264  *decoder,
3046         GstVaapiPictureH264  *picture,
3047         GstH264RefPicMarking *ref_pic_marking
3048     );
3049
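    /* MMCO values 1..6 index directly into this table; slot 0 stays NULL
       since memory_management_control_operation equal to 0 only terminates
       the marking list in the bitstream (7.4.3.3) */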
3050     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3051         NULL,
3052         exec_ref_pic_marking_adaptive_mmco_1,
3053         exec_ref_pic_marking_adaptive_mmco_2,
3054         exec_ref_pic_marking_adaptive_mmco_3,
3055         exec_ref_pic_marking_adaptive_mmco_4,
3056         exec_ref_pic_marking_adaptive_mmco_5,
3057         exec_ref_pic_marking_adaptive_mmco_6,
3058     };
3059
3060     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3061         GstH264RefPicMarking * const ref_pic_marking =
3062             &dec_ref_pic_marking->ref_pic_marking[i];
3063
3064         const guint mmco = ref_pic_marking->memory_management_control_operation;
3065         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3066             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3067         else {
3068             GST_ERROR("unhandled MMCO %u", mmco);
3069             return FALSE;
3070         }
3071     }
3072     return TRUE;
3073 }
3074
3075 /* 8.2.5 - Execute reference picture marking process */
3076 static gboolean
3077 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3078 {
3079     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3080
3081     priv->prev_pic_has_mmco5 = FALSE;
3082     priv->prev_pic_structure = picture->structure;
3083
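    /* Remember the current picture as a candidate inter-view reference;
       the inter_views array is reset again when the next access unit
       starts (see decode_picture()) */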
3084     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3085         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3086
3087     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3088         return TRUE;
3089
3090     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3091         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3092             &picture->last_slice_hdr->dec_ref_pic_marking;
3093         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3094             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3095                 return FALSE;
3096         }
3097         else {
3098             if (!exec_ref_pic_marking_sliding_window(decoder))
3099                 return FALSE;
3100         }
3101     }
3102     return TRUE;
3103 }
3104
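/* Reset a VAPictureH264 entry to the "invalid / unused" state so the
   driver ignores it */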
3105 static void
3106 vaapi_init_picture(VAPictureH264 *pic)
3107 {
3108     pic->picture_id           = VA_INVALID_ID;
3109     pic->frame_idx            = 0;
3110     pic->flags                = VA_PICTURE_H264_INVALID;
3111     pic->TopFieldOrderCnt     = 0;
3112     pic->BottomFieldOrderCnt  = 0;
3113 }
3114
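/* Translate the decoder's picture state (reference type, frame_num or
   LongTermFrameIdx, field POCs) into a VAPictureH264 entry for the
   requested picture structure */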
3115 static void
3116 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3117     guint picture_structure)
3118 {
3119     if (!picture_structure)
3120         picture_structure = picture->structure;
3121
3122     pic->picture_id = picture->base.surface_id;
3123     pic->flags = 0;
3124
3125     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3126         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3127         pic->frame_idx = picture->long_term_frame_idx;
3128     }
3129     else {
3130         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3131             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3132         pic->frame_idx = picture->frame_num;
3133     }
3134
3135     switch (picture_structure) {
3136     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3137         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3138         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3139         break;
3140     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3141         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3142         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3143         pic->BottomFieldOrderCnt = 0;
3144         break;
3145     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3146         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3147         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3148         pic->TopFieldOrderCnt = 0;
3149         break;
3150     }
3151 }
3152
3153 static void
3154 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3155     GstVaapiPictureH264 *picture)
3156 {
3157     vaapi_fill_picture(pic, picture, 0);
3158
3159     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3160     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3161         /* The inter-view reference components and inter-view only
3162            reference components that are included in the reference
3163            picture lists are considered as not being marked as "used for
3164            short-term reference" or "used for long-term reference" */
3165         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3166                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3167     }
3168 }
3169
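/* Fill in the VAPictureParameterBufferH264: current picture, the
   ReferenceFrames[] array built from the DPB (same-view references plus
   usable inter-view references), and the SPS/PPS derived fields */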
3170 static gboolean
3171 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3172 {
3173     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3174     GstVaapiPicture * const base_picture = &picture->base;
3175     GstH264PPS * const pps = get_pps(decoder);
3176     GstH264SPS * const sps = get_sps(decoder);
3177     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3178     guint i, n;
3179
3180     /* Fill in VAPictureParameterBufferH264 */
3181     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3182
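    /* A DPB entry is exported as a reference frame if it holds a
       reference in the current view, or if it is an inter-view reference
       component usable by the current picture (H.8.4) */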
3183     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3184         GstVaapiFrameStore * const fs = priv->dpb[i];
3185         if ((gst_vaapi_frame_store_has_reference(fs) &&
3186              fs->view_id == picture->base.view_id) ||
3187             (gst_vaapi_frame_store_has_inter_view(fs) &&
3188              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3189             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3190                 fs->buffers[0], fs->structure);
3191         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3192             break;
3193     }
3194     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3195         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3196
3197 #define COPY_FIELD(s, f) \
3198     pic_param->f = (s)->f
3199
3200 #define COPY_BFM(a, s, f) \
3201     pic_param->a.bits.f = (s)->f
3202
3203     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3204     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3205     pic_param->frame_num                    = priv->frame_num;
3206
3207     COPY_FIELD(sps, bit_depth_luma_minus8);
3208     COPY_FIELD(sps, bit_depth_chroma_minus8);
3209     COPY_FIELD(sps, num_ref_frames);
3210     COPY_FIELD(pps, num_slice_groups_minus1);
3211     COPY_FIELD(pps, slice_group_map_type);
3212     COPY_FIELD(pps, slice_group_change_rate_minus1);
3213     COPY_FIELD(pps, pic_init_qp_minus26);
3214     COPY_FIELD(pps, pic_init_qs_minus26);
3215     COPY_FIELD(pps, chroma_qp_index_offset);
3216     COPY_FIELD(pps, second_chroma_qp_index_offset);
3217
3218     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3219     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3220     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3221
3222     COPY_BFM(seq_fields, sps, chroma_format_idc);
3223     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3224     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3225     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3226     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3227     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3228     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3229     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3230     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3231
3232     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3233     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3234     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3235
3236     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3237     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3238     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3239     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3240     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3241     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3242     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3243     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3244     return TRUE;
3245 }
3246
3247 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3248 static gboolean
3249 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3250 {
3251     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3252     GstH264PPS * const pps = slice_hdr->pps;
3253     GstH264SPS * const sps = pps->sequence;
3254     GstH264SliceHdr *prev_slice_hdr;
3255
3256     if (!prev_pi)
3257         return TRUE;
3258     prev_slice_hdr = &prev_pi->data.slice_hdr;
3259
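/* Each check below returns TRUE (i.e. "new picture") as soon as one of
   the conditions from 7.4.1.2.4 is met */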
3260 #define CHECK_EXPR(expr, field_name) do {              \
3261         if (!(expr)) {                                 \
3262             GST_DEBUG(field_name " differs in value"); \
3263             return TRUE;                               \
3264         }                                              \
3265     } while (0)
3266
3267 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3268     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3269
3270     /* view_id differs in value and VOIdx of current slice_hdr is less
3271        than the VOIdx of the prev_slice_hdr */
3272     CHECK_VALUE(pi, prev_pi, view_id);
3273
3274     /* frame_num differs in value, regardless of inferred values to 0 */
3275     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3276
3277     /* pic_parameter_set_id differs in value */
3278     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3279
3280     /* field_pic_flag differs in value */
3281     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3282
3283     /* bottom_field_flag is present in both and differs in value */
3284     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3285         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3286
3287     /* nal_ref_idc differs in value, with one of the nal_ref_idc values equal to 0 */
3288     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3289                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3290
3291     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3292        value or delta_pic_order_cnt_bottom differs in value */
3293     if (sps->pic_order_cnt_type == 0) {
3294         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3295         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3296             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3297     }
3298
3299     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3300        differs in value or delta_pic_order_cnt[1] differs in value */
3301     else if (sps->pic_order_cnt_type == 1) {
3302         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3303         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3304     }
3305
3306     /* IdrPicFlag differs in value */
3307     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3308
3309     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3310     if (pi->nalu.idr_pic_flag)
3311         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3312
3313 #undef CHECK_EXPR
3314 #undef CHECK_VALUE
3315     return FALSE;
3316 }
3317
3318 /* Detection of a new access unit, assuming a new picture was already
3319    detected */
3320 static inline gboolean
3321 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3322 {
3323     if (!prev_pi || prev_pi->view_id == pi->view_id)
3324         return TRUE;
3325     return pi->voc < prev_pi->voc;
3326 }
3327
3328 /* Finds the first field picture corresponding to the supplied picture */
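/* The previous frame store is tracked per view (indexed by the parsed
   view order, pi->voc), so field pairing is handled independently for
   each MVC view */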
3329 static GstVaapiPictureH264 *
3330 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3331 {
3332     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3333     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3334     GstVaapiFrameStore *fs;
3335
3336     if (!slice_hdr->field_pic_flag)
3337         return NULL;
3338
3339     fs = priv->prev_frames[pi->voc];
3340     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3341         return NULL;
3342
3343     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3344         return fs->buffers[0];
3345     return NULL;
3346 }
3347
3348 static GstVaapiDecoderStatus
3349 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3350 {
3351     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3352     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3353     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3354     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3355     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3356     GstVaapiPictureH264 *picture, *first_field;
3357     GstVaapiDecoderStatus status;
3358
3359     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3360     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3361
3362     /* MVC profiles: the substream drop path below is disabled (if (0)),
3363        since MVC slice extensions are decoded as well */
3363     switch (sps->profile_idc) {
3364     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3365     case GST_H264_PROFILE_STEREO_HIGH:
3366         if (0) {
3367             GST_DEBUG("drop picture from substream");
3368             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3369         }
3370         break;
3371     }
3372
3373     status = ensure_context(decoder, sps);
3374     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3375         return status;
3376
3377     priv->decoder_state = 0;
3378
3379     first_field = find_first_field(decoder, pi);
3380     if (first_field) {
3381         /* Re-use current picture where the first field was decoded */
3382         picture = gst_vaapi_picture_h264_new_field(first_field);
3383         if (!picture) {
3384             GST_ERROR("failed to allocate field picture");
3385             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3386         }
3387     }
3388     else {
3389         /* Create new picture */
3390         picture = gst_vaapi_picture_h264_new(decoder);
3391         if (!picture) {
3392             GST_ERROR("failed to allocate picture");
3393             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3394         }
3395     }
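    /* priv->current_picture takes its own reference in the replace call,
       so the reference returned by the constructor is dropped right away */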
3396     gst_vaapi_picture_replace(&priv->current_picture, picture);
3397     gst_vaapi_picture_unref(picture);
3398
3399     /* Clear inter-view references list if this is the primary coded
3400        picture of the current access unit */
3401     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3402         g_ptr_array_set_size(priv->inter_views, 0);
3403
3404     /* Update cropping rectangle */
3405     if (sps->frame_cropping_flag) {
3406         GstVaapiRectangle crop_rect;
3407         crop_rect.x = sps->crop_rect_x;
3408         crop_rect.y = sps->crop_rect_y;
3409         crop_rect.width = sps->crop_rect_width;
3410         crop_rect.height = sps->crop_rect_height;
3411         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3412     }
3413
3414     status = ensure_quant_matrix(decoder, picture);
3415     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3416         GST_ERROR("failed to reset quantizer matrix");
3417         return status;
3418     }
3419
3420     if (!init_picture(decoder, picture, pi))
3421         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3422     if (!fill_picture(decoder, picture))
3423         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3424
3425     priv->decoder_state = pi->state;
3426     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3427 }
3428
3429 static inline guint
3430 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3431 {
3432     guint epb_count;
3433
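    /* First slice-data bit = NAL header bits + parsed slice header size,
       minus the emulation prevention bytes counted within the header */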
3434     epb_count = slice_hdr->n_emulation_prevention_bytes;
3435     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3436 }
3437
3438 static gboolean
3439 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3440     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3441 {
3442     VASliceParameterBufferH264 * const slice_param = slice->param;
3443     GstH264PPS * const pps = get_pps(decoder);
3444     GstH264SPS * const sps = get_sps(decoder);
3445     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3446     guint num_weight_tables = 0;
3447     gint i, j;
3448
3449     if (pps->weighted_pred_flag &&
3450         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3451         num_weight_tables = 1;
3452     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3453         num_weight_tables = 2;
3454     else
3455         num_weight_tables = 0;
3456
3457     slice_param->luma_log2_weight_denom   = 0;
3458     slice_param->chroma_log2_weight_denom = 0;
3459     slice_param->luma_weight_l0_flag      = 0;
3460     slice_param->chroma_weight_l0_flag    = 0;
3461     slice_param->luma_weight_l1_flag      = 0;
3462     slice_param->chroma_weight_l1_flag    = 0;
3463
3464     if (num_weight_tables < 1)
3465         return TRUE;
3466
3467     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3468     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3469
3470     slice_param->luma_weight_l0_flag = 1;
3471     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3472         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3473         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3474     }
3475
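    /* Chroma weights are only meaningful when ChromaArrayType is non-zero,
       i.e. not monochrome and no separate colour planes */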
3476     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3477     if (slice_param->chroma_weight_l0_flag) {
3478         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3479             for (j = 0; j < 2; j++) {
3480                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3481                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3482             }
3483         }
3484     }
3485
3486     if (num_weight_tables < 2)
3487         return TRUE;
3488
3489     slice_param->luma_weight_l1_flag = 1;
3490     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3491         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3492         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3493     }
3494
3495     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3496     if (slice_param->chroma_weight_l1_flag) {
3497         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3498             for (j = 0; j < 2; j++) {
3499                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3500                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3501             }
3502         }
3503     }
3504     return TRUE;
3505 }
3506
3507 static gboolean
3508 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3509     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3510 {
3511     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3512     VASliceParameterBufferH264 * const slice_param = slice->param;
3513     guint i, num_ref_lists = 0;
3514
3515     slice_param->num_ref_idx_l0_active_minus1 = 0;
3516     slice_param->num_ref_idx_l1_active_minus1 = 0;
3517
3518     if (GST_H264_IS_B_SLICE(slice_hdr))
3519         num_ref_lists = 2;
3520     else if (GST_H264_IS_I_SLICE(slice_hdr))
3521         num_ref_lists = 0;
3522     else
3523         num_ref_lists = 1;
3524
3525     if (num_ref_lists < 1)
3526         return TRUE;
3527
3528     slice_param->num_ref_idx_l0_active_minus1 =
3529         slice_hdr->num_ref_idx_l0_active_minus1;
3530
3531     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3532         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3533             priv->RefPicList0[i]);
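    /* Pad the remaining active entries with invalid pictures so the
       driver does not pick up stale data */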
3534     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3535         vaapi_init_picture(&slice_param->RefPicList0[i]);
3536
3537     if (num_ref_lists < 2)
3538         return TRUE;
3539
3540     slice_param->num_ref_idx_l1_active_minus1 =
3541         slice_hdr->num_ref_idx_l1_active_minus1;
3542
3543     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3544         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3545             priv->RefPicList1[i]);
3546     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3547         vaapi_init_picture(&slice_param->RefPicList1[i]);
3548     return TRUE;
3549 }
3550
3551 static gboolean
3552 fill_slice(GstVaapiDecoderH264 *decoder,
3553     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3554 {
3555     VASliceParameterBufferH264 * const slice_param = slice->param;
3556     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3557
3558     /* Fill in VASliceParameterBufferH264 */
3559     slice_param->slice_data_bit_offset =
3560         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3561     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3562     slice_param->slice_type                     = slice_hdr->type % 5;
3563     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3564     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3565     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3566     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3567     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3568     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3569
3570     if (!fill_RefPicList(decoder, slice, slice_hdr))
3571         return FALSE;
3572     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3573         return FALSE;
3574     return TRUE;
3575 }
3576
3577 static GstVaapiDecoderStatus
3578 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3579 {
3580     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3581     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3582     GstVaapiPictureH264 * const picture = priv->current_picture;
3583     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3584     GstVaapiSlice *slice;
3585     GstBuffer * const buffer =
3586         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3587     GstMapInfo map_info;
3588
3589     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3590
3591     if (!is_valid_state(pi->state,
3592             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3593         GST_WARNING("failed to receive enough headers to decode slice");
3594         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3595     }
3596
3597     if (!ensure_pps(decoder, slice_hdr->pps)) {
3598         GST_ERROR("failed to activate PPS");
3599         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3600     }
3601
3602     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3603         GST_ERROR("failed to activate SPS");
3604         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3605     }
3606
3607     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3608         GST_ERROR("failed to map buffer");
3609         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3610     }
3611
3612     /* Check whether this is the first/last slice in the current access unit */
3613     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3614         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3615     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3616         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3617
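    /* The slice data is copied into a VA slice-data buffer at creation
       time, which is why the input buffer can be unmapped immediately
       after the allocation below */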
3618     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3619         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3620     gst_buffer_unmap(buffer, &map_info);
3621     if (!slice) {
3622         GST_ERROR("failed to allocate slice");
3623         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3624     }
3625
3626     init_picture_refs(decoder, picture, slice_hdr);
3627     if (!fill_slice(decoder, slice, pi)) {
3628         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3629         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3630     }
3631
3632     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3633     picture->last_slice_hdr = slice_hdr;
3634     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3635     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3636 }
3637
3638 static inline gint
3639 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3640 {
3641     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3642                                                      0xffffff00, 0x00000100,
3643                                                      ofs, size,
3644                                                      scp);
3645 }
3646
3647 static GstVaapiDecoderStatus
3648 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3649 {
3650     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3651     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3652     GstVaapiDecoderStatus status;
3653
3654     priv->decoder_state |= pi->state;
3655     switch (pi->nalu.type) {
3656     case GST_H264_NAL_SPS:
3657         status = decode_sps(decoder, unit);
3658         break;
3659     case GST_H264_NAL_SUBSET_SPS:
3660         status = decode_subset_sps(decoder, unit);
3661         break;
3662     case GST_H264_NAL_PPS:
3663         status = decode_pps(decoder, unit);
3664         break;
3665     case GST_H264_NAL_SLICE_EXT:
3666     case GST_H264_NAL_SLICE_IDR:
3667         /* fall-through. IDR specifics are handled in init_picture() */
3668     case GST_H264_NAL_SLICE:
3669         status = decode_slice(decoder, unit);
3670         break;
3671     case GST_H264_NAL_SEQ_END:
3672     case GST_H264_NAL_STREAM_END:
3673         status = decode_sequence_end(decoder);
3674         break;
3675     case GST_H264_NAL_SEI:
3676         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3677         break;
3678     default:
3679         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3680         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3681         break;
3682     }
3683     return status;
3684 }
3685
3686 static GstVaapiDecoderStatus
3687 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3688     const guchar *buf, guint buf_size)
3689 {
3690     GstVaapiDecoderH264 * const decoder =
3691         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3692     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3693     GstVaapiDecoderStatus status;
3694     GstVaapiDecoderUnit unit;
3695     GstVaapiParserInfoH264 *pi = NULL;
3696     GstH264ParserResult result;
3697     guint i, ofs, num_sps, num_pps;
3698
3699     unit.parsed_info = NULL;
3700
3701     if (buf_size < 8)
3702         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3703
3704     if (buf[0] != 1) {
3705         GST_ERROR("failed to decode codec-data, not in avcC format");
3706         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3707     }
3708
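    /* avcC layout: byte 4 carries lengthSizeMinusOne in its two low bits,
       byte 5 the SPS count in its five low bits, followed by the
       length-prefixed SPS and then PPS NAL units */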
3709     priv->nal_length_size = (buf[4] & 0x03) + 1;
3710
3711     num_sps = buf[5] & 0x1f;
3712     ofs = 6;
3713
3714     for (i = 0; i < num_sps; i++) {
3715         pi = gst_vaapi_parser_info_h264_new();
3716         if (!pi)
3717             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3718         unit.parsed_info = pi;
3719
3720         result = gst_h264_parser_identify_nalu_avc(
3721             priv->parser,
3722             buf, ofs, buf_size, 2,
3723             &pi->nalu
3724         );
3725         if (result != GST_H264_PARSER_OK) {
3726             status = get_status(result);
3727             goto cleanup;
3728         }
3729
3730         status = parse_sps(decoder, &unit);
3731         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3732             goto cleanup;
3733         ofs = pi->nalu.offset + pi->nalu.size;
3734
3735         status = decode_sps(decoder, &unit);
3736         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3737             goto cleanup;
3738         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3739     }
3740
3741     num_pps = buf[ofs];
3742     ofs++;
3743
3744     for (i = 0; i < num_pps; i++) {
3745         pi = gst_vaapi_parser_info_h264_new();
3746         if (!pi)
3747             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3748         unit.parsed_info = pi;
3749
3750         result = gst_h264_parser_identify_nalu_avc(
3751             priv->parser,
3752             buf, ofs, buf_size, 2,
3753             &pi->nalu
3754         );
3755         if (result != GST_H264_PARSER_OK) {
3756             status = get_status(result);
3757             goto cleanup;
3758         }
3759
3760         status = parse_pps(decoder, &unit);
3761         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3762             goto cleanup;
3763         ofs = pi->nalu.offset + pi->nalu.size;
3764
3765         status = decode_pps(decoder, &unit);
3766         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3767             goto cleanup;
3768         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3769     }
3770
3771     priv->is_avcC = TRUE;
3772     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3773
3774 cleanup:
3775     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3776     return status;
3777 }
3778
3779 static GstVaapiDecoderStatus
3780 ensure_decoder(GstVaapiDecoderH264 *decoder)
3781 {
3782     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3783     GstVaapiDecoderStatus status;
3784
3785     if (!priv->is_opened) {
3786         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3787         if (!priv->is_opened)
3788             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3789
3790         status = gst_vaapi_decoder_decode_codec_data(
3791             GST_VAAPI_DECODER_CAST(decoder));
3792         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3793             return status;
3794     }
3795     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3796 }
3797
3798 static GstVaapiDecoderStatus
3799 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3800     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3801 {
3802     GstVaapiDecoderH264 * const decoder =
3803         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3804     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3805     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3806     GstVaapiParserInfoH264 *pi;
3807     GstVaapiDecoderStatus status;
3808     GstH264ParserResult result;
3809     guchar *buf;
3810     guint i, size, buf_size, nalu_size, flags;
3811     guint32 start_code;
3812     gint ofs, ofs2;
3813
3814     status = ensure_decoder(decoder);
3815     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3816         return status;
3817
3818     size = gst_adapter_available(adapter);
3819
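    /* Two framing modes: avcC streams carry length-prefixed NAL units
       (nal_length_size bytes), whereas raw byte-streams are delimited by
       Annex B start codes */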
3820     if (priv->is_avcC) {
3821         if (size < priv->nal_length_size)
3822             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3823
3824         buf = (guchar *)&start_code;
3825         g_assert(priv->nal_length_size <= sizeof(start_code));
3826         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3827
3828         nalu_size = 0;
3829         for (i = 0; i < priv->nal_length_size; i++)
3830             nalu_size = (nalu_size << 8) | buf[i];
3831
3832         buf_size = priv->nal_length_size + nalu_size;
3833         if (size < buf_size)
3834             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3835     }
3836     else {
3837         if (size < 4)
3838             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3839
3840         ofs = scan_for_start_code(adapter, 0, size, NULL);
3841         if (ofs < 0)
3842             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3843
3844         if (ofs > 0) {
3845             gst_adapter_flush(adapter, ofs);
3846             size -= ofs;
3847         }
3848
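        /* input_offset2 remembers how far the previous call scanned
           without finding the next start code, so scanning resumes from
           there instead of starting over */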
3849         ofs2 = ps->input_offset2 - ofs - 4;
3850         if (ofs2 < 4)
3851             ofs2 = 4;
3852
3853         ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3854             scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3855         if (ofs < 0) {
3856             /* Assume the whole NAL unit is present if end-of-stream */
3857             if (!at_eos) {
3858                 ps->input_offset2 = size;
3859                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3860             }
3861             ofs = size;
3862         }
3863         buf_size = ofs;
3864     }
3865     ps->input_offset2 = 0;
3866
3867     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3868     if (!buf)
3869         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3870
3871     unit->size = buf_size;
3872
3873     pi = gst_vaapi_parser_info_h264_new();
3874     if (!pi)
3875         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3876
3877     gst_vaapi_decoder_unit_set_parsed_info(unit,
3878         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3879
3880     if (priv->is_avcC)
3881         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3882             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3883     else
3884         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3885             buf, 0, buf_size, &pi->nalu);
3886     status = get_status(result);
3887     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3888         return status;
3889
3890     switch (pi->nalu.type) {
3891     case GST_H264_NAL_SPS:
3892         status = parse_sps(decoder, unit);
3893         break;
3894     case GST_H264_NAL_SUBSET_SPS:
3895         status = parse_subset_sps(decoder, unit);
3896         break;
3897     case GST_H264_NAL_PPS:
3898         status = parse_pps(decoder, unit);
3899         break;
3900     case GST_H264_NAL_SEI:
3901         status = parse_sei(decoder, unit);
3902         break;
3903     case GST_H264_NAL_SLICE_EXT:
3904         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3905             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3906             break;
3907         }
3908         /* fall-through */
3909     case GST_H264_NAL_SLICE_IDR:
3910     case GST_H264_NAL_SLICE:
3911         status = parse_slice(decoder, unit);
3912         break;
3913     default:
3914         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3915         break;
3916     }
3917     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3918         return status;
3919
3920     flags = 0;
3921     switch (pi->nalu.type) {
3922     case GST_H264_NAL_AU_DELIMITER:
3923         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3924         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3925         /* fall-through */
3926     case GST_H264_NAL_FILLER_DATA:
3927         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3928         break;
3929     case GST_H264_NAL_STREAM_END:
3930         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3931         /* fall-through */
3932     case GST_H264_NAL_SEQ_END:
3933         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3934         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3935         break;
3936     case GST_H264_NAL_SPS:
3937     case GST_H264_NAL_SUBSET_SPS:
3938     case GST_H264_NAL_PPS:
3939     case GST_H264_NAL_SEI:
3940         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3941         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3942         break;
3943     case GST_H264_NAL_SLICE_EXT:
3944         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3945             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3946             break;
3947         }
3948         /* fall-through */
3949     case GST_H264_NAL_SLICE_IDR:
3950     case GST_H264_NAL_SLICE:
3951         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
3952         if (is_new_picture(pi, priv->prev_slice_pi)) {
3953             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3954             if (is_new_access_unit(pi, priv->prev_slice_pi))
3955                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3956         }
3957         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
3958         break;
3959     case GST_H264_NAL_SPS_EXT:
3960     case GST_H264_NAL_SLICE_AUX:
3961         /* skip SPS extension and auxiliary slice for now */
3962         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3963         break;
3964     case GST_H264_NAL_PREFIX_UNIT:
3965         /* skip Prefix NAL units for now */
3966         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
3967             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
3968             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3969         break;
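    /* Per 7.4.1.2.3, NAL unit types in the 14..18 range (prefix, subset
       SPS, reserved) may start a new access unit */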
3970     default:
3971         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
3972             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
3973                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3974         break;
3975     }
3976     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
3977         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
3978     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
3979
3980     pi->nalu.data = NULL;
3981     pi->state = priv->parser_state;
3982     pi->flags = flags;
3983     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
3984     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3985 }
3986
3987 static GstVaapiDecoderStatus
3988 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
3989     GstVaapiDecoderUnit *unit)
3990 {
3991     GstVaapiDecoderH264 * const decoder =
3992         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3993     GstVaapiDecoderStatus status;
3994
3995     status = ensure_decoder(decoder);
3996     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3997         return status;
3998     return decode_unit(decoder, unit);
3999 }
4000
4001 static GstVaapiDecoderStatus
4002 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4003     GstVaapiDecoderUnit *unit)
4004 {
4005     GstVaapiDecoderH264 * const decoder =
4006         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4007
4008     return decode_picture(decoder, unit);
4009 }
4010
4011 static GstVaapiDecoderStatus
4012 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4013 {
4014     GstVaapiDecoderH264 * const decoder =
4015         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4016
4017     return decode_current_picture(decoder);
4018 }
4019
4020 static GstVaapiDecoderStatus
4021 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4022 {
4023     GstVaapiDecoderH264 * const decoder =
4024         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4025
4026     dpb_flush(decoder, NULL);
4027     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4028 }
4029
4030 static void
4031 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4032 {
4033     GstVaapiMiniObjectClass * const object_class =
4034         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4035     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4036
4037     object_class->size          = sizeof(GstVaapiDecoderH264);
4038     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4039
4040     decoder_class->create       = gst_vaapi_decoder_h264_create;
4041     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4042     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4043     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4044     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4045     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4046     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4047
4048     decoder_class->decode_codec_data =
4049         gst_vaapi_decoder_h264_decode_codec_data;
4050 }
4051
4052 static inline const GstVaapiDecoderClass *
4053 gst_vaapi_decoder_h264_class(void)
4054 {
4055     static GstVaapiDecoderH264Class g_class;
4056     static gsize g_class_init = FALSE;
4057
4058     if (g_once_init_enter(&g_class_init)) {
4059         gst_vaapi_decoder_h264_class_init(&g_class);
4060         g_once_init_leave(&g_class_init, TRUE);
4061     }
4062     return GST_VAAPI_DECODER_CLASS(&g_class);
4063 }
4064
4065 /**
4066  * gst_vaapi_decoder_h264_new:
4067  * @display: a #GstVaapiDisplay
4068  * @caps: a #GstCaps holding codec information
4069  *
4070  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4071  * hold extra information like codec-data and picture coded size.
4072  *
4073  * Return value: the newly allocated #GstVaapiDecoder object
4074  */
4075 GstVaapiDecoder *
4076 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4077 {
4078     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4079 }