decoder: h264: fix tracking of DPB size changes.
gst-libs/gst/vaapi/gstvaapidecoder_h264.c (platform/upstream/gstreamer-vaapi.git)
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Define to 1 if strict ordering of the DPB is needed. Only useful for debugging */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* These flags do not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit; they only convey
69        whether the current slice is the first one or the last one in
70        the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
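/* Illustrative sketch, not part of the original decoder: testing the access
 * unit boundary flags on a unit's flag word with plain bit operations. Only
 * the bits declared in the enum above are assumed; the helper names are made
 * up for the example. */
#if 0
static inline gboolean
unit_marks_au_start(guint flags)
{
    return (flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START) != 0;
}

static inline gboolean
unit_marks_au_end(guint flags)
{
    /* A single-slice access unit carries both flags at once */
    return (flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END) != 0;
}
#endif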
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
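/* Usage sketch (illustrative, not from the original source): the replace
 * macro above transfers ownership like the other mini object helpers, which
 * is how fields such as prev_pi are managed further below:
 *
 *   gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);    takes a ref
 *   gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);  drops it
 */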
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
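/*
 * For clarity, the reference flag combinations implied by the enum above
 * (not part of the original source):
 *
 *   non-reference        : neither reference bit set
 *   short-term reference : FLAG_REFERENCE only
 *   long-term reference  : FLAG_REFERENCE | FLAG_REFERENCE2
 *
 * This is why the IS_SHORT_TERM/IS_LONG_TERM macros below mask with
 * GST_VAAPI_PICTURE_FLAGS_REFERENCE and compare against the full value,
 * rather than testing a single bit.
 */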
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
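/* Typical use (sketch, not from this file): mark a decoded picture, and its
 * other field when both fields share a frame store, as a short-term
 * reference, or clear the reference bits by passing 0:
 *
 *   gst_vaapi_picture_h264_set_reference(picture,
 *       GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE,
 *       GST_VAAPI_PICTURE_IS_FRAME(picture));
 *
 *   gst_vaapi_picture_h264_set_reference(picture, 0,
 *       GST_VAAPI_PICTURE_IS_FRAME(picture));
 */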
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
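/* Worked example (illustrative): if the top field was decoded first with
 * field_poc = { 2, G_MAXINT32 } and the bottom field now arrives with
 * field_poc = { G_MAXINT32, 3 }, the two assignments above merge the values
 * so that both buffers end up exposing the complete { 2, 3 } pair. */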
355
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiStreamAlignH264     stream_alignment;
449     GstVaapiPictureH264        *current_picture;
450     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
451     GstVaapiParserInfoH264     *active_sps;
452     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
453     GstVaapiParserInfoH264     *active_pps;
454     GstVaapiParserInfoH264     *prev_pi;
455     GstVaapiParserInfoH264     *prev_slice_pi;
456     GstVaapiFrameStore        **prev_frames;
457     guint                       prev_frames_alloc;
458     GstVaapiFrameStore        **dpb;
459     guint                       dpb_count;
460     guint                       dpb_size;
461     guint                       dpb_size_max;
462     guint                       max_views;
463     GstVaapiProfile             profile;
464     GstVaapiEntrypoint          entrypoint;
465     GstVaapiChromaType          chroma_type;
466     GPtrArray                  *inter_views;
467     GstVaapiPictureH264        *short_ref[32];
468     guint                       short_ref_count;
469     GstVaapiPictureH264        *long_ref[32];
470     guint                       long_ref_count;
471     GstVaapiPictureH264        *RefPicList0[32];
472     guint                       RefPicList0_count;
473     GstVaapiPictureH264        *RefPicList1[32];
474     guint                       RefPicList1_count;
475     guint                       nal_length_size;
476     guint                       mb_width;
477     guint                       mb_height;
478     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479     gint32                      poc_msb;                // PicOrderCntMsb
480     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
481     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
482     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
483     gint32                      frame_num_offset;       // FrameNumOffset
484     gint32                      frame_num;              // frame_num (from slice_header())
485     gint32                      prev_frame_num;         // prevFrameNum
486     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
487     gboolean                    prev_pic_structure;     // previous picture structure
488     guint                       is_opened               : 1;
489     guint                       is_avcC                 : 1;
490     guint                       has_context             : 1;
491     guint                       progressive_sequence    : 1;
492 };
493
494 /**
495  * GstVaapiDecoderH264:
496  *
497  * An H.264 decoder.
498  */
499 struct _GstVaapiDecoderH264 {
500     /*< private >*/
501     GstVaapiDecoder             parent_instance;
502     GstVaapiDecoderH264Private  priv;
503 };
504
505 /**
506  * GstVaapiDecoderH264Class:
507  *
508  * An H.264 decoder class.
509  */
510 struct _GstVaapiDecoderH264Class {
511     /*< private >*/
512     GstVaapiDecoderClass parent_class;
513 };
514
515 static gboolean
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
517
518 static gboolean
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520     GstVaapiPictureH264 *picture);
521
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524     GstVaapiFrameStore *fs)
525 {
526     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
527 }
528
529 /* Determines if the supplied profile is one of the MVC set */
530 static gboolean
531 is_mvc_profile(GstH264Profile profile)
532 {
533     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534         profile == GST_H264_PROFILE_STEREO_HIGH;
535 }
536
537 /* Determines the view_id from the supplied NAL unit */
538 static inline guint
539 get_view_id(GstH264NalUnit *nalu)
540 {
541     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
542 }
543
544 /* Determines the view order index (VOIdx) from the supplied view_id */
545 static gint
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
547 {
548     GstH264SPSExtMVC *mvc;
549     gint i;
550
551     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
552         return 0;
553
554     mvc = &sps->extension.mvc;
555     for (i = 0; i <= mvc->num_views_minus1; i++) {
556         if (mvc->view[i].view_id == view_id)
557             return i;
558     }
559     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
560     return -1;
561 }
562
563 /* Determines NumViews */
564 static guint
565 get_num_views(GstH264SPS *sps)
566 {
567     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568         sps->extension.mvc.num_views_minus1 : 0);
569 }
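/* Example (illustrative): a plain AVC SPS yields NumViews = 1 and VOIdx = 0,
 * while a Stereo High subset SPS with num_views_minus1 = 1 yields
 * NumViews = 2, with get_view_order_index() mapping each view_id to its
 * index in the SPS MVC extension view list. */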
570
571 /* Computes max_dec_frame_buffering, i.e. the maximum DPB size in frames */
572 static guint
573 get_max_dec_frame_buffering(GstH264SPS *sps)
574 {
575     guint num_views, max_dpb_frames;
576     guint max_dec_frame_buffering, PicSizeMbs;
577     GstVaapiLevelH264 level;
578     const GstVaapiH264LevelLimits *level_limits;
579
580     /* Table A-1 - Level limits */
581     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582         level = GST_VAAPI_LEVEL_H264_L1b;
583     else
584         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586     if (G_UNLIKELY(!level_limits)) {
587         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588         max_dec_frame_buffering = 16;
589     }
590     else {
591         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592                       (sps->pic_height_in_map_units_minus1 + 1) *
593                       (sps->frame_mbs_only_flag ? 1 : 2));
594         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
595     }
596     if (is_mvc_profile(sps->profile_idc))
597         max_dec_frame_buffering <<= 1;
598
599     /* VUI parameters */
600     if (sps->vui_parameters_present_flag) {
601         GstH264VUIParams * const vui_params = &sps->vui_parameters;
602         if (vui_params->bitstream_restriction_flag)
603             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
604         else {
605             switch (sps->profile_idc) {
606             case 44:  // CAVLC 4:4:4 Intra profile
607             case GST_H264_PROFILE_SCALABLE_HIGH:
608             case GST_H264_PROFILE_HIGH:
609             case GST_H264_PROFILE_HIGH10:
610             case GST_H264_PROFILE_HIGH_422:
611             case GST_H264_PROFILE_HIGH_444:
612                 if (sps->constraint_set3_flag)
613                     max_dec_frame_buffering = 0;
614                 break;
615             }
616         }
617     }
618
619     num_views = get_num_views(sps);
620     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621     if (max_dec_frame_buffering > max_dpb_frames)
622         max_dec_frame_buffering = max_dpb_frames;
623     else if (max_dec_frame_buffering < sps->num_ref_frames)
624         max_dec_frame_buffering = sps->num_ref_frames;
625     return MAX(1, max_dec_frame_buffering);
626 }
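/* Worked example (not from the original source): for a 1920x1088 stream at
 * level 4.1, PicSizeMbs = 120 * 68 = 8160 and MaxDpbMbs = 32768 (Table A-1),
 * giving max_dec_frame_buffering = 32768 / 8160 = 4 frames; an MVC profile
 * doubles this to 8. A bitstream_restriction_flag in the VUI overrides the
 * level-derived value, and the final result is clamped to at most
 * max_dpb_frames (16 for a single view), to at least num_ref_frames, and
 * never below 1. */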
627
628 static void
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
630 {
631     gpointer * const entries = array;
632     guint num_entries = *array_length_ptr;
633
634     g_return_if_fail(index < num_entries);
635
636     if (index != --num_entries)
637         entries[index] = entries[num_entries];
638     entries[num_entries] = NULL;
639     *array_length_ptr = num_entries;
640 }
641
642 #if 1
643 static inline void
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
645 {
646     array_remove_index_fast(array, array_length_ptr, index);
647 }
648 #else
649 static void
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
651 {
652     gpointer * const entries = array;
653     const guint num_entries = *array_length_ptr - 1;
654     guint i;
655
656     g_return_if_fail(index <= num_entries);
657
658     for (i = index; i < num_entries; i++)
659         entries[i] = entries[i + 1];
660     entries[num_entries] = NULL;
661     *array_length_ptr = num_entries;
662 }
663 #endif
664
665 #define ARRAY_REMOVE_INDEX(array, index) \
666     array_remove_index(array, &array##_count, index)
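/* Expansion example (illustrative): ARRAY_REMOVE_INDEX(priv->short_ref, i)
 * expands to array_remove_index(priv->short_ref, &priv->short_ref_count, i),
 * i.e. the macro pairs an array with the matching <name>_count field
 * declared in GstVaapiDecoderH264Private. */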
667
668 static void
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
670 {
671     GstVaapiDecoderH264Private * const priv = &decoder->priv;
672     guint i, num_frames = --priv->dpb_count;
673
674     if (USE_STRICT_DPB_ORDERING) {
675         for (i = index; i < num_frames; i++)
676             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
677     }
678     else if (index != num_frames)
679         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
681 }
682
683 static gboolean
684 dpb_output(
685     GstVaapiDecoderH264 *decoder,
686     GstVaapiFrameStore  *fs,
687     GstVaapiPictureH264 *picture
688 )
689 {
690     picture->output_needed = FALSE;
691
692     if (fs) {
693         if (--fs->output_needed > 0)
694             return TRUE;
695         picture = fs->buffers[0];
696     }
697     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
698 }
699
700 static inline void
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
702 {
703     GstVaapiDecoderH264Private * const priv = &decoder->priv;
704     GstVaapiFrameStore * const fs = priv->dpb[i];
705
706     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707         dpb_remove_index(decoder, i);
708 }
709
710 /* Finds the frame store holding the supplied picture */
711 static gint
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
713 {
714     GstVaapiDecoderH264Private * const priv = &decoder->priv;
715     gint i, j;
716
717     for (i = 0; i < priv->dpb_count; i++) {
718         GstVaapiFrameStore * const fs = priv->dpb[i];
719         for (j = 0; j < fs->num_buffers; j++) {
720             if (fs->buffers[j] == picture)
721                 return i;
722         }
723     }
724     return -1;
725 }
726
727 /* Finds the picture with the lowest POC that needs to be output */
728 static gint
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730     GstVaapiPictureH264 **found_picture_ptr)
731 {
732     GstVaapiDecoderH264Private * const priv = &decoder->priv;
733     GstVaapiPictureH264 *found_picture = NULL;
734     guint i, j, found_index;
735
736     for (i = 0; i < priv->dpb_count; i++) {
737         GstVaapiFrameStore * const fs = priv->dpb[i];
738         if (!fs->output_needed)
739             continue;
740         if (picture && picture->base.view_id != fs->view_id)
741             continue;
742         for (j = 0; j < fs->num_buffers; j++) {
743             GstVaapiPictureH264 * const pic = fs->buffers[j];
744             if (!pic->output_needed)
745                 continue;
746             if (!found_picture || found_picture->base.poc > pic->base.poc ||
747                 (found_picture->base.poc == pic->base.poc &&
748                  found_picture->base.voc > pic->base.voc))
749                 found_picture = pic, found_index = i;
750         }
751     }
752
753     if (found_picture_ptr)
754         *found_picture_ptr = found_picture;
755     return found_picture ? found_index : -1;
756 }
757
758 /* Finds the picture with the lowest VOC that needs to be output */
759 static gint
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761     GstVaapiPictureH264 **found_picture_ptr)
762 {
763     GstVaapiDecoderH264Private * const priv = &decoder->priv;
764     GstVaapiPictureH264 *found_picture = NULL;
765     guint i, j, found_index;
766
767     for (i = 0; i < priv->dpb_count; i++) {
768         GstVaapiFrameStore * const fs = priv->dpb[i];
769         if (!fs->output_needed || fs->view_id == picture->base.view_id)
770             continue;
771         for (j = 0; j < fs->num_buffers; j++) {
772             GstVaapiPictureH264 * const pic = fs->buffers[j];
773             if (!pic->output_needed || pic->base.poc != picture->base.poc)
774                 continue;
775             if (!found_picture || found_picture->base.voc > pic->base.voc)
776                 found_picture = pic, found_index = i;
777         }
778     }
779
780     if (found_picture_ptr)
781         *found_picture_ptr = found_picture;
782     return found_picture ? found_index : -1;
783 }
784
785 static gboolean
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787     GstVaapiPictureH264 *picture, guint voc)
788 {
789     GstVaapiDecoderH264Private * const priv = &decoder->priv;
790     GstVaapiPictureH264 *found_picture;
791     gint found_index;
792     gboolean success;
793
794     if (priv->max_views == 1)
795         return TRUE;
796
797     /* Emit all other view components that were in the same access
798        unit as the picture we have just found */
799     found_picture = picture;
800     for (;;) {
801         found_index = dpb_find_lowest_voc(decoder, found_picture,
802             &found_picture);
803         if (found_index < 0 || found_picture->base.voc >= voc)
804             break;
805         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806         dpb_evict(decoder, found_picture, found_index);
807         if (!success)
808             return FALSE;
809     }
810     return TRUE;
811 }
812
813 static gboolean
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
815 {
816     GstVaapiDecoderH264Private * const priv = &decoder->priv;
817     GstVaapiPictureH264 *found_picture;
818     gint found_index;
819     gboolean success;
820
821     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
822     if (found_index < 0)
823         return FALSE;
824
825     if (picture && picture->base.poc != found_picture->base.poc)
826         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
827
828     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829     dpb_evict(decoder, found_picture, found_index);
830     if (priv->max_views == 1)
831         return success;
832
833     if (picture && picture->base.poc != found_picture->base.poc)
834         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
835     return success;
836 }
837
838 static void
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 {
841     GstVaapiDecoderH264Private * const priv = &decoder->priv;
842     guint i, n;
843
844     for (i = 0; i < priv->dpb_count; i++) {
845         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
846             continue;
847         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
848     }
849
850     /* Compact the resulting DPB, i.e. remove holes */
851     for (i = 0, n = 0; i < priv->dpb_count; i++) {
852         if (priv->dpb[i]) {
853             if (i != n) {
854                 priv->dpb[n] = priv->dpb[i];
855                 priv->dpb[i] = NULL;
856             }
857             n++;
858         }
859     }
860     priv->dpb_count = n;
861
862     /* Clear previous frame buffers only if this is a "flush-all" operation,
863        or if the picture is the first one in the access unit */
864     if (priv->prev_frames && (!picture ||
865             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866                 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867         for (i = 0; i < priv->max_views; i++)
868             gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
869     }
870 }
871
872 static void
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
874 {
875     while (dpb_bump(decoder, picture))
876         ;
877     dpb_clear(decoder, picture);
878 }
879
880 static void
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
882 {
883     GstVaapiDecoderH264Private * const priv = &decoder->priv;
884     const gboolean is_last_picture = /* in the access unit */
885         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
886     guint i;
887
888     // Remove all unused inter-view only reference components of the current AU
889     i = 0;
890     while (i < priv->dpb_count) {
891         GstVaapiFrameStore * const fs = priv->dpb[i];
892         if (fs->view_id != picture->base.view_id &&
893             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
894             (is_last_picture ||
895              !is_inter_view_reference_for_next_frames(decoder, fs)))
896             dpb_remove_index(decoder, i);
897         else
898             i++;
899     }
900 }
901
902 static gboolean
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
904 {
905     GstVaapiDecoderH264Private * const priv = &decoder->priv;
906     GstVaapiFrameStore *fs;
907     guint i;
908
909     if (priv->max_views > 1)
910         dpb_prune_mvc(decoder, picture);
911
912     // Remove all unused pictures
913     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
914         i = 0;
915         while (i < priv->dpb_count) {
916             GstVaapiFrameStore * const fs = priv->dpb[i];
917             if (fs->view_id == picture->base.view_id &&
918                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919                 dpb_remove_index(decoder, i);
920             else
921                 i++;
922         }
923     }
924
925     // Check if the picture is the second field and the first field is still in the DPB
926     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928         const gint found_index = dpb_find_picture(decoder,
929             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930         if (found_index >= 0)
931             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
932     }
933
934     // Create new frame store, and split fields if necessary
935     fs = gst_vaapi_frame_store_new(picture);
936     if (!fs)
937         return FALSE;
938     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
939     gst_vaapi_frame_store_unref(fs);
940
941     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
942         if (!gst_vaapi_frame_store_split_fields(fs))
943             return FALSE;
944     }
945
946     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
947     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
948         while (priv->dpb_count == priv->dpb_size) {
949             if (!dpb_bump(decoder, picture))
950                 return FALSE;
951         }
952     }
953
954     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
955     else {
956         const gboolean StoreInterViewOnlyRefFlag =
957             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
958                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
959             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
960                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
961         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
962             return TRUE;
963         while (priv->dpb_count == priv->dpb_size) {
964             GstVaapiPictureH264 *found_picture;
965             if (!StoreInterViewOnlyRefFlag) {
966                 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
967                     found_picture->base.poc > picture->base.poc)
968                     return dpb_output(decoder, NULL, picture);
969             }
970             if (!dpb_bump(decoder, picture))
971                 return FALSE;
972         }
973     }
974
975     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
976     if (picture->output_flag) {
977         picture->output_needed = TRUE;
978         fs->output_needed++;
979     }
980     return TRUE;
981 }
982
983 static gboolean
984 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
985 {
986     GstVaapiDecoderH264Private * const priv = &decoder->priv;
987
988     if (dpb_size > priv->dpb_size_max) {
989         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
990         if (!priv->dpb)
991             return FALSE;
992         memset(&priv->dpb[priv->dpb_size_max], 0,
993             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
994         priv->dpb_size_max = dpb_size;
995     }
996     priv->dpb_size = dpb_size;
997
998     GST_DEBUG("DPB size %u", priv->dpb_size);
999     return TRUE;
1000 }
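/* Illustrative call sequence (not from the original source), showing how
 * dpb_size tracks the stream requirements while dpb_size_max only records
 * the high-water mark of the allocation:
 *
 *   dpb_reset(decoder, 4);    dpb_size = 4,  dpb_size_max = 4
 *   dpb_reset(decoder, 16);   dpb_size = 16, dpb_size_max = 16 (realloc)
 *   dpb_reset(decoder, 4);    dpb_size = 4,  dpb_size_max = 16 (no realloc)
 */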
1001
1002 static void
1003 unref_inter_view(GstVaapiPictureH264 *picture)
1004 {
1005     if (!picture)
1006         return;
1007     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1008     gst_vaapi_picture_unref(picture);
1009 }
1010
1011 /* Resets MVC resources */
1012 static gboolean
1013 mvc_reset(GstVaapiDecoderH264 *decoder)
1014 {
1015     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1016     guint i;
1017
1018     // Resize array of inter-view references
1019     if (!priv->inter_views) {
1020         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1021             (GDestroyNotify)unref_inter_view);
1022         if (!priv->inter_views)
1023             return FALSE;
1024     }
1025
1026     // Resize array of previous frame buffers
1027     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1028         gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1029
1030     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1031         sizeof(*priv->prev_frames));
1032     if (!priv->prev_frames) {
1033         priv->prev_frames_alloc = 0;
1034         return FALSE;
1035     }
1036     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1037         priv->prev_frames[i] = NULL;
1038     priv->prev_frames_alloc = priv->max_views;
1039     return TRUE;
1040 }
1041
1042 static GstVaapiDecoderStatus
1043 get_status(GstH264ParserResult result)
1044 {
1045     GstVaapiDecoderStatus status;
1046
1047     switch (result) {
1048     case GST_H264_PARSER_OK:
1049         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1050         break;
1051     case GST_H264_PARSER_NO_NAL_END:
1052         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1053         break;
1054     case GST_H264_PARSER_ERROR:
1055         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1056         break;
1057     default:
1058         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1059         break;
1060     }
1061     return status;
1062 }
1063
1064 static void
1065 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1066 {
1067     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1068
1069     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1070     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1071     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1072
1073     dpb_clear(decoder, NULL);
1074
1075     if (priv->inter_views) {
1076         g_ptr_array_unref(priv->inter_views);
1077         priv->inter_views = NULL;
1078     }
1079
1080     if (priv->parser) {
1081         gst_h264_nal_parser_free(priv->parser);
1082         priv->parser = NULL;
1083     }
1084 }
1085
1086 static gboolean
1087 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1088 {
1089     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1090
1091     gst_vaapi_decoder_h264_close(decoder);
1092
1093     priv->parser = gst_h264_nal_parser_new();
1094     if (!priv->parser)
1095         return FALSE;
1096     return TRUE;
1097 }
1098
1099 static void
1100 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1101 {
1102     GstVaapiDecoderH264 * const decoder =
1103         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1104     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1105     guint i;
1106
1107     gst_vaapi_decoder_h264_close(decoder);
1108
1109     g_free(priv->dpb);
1110     priv->dpb = NULL;
1111     priv->dpb_size = 0;
1112
1113     g_free(priv->prev_frames);
1114     priv->prev_frames = NULL;
1115     priv->prev_frames_alloc = 0;
1116
1117     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1118         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1119     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1120
1121     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1122         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1123     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1124 }
1125
1126 static gboolean
1127 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1128 {
1129     GstVaapiDecoderH264 * const decoder =
1130         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1131     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1132
1133     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1134     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1135     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1136     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1137     priv->progressive_sequence  = TRUE;
1138     return TRUE;
1139 }
1140
1141 /* Activates the supplied PPS */
1142 static GstH264PPS *
1143 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1144 {
1145     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1146     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1147
1148     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1149     return pi ? &pi->data.pps : NULL;
1150 }
1151
1152 /* Returns the active PPS */
1153 static inline GstH264PPS *
1154 get_pps(GstVaapiDecoderH264 *decoder)
1155 {
1156     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1157
1158     return pi ? &pi->data.pps : NULL;
1159 }
1160
1161 /* Activates the supplied SPS */
1162 static GstH264SPS *
1163 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1164 {
1165     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1166     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1167
1168     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1169     return pi ? &pi->data.sps : NULL;
1170 }
1171
1172 /* Returns the active SPS */
1173 static inline GstH264SPS *
1174 get_sps(GstVaapiDecoderH264 *decoder)
1175 {
1176     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1177
1178     return pi ? &pi->data.sps : NULL;
1179 }
1180
1181 static void
1182 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1183     GstVaapiProfile profile)
1184 {
1185     guint n_profiles = *n_profiles_ptr;
1186
1187     profiles[n_profiles++] = profile;
1188     switch (profile) {
1189     case GST_VAAPI_PROFILE_H264_MAIN:
1190         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1191         break;
1192     default:
1193         break;
1194     }
1195     *n_profiles_ptr = n_profiles;
1196 }
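/* Example (illustrative): fill_profiles(profiles, &n, GST_VAAPI_PROFILE_H264_MAIN)
 * yields { MAIN, HIGH }, so a Main profile stream can still be decoded on a
 * driver that only exposes a High profile decoder entrypoint. */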
1197
1198 /* Fills in compatible profiles for MVC decoding */
1199 static void
1200 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1201     guint *n_profiles_ptr, guint dpb_size)
1202 {
1203     const gchar * const vendor_string =
1204         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1205
1206     gboolean add_high_profile = FALSE;
1207     struct map {
1208         const gchar *str;
1209         guint str_len;
1210     };
1211     const struct map *m;
1212
1213     // Drivers that support slice level decoding
1214     if (vendor_string && dpb_size <= 16) {
1215         static const struct map drv_names[] = {
1216             { "Intel i965 driver", 17 },
1217             { NULL, 0 }
1218         };
1219         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1220             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1221                 add_high_profile = TRUE;
1222         }
1223     }
1224
1225     if (add_high_profile)
1226         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1227 }
1228
1229 static GstVaapiProfile
1230 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1231 {
1232     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1233     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1234     GstVaapiProfile profile, profiles[4];
1235     guint i, n_profiles = 0;
1236
1237     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1238     if (!profile)
1239         return GST_VAAPI_PROFILE_UNKNOWN;
1240
1241     fill_profiles(profiles, &n_profiles, profile);
1242     switch (profile) {
1243     case GST_VAAPI_PROFILE_H264_BASELINE:
1244         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1245             fill_profiles(profiles, &n_profiles,
1246                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1247             fill_profiles(profiles, &n_profiles,
1248                 GST_VAAPI_PROFILE_H264_MAIN);
1249         }
1250         break;
1251     case GST_VAAPI_PROFILE_H264_EXTENDED:
1252         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1253             fill_profiles(profiles, &n_profiles,
1254                 GST_VAAPI_PROFILE_H264_MAIN);
1255         }
1256         break;
1257     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1258         if (priv->max_views == 2) {
1259             fill_profiles(profiles, &n_profiles,
1260                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1261         }
1262         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1263         break;
1264     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1265         if (sps->frame_mbs_only_flag) {
1266             fill_profiles(profiles, &n_profiles,
1267                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1268         }
1269         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1270         break;
1271     default:
1272         break;
1273     }
1274
1275     /* If the preferred profile (profiles[0]) matches one that we already
1276        found, then just return it now instead of searching for it again */
1277     if (profiles[0] == priv->profile)
1278         return priv->profile;
1279
1280     for (i = 0; i < n_profiles; i++) {
1281         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1282             return profiles[i];
1283     }
1284     return GST_VAAPI_PROFILE_UNKNOWN;
1285 }
1286
1287 static GstVaapiDecoderStatus
1288 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1289 {
1290     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1291     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1292     GstVaapiContextInfo info;
1293     GstVaapiProfile profile;
1294     GstVaapiChromaType chroma_type;
1295     gboolean reset_context = FALSE;
1296     guint mb_width, mb_height, dpb_size, num_views;
1297
1298     num_views = get_num_views(sps);
1299     if (priv->max_views < num_views) {
1300         priv->max_views = num_views;
1301         GST_DEBUG("maximum number of views changed to %u", num_views);
1302     }
1303
1304     dpb_size = get_max_dec_frame_buffering(sps);
1305     if (priv->dpb_size < dpb_size) {
1306         GST_DEBUG("DPB size increased");
1307         reset_context = TRUE;
1308     }
1309
1310     profile = get_profile(decoder, sps, dpb_size);
1311     if (!profile) {
1312         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1313         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1314     }
1315
1316     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1317         GST_DEBUG("profile changed");
1318         reset_context = TRUE;
1319         priv->profile = profile;
1320     }
1321
1322     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1323     if (!chroma_type) {
1324         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1325         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1326     }
1327
1328     if (priv->chroma_type != chroma_type) {
1329         GST_DEBUG("chroma format changed");
1330         reset_context     = TRUE;
1331         priv->chroma_type = chroma_type;
1332     }
1333
1334     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1335     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1336         !sps->frame_mbs_only_flag;
1337     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1338         GST_DEBUG("size changed");
1339         reset_context   = TRUE;
1340         priv->mb_width  = mb_width;
1341         priv->mb_height = mb_height;
1342     }
1343
1344     priv->progressive_sequence = sps->frame_mbs_only_flag;
1345     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1346
1347     gst_vaapi_decoder_set_pixel_aspect_ratio(
1348         base_decoder,
1349         sps->vui_parameters.par_n,
1350         sps->vui_parameters.par_d
1351     );
1352
1353     if (!reset_context && priv->has_context)
1354         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1355
1356     /* XXX: fix surface size when cropping is implemented */
1357     info.profile    = priv->profile;
1358     info.entrypoint = priv->entrypoint;
1359     info.chroma_type = priv->chroma_type;
1360     info.width      = sps->width;
1361     info.height     = sps->height;
1362     info.ref_frames = dpb_size;
1363
1364     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1365         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1366     priv->has_context = TRUE;
1367
1368     /* Reset DPB */
1369     if (!dpb_reset(decoder, dpb_size))
1370         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1371
1372     /* Reset MVC data */
1373     if (!mvc_reset(decoder))
1374         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1375     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1376 }
1377
1378 static void
1379 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1380     const GstH264SPS *sps)
1381 {
1382     guint i;
1383
1384     /* There are always 6 4x4 scaling lists */
1385     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1386     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1387
1388     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1389         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1390             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1391 }
1392
1393 static void
1394 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1395     const GstH264SPS *sps)
1396 {
1397     guint i, n;
1398
1399     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1400     if (!pps->transform_8x8_mode_flag)
1401         return;
1402
1403     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1404     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1405
1406     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1407     for (i = 0; i < n; i++) {
1408         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1409             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1410     }
1411 }
1412
1413 static GstVaapiDecoderStatus
1414 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1415 {
1416     GstVaapiPicture * const base_picture = &picture->base;
1417     GstH264PPS * const pps = get_pps(decoder);
1418     GstH264SPS * const sps = get_sps(decoder);
1419     VAIQMatrixBufferH264 *iq_matrix;
1420
1421     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1422     if (!base_picture->iq_matrix) {
1423         GST_ERROR("failed to allocate IQ matrix");
1424         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1425     }
1426     iq_matrix = base_picture->iq_matrix->param;
1427
1428     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1429        is not large enough to hold lists for 4:4:4 */
1430     if (sps->chroma_format_idc == 3)
1431         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1432
1433     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1434     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1435
1436     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1437 }
1438
1439 static inline gboolean
1440 is_valid_state(guint state, guint ref_state)
1441 {
1442     return (state & ref_state) == ref_state;
1443 }
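/* Example (illustrative): with state = GOT_SPS | GOT_PPS,
 * is_valid_state(state, GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS) is TRUE
 * but is_valid_state(state, GST_H264_VIDEO_STATE_VALID_PICTURE) is FALSE,
 * since the GOT_SLICE bit is still missing. */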
1444
1445 static GstVaapiDecoderStatus
1446 decode_current_picture(GstVaapiDecoderH264 *decoder)
1447 {
1448     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1449     GstVaapiPictureH264 * const picture = priv->current_picture;
1450
1451     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1452         goto drop_frame;
1453     priv->decoder_state = 0;
1454
1455     if (!picture)
1456         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1457
1458     if (!exec_ref_pic_marking(decoder, picture))
1459         goto error;
1460     if (!dpb_add(decoder, picture))
1461         goto error;
1462     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1463         goto error;
1464     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1465     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1466
1467 error:
1468     /* XXX: fix for cases where first field failed to be decoded */
1469     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1470     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1471
1472 drop_frame:
1473     priv->decoder_state = 0;
1474     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1475 }
1476
1477 static GstVaapiDecoderStatus
1478 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1479 {
1480     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1481     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1482     GstH264SPS * const sps = &pi->data.sps;
1483     GstH264ParserResult result;
1484
1485     GST_DEBUG("parse SPS");
1486
1487     priv->parser_state = 0;
1488
1489     /* Variables that don't have inferred values per the H.264
1490        standard but that should get a default value anyway */
1491     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1492
1493     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1494     if (result != GST_H264_PARSER_OK)
1495         return get_status(result);
1496
1497     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1498     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1499 }
1500
1501 static GstVaapiDecoderStatus
1502 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1503 {
1504     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1505     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1506     GstH264SPS * const sps = &pi->data.sps;
1507     GstH264ParserResult result;
1508
1509     GST_DEBUG("parse subset SPS");
1510
1511     /* Variables that don't have inferred values per the H.264
1512        standard but that should get a default value anyway */
1513     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1514
1515     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1516         TRUE);
1517     if (result != GST_H264_PARSER_OK)
1518         return get_status(result);
1519
1520     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1521     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1522 }
1523
1524 static GstVaapiDecoderStatus
1525 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1526 {
1527     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1528     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1529     GstH264PPS * const pps = &pi->data.pps;
1530     GstH264ParserResult result;
1531
1532     GST_DEBUG("parse PPS");
1533
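    /* A new PPS invalidates any previously parsed PPS/slice state; only
       the GOT_SPS bit is carried over. */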
1534     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1535
1536     /* Variables that don't have inferred values per the H.264
1537        standard but that should get a default value anyway */
1538     pps->slice_group_map_type = 0;
1539     pps->slice_group_change_rate_minus1 = 0;
1540
1541     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1542     if (result != GST_H264_PARSER_OK)
1543         return get_status(result);
1544
1545     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1546     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1547 }
1548
1549 static GstVaapiDecoderStatus
1550 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1551 {
1552     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1553     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1554     GArray ** const sei_ptr = &pi->data.sei;
1555     GstH264ParserResult result;
1556
1557     GST_DEBUG("parse SEI");
1558
1559     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1560     if (result != GST_H264_PARSER_OK) {
1561         GST_WARNING("failed to parse SEI messages");
1562         return get_status(result);
1563     }
1564     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1565 }
1566
1567 static GstVaapiDecoderStatus
1568 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1569 {
1570     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1571     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1572     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1573     GstH264NalUnit * const nalu = &pi->nalu;
1574     GstH264SPS *sps;
1575     GstH264ParserResult result;
1576
1577     GST_DEBUG("parse slice");
1578
1579     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1580                            GST_H264_VIDEO_STATE_GOT_PPS);
1581
1582     /* Propagate Prefix NAL unit info, if necessary */
1583     switch (nalu->type) {
1584     case GST_H264_NAL_SLICE:
1585     case GST_H264_NAL_SLICE_IDR: {
1586         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1587         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1588             /* MVC sequences shall have a Prefix NAL unit immediately
1589                preceding this NAL unit */
1590             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1591             pi->nalu.extension = prev_pi->nalu.extension;
1592         }
1593         else {
1594             /* In the very unlikely case there is no Prefix NAL unit
1595                immediately preceding this NAL unit, try to infer some
1596                defaults (H.7.4.1.1) */
1597             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1598             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1599             nalu->idr_pic_flag = !mvc->non_idr_flag;
1600             mvc->priority_id = 0;
1601             mvc->view_id = 0;
1602             mvc->temporal_id = 0;
1603             mvc->anchor_pic_flag = 0;
1604             mvc->inter_view_flag = 1;
1605         }
1606         break;
1607     }
1608     }
1609
1610     /* Variables that don't have inferred values per the H.264
1611        standard but that should get a default value anyway */
1612     slice_hdr->cabac_init_idc = 0;
1613     slice_hdr->direct_spatial_mv_pred_flag = 0;
1614
1615     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1616         slice_hdr, TRUE, TRUE);
1617     if (result != GST_H264_PARSER_OK)
1618         return get_status(result);
1619
1620     sps = slice_hdr->pps->sequence;
1621
1622     /* Update MVC data */
1623     pi->view_id = get_view_id(&pi->nalu);
1624     pi->voc = get_view_order_index(sps, pi->view_id);
1625
1626     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1627     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1628 }
1629
1630 static GstVaapiDecoderStatus
1631 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1632 {
1633     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1634     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1635     GstH264SPS * const sps = &pi->data.sps;
1636
1637     GST_DEBUG("decode SPS");
1638
1639     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1640     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1641 }
1642
1643 static GstVaapiDecoderStatus
1644 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1645 {
1646     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1647     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1648     GstH264SPS * const sps = &pi->data.sps;
1649
1650     GST_DEBUG("decode subset SPS");
1651
1652     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1653     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1654 }
1655
1656 static GstVaapiDecoderStatus
1657 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1658 {
1659     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1661     GstH264PPS * const pps = &pi->data.pps;
1662
1663     GST_DEBUG("decode PPS");
1664
1665     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1666     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1667 }
1668
1669 static GstVaapiDecoderStatus
1670 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1671 {
1672     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1673     GstVaapiDecoderStatus status;
1674
1675     GST_DEBUG("decode sequence-end");
1676
1677     status = decode_current_picture(decoder);
1678     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1679         return status;
1680
1681     dpb_flush(decoder, NULL);
1682
1683     /* Reset defaults, should there be a new sequence available next */
1684     priv->max_views = 1;
1685     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1686 }
1687
1688 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1689 static void
1690 init_picture_poc_0(
1691     GstVaapiDecoderH264 *decoder,
1692     GstVaapiPictureH264 *picture,
1693     GstH264SliceHdr     *slice_hdr
1694 )
1695 {
1696     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1697     GstH264SPS * const sps = get_sps(decoder);
1698     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1699     gint32 temp_poc;
1700
1701     GST_DEBUG("decode picture order count type 0");
1702
1703     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1704         priv->prev_poc_msb = 0;
1705         priv->prev_poc_lsb = 0;
1706     }
1707     else if (priv->prev_pic_has_mmco5) {
1708         priv->prev_poc_msb = 0;
1709         priv->prev_poc_lsb =
1710             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1711              0 : priv->field_poc[TOP_FIELD]);
1712     }
1713     else {
1714         priv->prev_poc_msb = priv->poc_msb;
1715         priv->prev_poc_lsb = priv->poc_lsb;
1716     }
1717
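    /* Derive PicOrderCntMsb: when pic_order_cnt_lsb wraps around relative
       to the previous reference picture, step the MSB by one period of
       MaxPicOrderCntLsb in the matching direction. */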
1718     // (8-3)
1719     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1720     if (priv->poc_lsb < priv->prev_poc_lsb &&
1721         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1722         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1723     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1724              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1725         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1726     else
1727         priv->poc_msb = priv->prev_poc_msb;
1728
1729     temp_poc = priv->poc_msb + priv->poc_lsb;
1730     switch (picture->structure) {
1731     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1732         // (8-4, 8-5)
1733         priv->field_poc[TOP_FIELD] = temp_poc;
1734         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1735             slice_hdr->delta_pic_order_cnt_bottom;
1736         break;
1737     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1738         // (8-4)
1739         priv->field_poc[TOP_FIELD] = temp_poc;
1740         break;
1741     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1742         // (8-5)
1743         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1744         break;
1745     }
1746 }
1747
1748 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1749 static void
1750 init_picture_poc_1(
1751     GstVaapiDecoderH264 *decoder,
1752     GstVaapiPictureH264 *picture,
1753     GstH264SliceHdr     *slice_hdr
1754 )
1755 {
1756     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1757     GstH264SPS * const sps = get_sps(decoder);
1758     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1759     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1760     guint i;
1761
1762     GST_DEBUG("decode picture order count type 1");
1763
1764     if (priv->prev_pic_has_mmco5)
1765         prev_frame_num_offset = 0;
1766     else
1767         prev_frame_num_offset = priv->frame_num_offset;
1768
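    /* FrameNumOffset accumulates one MaxFrameNum period each time
       frame_num wraps around, so that abs_frame_num below increases
       monotonically within the coded video sequence. */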
1769     // (8-6)
1770     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1771         priv->frame_num_offset = 0;
1772     else if (priv->prev_frame_num > priv->frame_num)
1773         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1774     else
1775         priv->frame_num_offset = prev_frame_num_offset;
1776
1777     // (8-7)
1778     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1779         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1780     else
1781         abs_frame_num = 0;
1782     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1783         abs_frame_num = abs_frame_num - 1;
1784
1785     if (abs_frame_num > 0) {
1786         gint32 expected_delta_per_poc_cycle;
1787         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1788
1789         expected_delta_per_poc_cycle = 0;
1790         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1791             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1792
1793         // (8-8)
1794         poc_cycle_cnt = (abs_frame_num - 1) /
1795             sps->num_ref_frames_in_pic_order_cnt_cycle;
1796         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1797             sps->num_ref_frames_in_pic_order_cnt_cycle;
1798
1799         // (8-9)
1800         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1801         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1802             expected_poc += sps->offset_for_ref_frame[i];
1803     }
1804     else
1805         expected_poc = 0;
1806     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1807         expected_poc += sps->offset_for_non_ref_pic;
1808
1809     // (8-10)
1810     switch (picture->structure) {
1811     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1812         priv->field_poc[TOP_FIELD] = expected_poc +
1813             slice_hdr->delta_pic_order_cnt[0];
1814         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1815             sps->offset_for_top_to_bottom_field +
1816             slice_hdr->delta_pic_order_cnt[1];
1817         break;
1818     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1819         priv->field_poc[TOP_FIELD] = expected_poc +
1820             slice_hdr->delta_pic_order_cnt[0];
1821         break;
1822     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1823         priv->field_poc[BOTTOM_FIELD] = expected_poc + 
1824             sps->offset_for_top_to_bottom_field +
1825             slice_hdr->delta_pic_order_cnt[0];
1826         break;
1827     }
1828 }
1829
1830 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1831 static void
1832 init_picture_poc_2(
1833     GstVaapiDecoderH264 *decoder,
1834     GstVaapiPictureH264 *picture,
1835     GstH264SliceHdr     *slice_hdr
1836 )
1837 {
1838     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1839     GstH264SPS * const sps = get_sps(decoder);
1840     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1841     gint32 prev_frame_num_offset, temp_poc;
1842
1843     GST_DEBUG("decode picture order count type 2");
1844
1845     if (priv->prev_pic_has_mmco5)
1846         prev_frame_num_offset = 0;
1847     else
1848         prev_frame_num_offset = priv->frame_num_offset;
1849
1850     // (8-11)
1851     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1852         priv->frame_num_offset = 0;
1853     else if (priv->prev_frame_num > priv->frame_num)
1854         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1855     else
1856         priv->frame_num_offset = prev_frame_num_offset;
1857
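    /* With pic_order_cnt_type equal to 2, the POC follows decoding order:
       2 * (FrameNumOffset + frame_num), minus 1 for non-reference
       pictures. */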
1858     // (8-12)
1859     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1860         temp_poc = 0;
1861     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1862         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1863     else
1864         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1865
1866     // (8-13)
1867     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1868         priv->field_poc[TOP_FIELD] = temp_poc;
1869     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1870         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1871 }
1872
1873 /* 8.2.1 - Decoding process for picture order count */
1874 static void
1875 init_picture_poc(
1876     GstVaapiDecoderH264 *decoder,
1877     GstVaapiPictureH264 *picture,
1878     GstH264SliceHdr     *slice_hdr
1879 )
1880 {
1881     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1882     GstH264SPS * const sps = get_sps(decoder);
1883
1884     switch (sps->pic_order_cnt_type) {
1885     case 0:
1886         init_picture_poc_0(decoder, picture, slice_hdr);
1887         break;
1888     case 1:
1889         init_picture_poc_1(decoder, picture, slice_hdr);
1890         break;
1891     case 2:
1892         init_picture_poc_2(decoder, picture, slice_hdr);
1893         break;
1894     }
1895
1896     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1897         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1898     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1899         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1900     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1901 }
1902
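/* qsort() comparison helpers used by SORT_REF_LIST() below to build the
   initial reference picture lists (8.2.4.2). The _dec/_inc suffixes denote
   descending and ascending order of the respective key. */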
1903 static int
1904 compare_picture_pic_num_dec(const void *a, const void *b)
1905 {
1906     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1907     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1908
1909     return picB->pic_num - picA->pic_num;
1910 }
1911
1912 static int
1913 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1914 {
1915     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1916     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1917
1918     return picA->long_term_pic_num - picB->long_term_pic_num;
1919 }
1920
1921 static int
1922 compare_picture_poc_dec(const void *a, const void *b)
1923 {
1924     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1925     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1926
1927     return picB->base.poc - picA->base.poc;
1928 }
1929
1930 static int
1931 compare_picture_poc_inc(const void *a, const void *b)
1932 {
1933     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1934     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1935
1936     return picA->base.poc - picB->base.poc;
1937 }
1938
1939 static int
1940 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1941 {
1942     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1943     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1944
1945     return picB->frame_num_wrap - picA->frame_num_wrap;
1946 }
1947
1948 static int
1949 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1950 {
1951     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1952     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1953
1954     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1955 }
1956
1957 /* 8.2.4.1 - Decoding process for picture numbers */
1958 static void
1959 init_picture_refs_pic_num(
1960     GstVaapiDecoderH264 *decoder,
1961     GstVaapiPictureH264 *picture,
1962     GstH264SliceHdr     *slice_hdr
1963 )
1964 {
1965     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1966     GstH264SPS * const sps = get_sps(decoder);
1967     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1968     guint i;
1969
1970     GST_DEBUG("decode picture numbers");
1971
1972     for (i = 0; i < priv->short_ref_count; i++) {
1973         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1974
1975         // (H.8.2)
1976         if (pic->base.view_id != picture->base.view_id)
1977             continue;
1978
1979         // (8-27)
1980         if (pic->frame_num > priv->frame_num)
1981             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1982         else
1983             pic->frame_num_wrap = pic->frame_num;
1984
1985         // (8-28, 8-30, 8-31)
1986         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1987             pic->pic_num = pic->frame_num_wrap;
1988         else {
1989             if (pic->structure == picture->structure)
1990                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1991             else
1992                 pic->pic_num = 2 * pic->frame_num_wrap;
1993         }
1994     }
1995
1996     for (i = 0; i < priv->long_ref_count; i++) {
1997         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1998
1999         // (H.8.2)
2000         if (pic->base.view_id != picture->base.view_id)
2001             continue;
2002
2003         // (8-29, 8-32, 8-33)
2004         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2005             pic->long_term_pic_num = pic->long_term_frame_idx;
2006         else {
2007             if (pic->structure == picture->structure)
2008                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2009             else
2010                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2011         }
2012     }
2013 }
2014
2015 #define SORT_REF_LIST(list, n, compare_func) \
2016     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
2017
2018 static void
2019 init_picture_refs_fields_1(
2020     guint                picture_structure,
2021     GstVaapiPictureH264 *RefPicList[32],
2022     guint               *RefPicList_count,
2023     GstVaapiPictureH264 *ref_list[32],
2024     guint                ref_list_count
2025 )
2026 {
2027     guint i, j, n;
2028
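    /* Alternate between reference fields with the same parity as the
       current field and those with the opposite parity, starting with the
       same parity, until both input lists are exhausted. */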
2029     i = 0;
2030     j = 0;
2031     n = *RefPicList_count;
2032     do {
2033         g_assert(n < 32);
2034         for (; i < ref_list_count; i++) {
2035             if (ref_list[i]->structure == picture_structure) {
2036                 RefPicList[n++] = ref_list[i++];
2037                 break;
2038             }
2039         }
2040         for (; j < ref_list_count; j++) {
2041             if (ref_list[j]->structure != picture_structure) {
2042                 RefPicList[n++] = ref_list[j++];
2043                 break;
2044             }
2045         }
2046     } while (i < ref_list_count || j < ref_list_count);
2047     *RefPicList_count = n;
2048 }
2049
2050 static inline void
2051 init_picture_refs_fields(
2052     GstVaapiPictureH264 *picture,
2053     GstVaapiPictureH264 *RefPicList[32],
2054     guint               *RefPicList_count,
2055     GstVaapiPictureH264 *short_ref[32],
2056     guint                short_ref_count,
2057     GstVaapiPictureH264 *long_ref[32],
2058     guint                long_ref_count
2059 )
2060 {
2061     guint n = 0;
2062
2063     /* 8.2.4.2.5 - reference picture lists in fields */
2064     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2065         short_ref, short_ref_count);
2066     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2067         long_ref, long_ref_count);
2068     *RefPicList_count = n;
2069 }
2070
2071 /* Finds the inter-view reference picture with the supplied view id */
2072 static GstVaapiPictureH264 *
2073 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2074 {
2075     GPtrArray * const inter_views = decoder->priv.inter_views;
2076     guint i;
2077
2078     for (i = 0; i < inter_views->len; i++) {
2079         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2080         if (picture->base.view_id == view_id)
2081             return picture;
2082     }
2083
2084     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2085         view_id);
2086     return NULL;
2087 }
2088
2089 /* Checks whether the view id exists in the supplied list of view ids */
2090 static gboolean
2091 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2092 {
2093     guint i;
2094
2095     for (i = 0; i < num_view_ids; i++) {
2096         if (view_ids[i] == view_id)
2097             return TRUE;
2098     }
2099     return FALSE;
2100 }
2101
2102 static gboolean
2103 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2104     gboolean is_anchor)
2105 {
2106     if (is_anchor)
2107         return (find_view_id(view_id, view->anchor_ref_l0,
2108                     view->num_anchor_refs_l0) ||
2109                 find_view_id(view_id, view->anchor_ref_l1,
2110                     view->num_anchor_refs_l1));
2111
2112     return (find_view_id(view_id, view->non_anchor_ref_l0,
2113                 view->num_non_anchor_refs_l0) ||
2114             find_view_id(view_id, view->non_anchor_ref_l1,
2115                 view->num_non_anchor_refs_l1));
2116 }
2117
2118 /* Checks whether the inter-view reference picture with the supplied
2119    view id is used for decoding the current view component picture */
2120 static gboolean
2121 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2122     guint16 view_id, GstVaapiPictureH264 *picture)
2123 {
2124     const GstH264SPS * const sps = get_sps(decoder);
2125     gboolean is_anchor;
2126
2127     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2128         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2129         return FALSE;
2130
2131     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2132     return find_view_id_in_view(view_id,
2133         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2134 }
2135
2136 /* Checks whether the supplied inter-view reference picture is used
2137    for decoding the next view component pictures */
2138 static gboolean
2139 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2140     GstVaapiPictureH264 *picture)
2141 {
2142     const GstH264SPS * const sps = get_sps(decoder);
2143     gboolean is_anchor;
2144     guint i, num_views;
2145
2146     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2147         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2148         return FALSE;
2149
2150     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2151     num_views = sps->extension.mvc.num_views_minus1 + 1;
2152     for (i = picture->base.voc + 1; i < num_views; i++) {
2153         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2154         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2155             return TRUE;
2156     }
2157     return FALSE;
2158 }
2159
2160 /* H.8.2.1 - Initialization process for inter-view prediction references */
2161 static void
2162 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2163     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2164     const guint16 *view_ids, guint num_view_ids)
2165 {
2166     guint j, n;
2167
2168     n = *ref_list_count_ptr;
2169     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2170         GstVaapiPictureH264 * const pic =
2171             find_inter_view_reference(decoder, view_ids[j]);
2172         if (pic)
2173             ref_list[n++] = pic;
2174     }
2175     *ref_list_count_ptr = n;
2176 }
2177
2178 static inline void
2179 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2180     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2181 {
2182     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2183     const GstH264SPS * const sps = get_sps(decoder);
2184     const GstH264SPSExtMVCView *view;
2185
2186     GST_DEBUG("initialize reference picture list for inter-view prediction");
2187
2188     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2189         return;
2190     view = &sps->extension.mvc.view[picture->base.voc];
2191
2192 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2193         init_picture_refs_mvc_1(decoder,                                \
2194             priv->RefPicList##ref_list,                                 \
2195             &priv->RefPicList##ref_list##_count,                        \
2196             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2197             view->view_list##_l##ref_list,                              \
2198             view->num_##view_list##s_l##ref_list);                      \
2199     } while (0)
2200
2201     if (list == 0) {
2202         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2203             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2204         else
2205             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2206     }
2207     else {
2208         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2209             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2210         else
2211             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2212     }
2213
2214 #undef INVOKE_INIT_PICTURE_REFS_MVC
2215 }
2216
2217 static void
2218 init_picture_refs_p_slice(
2219     GstVaapiDecoderH264 *decoder,
2220     GstVaapiPictureH264 *picture,
2221     GstH264SliceHdr     *slice_hdr
2222 )
2223 {
2224     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2225     GstVaapiPictureH264 **ref_list;
2226     guint i;
2227
2228     GST_DEBUG("decode reference picture list for P and SP slices");
2229
2230     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2231         /* 8.2.4.2.1 - P and SP slices in frames */
2232         if (priv->short_ref_count > 0) {
2233             ref_list = priv->RefPicList0;
2234             for (i = 0; i < priv->short_ref_count; i++)
2235                 ref_list[i] = priv->short_ref[i];
2236             SORT_REF_LIST(ref_list, i, pic_num_dec);
2237             priv->RefPicList0_count += i;
2238         }
2239
2240         if (priv->long_ref_count > 0) {
2241             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2242             for (i = 0; i < priv->long_ref_count; i++)
2243                 ref_list[i] = priv->long_ref[i];
2244             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2245             priv->RefPicList0_count += i;
2246         }
2247     }
2248     else {
2249         /* 8.2.4.2.2 - P and SP slices in fields */
2250         GstVaapiPictureH264 *short_ref[32];
2251         guint short_ref_count = 0;
2252         GstVaapiPictureH264 *long_ref[32];
2253         guint long_ref_count = 0;
2254
2255         if (priv->short_ref_count > 0) {
2256             for (i = 0; i < priv->short_ref_count; i++)
2257                 short_ref[i] = priv->short_ref[i];
2258             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2259             short_ref_count = i;
2260         }
2261
2262         if (priv->long_ref_count > 0) {
2263             for (i = 0; i < priv->long_ref_count; i++)
2264                 long_ref[i] = priv->long_ref[i];
2265             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2266             long_ref_count = i;
2267         }
2268
2269         init_picture_refs_fields(
2270             picture,
2271             priv->RefPicList0, &priv->RefPicList0_count,
2272             short_ref,          short_ref_count,
2273             long_ref,           long_ref_count
2274         );
2275     }
2276
2277     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2278         /* RefPicList0 */
2279         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2280     }
2281 }
2282
2283 static void
2284 init_picture_refs_b_slice(
2285     GstVaapiDecoderH264 *decoder,
2286     GstVaapiPictureH264 *picture,
2287     GstH264SliceHdr     *slice_hdr
2288 )
2289 {
2290     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2291     GstVaapiPictureH264 **ref_list;
2292     guint i, n;
2293
2294     GST_DEBUG("decode reference picture list for B slices");
2295
2296     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2297         /* 8.2.4.2.3 - B slices in frames */
2298
2299         /* RefPicList0 */
2300         if (priv->short_ref_count > 0) {
2301             // 1. Short-term references
2302             ref_list = priv->RefPicList0;
2303             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2304                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2305                     ref_list[n++] = priv->short_ref[i];
2306             }
2307             SORT_REF_LIST(ref_list, n, poc_dec);
2308             priv->RefPicList0_count += n;
2309
2310             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2311             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2312                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2313                     ref_list[n++] = priv->short_ref[i];
2314             }
2315             SORT_REF_LIST(ref_list, n, poc_inc);
2316             priv->RefPicList0_count += n;
2317         }
2318
2319         if (priv->long_ref_count > 0) {
2320             // 2. Long-term references
2321             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2322             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2323                 ref_list[n++] = priv->long_ref[i];
2324             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2325             priv->RefPicList0_count += n;
2326         }
2327
2328         /* RefPicList1 */
2329         if (priv->short_ref_count > 0) {
2330             // 1. Short-term references
2331             ref_list = priv->RefPicList1;
2332             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2333                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2334                     ref_list[n++] = priv->short_ref[i];
2335             }
2336             SORT_REF_LIST(ref_list, n, poc_inc);
2337             priv->RefPicList1_count += n;
2338
2339             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2340             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2341                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2342                     ref_list[n++] = priv->short_ref[i];
2343             }
2344             SORT_REF_LIST(ref_list, n, poc_dec);
2345             priv->RefPicList1_count += n;
2346         }
2347
2348         if (priv->long_ref_count > 0) {
2349             // 2. Long-term references
2350             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2351             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2352                 ref_list[n++] = priv->long_ref[i];
2353             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2354             priv->RefPicList1_count += n;
2355         }
2356     }
2357     else {
2358         /* 8.2.4.2.4 - B slices in fields */
2359         GstVaapiPictureH264 *short_ref0[32];
2360         guint short_ref0_count = 0;
2361         GstVaapiPictureH264 *short_ref1[32];
2362         guint short_ref1_count = 0;
2363         GstVaapiPictureH264 *long_ref[32];
2364         guint long_ref_count = 0;
2365
2366         /* refFrameList0ShortTerm */
2367         if (priv->short_ref_count > 0) {
2368             ref_list = short_ref0;
2369             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2370                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2371                     ref_list[n++] = priv->short_ref[i];
2372             }
2373             SORT_REF_LIST(ref_list, n, poc_dec);
2374             short_ref0_count += n;
2375
2376             ref_list = &short_ref0[short_ref0_count];
2377             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2378                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2379                     ref_list[n++] = priv->short_ref[i];
2380             }
2381             SORT_REF_LIST(ref_list, n, poc_inc);
2382             short_ref0_count += n;
2383         }
2384
2385         /* refFrameList1ShortTerm */
2386         if (priv->short_ref_count > 0) {
2387             ref_list = short_ref1;
2388             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2389                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2390                     ref_list[n++] = priv->short_ref[i];
2391             }
2392             SORT_REF_LIST(ref_list, n, poc_inc);
2393             short_ref1_count += n;
2394
2395             ref_list = &short_ref1[short_ref1_count];
2396             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2397                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2398                     ref_list[n++] = priv->short_ref[i];
2399             }
2400             SORT_REF_LIST(ref_list, n, poc_dec);
2401             short_ref1_count += n;
2402         }
2403
2404         /* refFrameListLongTerm */
2405         if (priv->long_ref_count > 0) {
2406             for (i = 0; i < priv->long_ref_count; i++)
2407                 long_ref[i] = priv->long_ref[i];
2408             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2409             long_ref_count = i;
2410         }
2411
2412         init_picture_refs_fields(
2413             picture,
2414             priv->RefPicList0, &priv->RefPicList0_count,
2415             short_ref0,         short_ref0_count,
2416             long_ref,           long_ref_count
2417         );
2418
2419         init_picture_refs_fields(
2420             picture,
2421             priv->RefPicList1, &priv->RefPicList1_count,
2422             short_ref1,         short_ref1_count,
2423             long_ref,           long_ref_count
2424         );
2425     }
2426
2427     /* Check whether RefPicList1 is identical to RefPicList0; if so,
2428        swap its first two entries */
2429     if (priv->RefPicList1_count > 1 &&
2430         priv->RefPicList1_count == priv->RefPicList0_count &&
2431         memcmp(priv->RefPicList0, priv->RefPicList1,
2432                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2433         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2434         priv->RefPicList1[0] = priv->RefPicList1[1];
2435         priv->RefPicList1[1] = tmp;
2436     }
2437
2438     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2439         /* RefPicList0 */
2440         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2441
2442         /* RefPicList1 */
2443         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2444     }
2445 }
2446
2447 #undef SORT_REF_LIST
2448
2449 static gint
2450 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2451 {
2452     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2453     guint i;
2454
2455     for (i = 0; i < priv->short_ref_count; i++) {
2456         if (priv->short_ref[i]->pic_num == pic_num)
2457             return i;
2458     }
2459     GST_ERROR("found no short-term reference picture with PicNum = %d",
2460               pic_num);
2461     return -1;
2462 }
2463
2464 static gint
2465 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2466 {
2467     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2468     guint i;
2469
2470     for (i = 0; i < priv->long_ref_count; i++) {
2471         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2472             return i;
2473     }
2474     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2475               long_term_pic_num);
2476     return -1;
2477 }
2478
2479 static void
2480 exec_picture_refs_modification_1(
2481     GstVaapiDecoderH264           *decoder,
2482     GstVaapiPictureH264           *picture,
2483     GstH264SliceHdr               *slice_hdr,
2484     guint                          list
2485 )
2486 {
2487     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2488     GstH264SPS * const sps = get_sps(decoder);
2489     GstH264RefPicListModification *ref_pic_list_modification;
2490     guint num_ref_pic_list_modifications;
2491     GstVaapiPictureH264 **ref_list;
2492     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2493     const guint16 *view_ids = NULL;
2494     guint i, j, n, num_refs, num_view_ids = 0;
2495     gint found_ref_idx;
2496     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2497
2498     GST_DEBUG("modification process of reference picture list %u", list);
2499
2500     if (list == 0) {
2501         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2502         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2503         ref_list                       = priv->RefPicList0;
2504         ref_list_count_ptr             = &priv->RefPicList0_count;
2505         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2506
2507         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2508             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2509             const GstH264SPSExtMVCView * const view =
2510                 &sps->extension.mvc.view[picture->base.voc];
2511             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2512                 view_ids = view->anchor_ref_l0;
2513                 num_view_ids = view->num_anchor_refs_l0;
2514             }
2515             else {
2516                 view_ids = view->non_anchor_ref_l0;
2517                 num_view_ids = view->num_non_anchor_refs_l0;
2518             }
2519         }
2520     }
2521     else {
2522         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2523         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2524         ref_list                       = priv->RefPicList1;
2525         ref_list_count_ptr             = &priv->RefPicList1_count;
2526         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2527
2528         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2529             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2530             const GstH264SPSExtMVCView * const view =
2531                 &sps->extension.mvc.view[picture->base.voc];
2532             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2533                 view_ids = view->anchor_ref_l1;
2534                 num_view_ids = view->num_anchor_refs_l1;
2535             }
2536             else {
2537                 view_ids = view->non_anchor_ref_l1;
2538                 num_view_ids = view->num_non_anchor_refs_l1;
2539             }
2540         }
2541     }
2542     ref_list_count = *ref_list_count_ptr;
2543
2544     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2545         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2546         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2547     }
2548     else {
2549         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2550         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2551     }
2552
2553     picNumPred = CurrPicNum;
2554     picViewIdxPred = -1;
2555
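    /* Process each ref_pic_list_modification() entry in order: insert the
       targeted reference picture at the current index, shifting the
       remaining entries down, then remove any later duplicate of that
       picture from the list. */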
2556     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2557         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2558         if (l->modification_of_pic_nums_idc == 3)
2559             break;
2560
2561         /* 8.2.4.3.1 - Short-term reference pictures */
2562         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2563             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2564             gint32 picNum, picNumNoWrap;
2565
2566             // (8-34)
2567             if (l->modification_of_pic_nums_idc == 0) {
2568                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2569                 if (picNumNoWrap < 0)
2570                     picNumNoWrap += MaxPicNum;
2571             }
2572
2573             // (8-35)
2574             else {
2575                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2576                 if (picNumNoWrap >= MaxPicNum)
2577                     picNumNoWrap -= MaxPicNum;
2578             }
2579             picNumPred = picNumNoWrap;
2580
2581             // (8-36)
2582             picNum = picNumNoWrap;
2583             if (picNum > CurrPicNum)
2584                 picNum -= MaxPicNum;
2585
2586             // (8-37)
2587             for (j = num_refs; j > ref_list_idx; j--)
2588                 ref_list[j] = ref_list[j - 1];
2589             found_ref_idx = find_short_term_reference(decoder, picNum);
2590             ref_list[ref_list_idx++] =
2591                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2592             n = ref_list_idx;
2593             for (j = ref_list_idx; j <= num_refs; j++) {
2594                 gint32 PicNumF;
2595                 if (!ref_list[j])
2596                     continue;
2597                 PicNumF =
2598                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2599                     ref_list[j]->pic_num : MaxPicNum;
2600                 if (PicNumF != picNum ||
2601                     ref_list[j]->base.view_id != picture->base.view_id)
2602                     ref_list[n++] = ref_list[j];
2603             }
2604         }
2605
2606         /* 8.2.4.3.2 - Long-term reference pictures */
2607         else if (l->modification_of_pic_nums_idc == 2) {
2608
2609             for (j = num_refs; j > ref_list_idx; j--)
2610                 ref_list[j] = ref_list[j - 1];
2611             found_ref_idx =
2612                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2613             ref_list[ref_list_idx++] =
2614                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2615             n = ref_list_idx;
2616             for (j = ref_list_idx; j <= num_refs; j++) {
2617                 gint32 LongTermPicNumF;
2618                 if (!ref_list[j])
2619                     continue;
2620                 LongTermPicNumF =
2621                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2622                     ref_list[j]->long_term_pic_num : INT_MAX;
2623                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2624                     ref_list[j]->base.view_id != picture->base.view_id)
2625                     ref_list[n++] = ref_list[j];
2626             }
2627         }
2628
2629         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2630         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2631                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2632                  (l->modification_of_pic_nums_idc == 4 ||
2633                   l->modification_of_pic_nums_idc == 5)) {
2634             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2635             gint32 picViewIdx, targetViewId;
2636
2637             // (H-6)
2638             if (l->modification_of_pic_nums_idc == 4) {
2639                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2640                 if (picViewIdx < 0)
2641                     picViewIdx += num_view_ids;
2642             }
2643
2644             // (H-7)
2645             else {
2646                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2647                 if (picViewIdx >= num_view_ids)
2648                     picViewIdx -= num_view_ids;
2649             }
2650             picViewIdxPred = picViewIdx;
2651
2652             // (H-8, H-9)
2653             targetViewId = view_ids[picViewIdx];
2654
2655             // (H-10)
2656             for (j = num_refs; j > ref_list_idx; j--)
2657                 ref_list[j] = ref_list[j - 1];
2658             ref_list[ref_list_idx++] =
2659                 find_inter_view_reference(decoder, targetViewId);
2660             n = ref_list_idx;
2661             for (j = ref_list_idx; j <= num_refs; j++) {
2662                 if (!ref_list[j])
2663                     continue;
2664                 if (ref_list[j]->base.view_id != targetViewId ||
2665                     ref_list[j]->base.poc != picture->base.poc)
2666                     ref_list[n++] = ref_list[j];
2667             }
2668         }
2669     }
2670
2671 #if DEBUG
2672     for (i = 0; i < num_refs; i++)
2673         if (!ref_list[i])
2674             GST_ERROR("list %u entry %u is empty", list, i);
2675 #endif
2676     *ref_list_count_ptr = num_refs;
2677 }
2678
2679 /* 8.2.4.3 - Modification process for reference picture lists */
2680 static void
2681 exec_picture_refs_modification(
2682     GstVaapiDecoderH264 *decoder,
2683     GstVaapiPictureH264 *picture,
2684     GstH264SliceHdr     *slice_hdr
2685 )
2686 {
2687     GST_DEBUG("execute ref_pic_list_modification()");
2688
2689     /* RefPicList0 */
2690     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2691         slice_hdr->ref_pic_list_modification_flag_l0)
2692         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2693
2694     /* RefPicList1 */
2695     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2696         slice_hdr->ref_pic_list_modification_flag_l1)
2697         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2698 }
2699
2700 static void
2701 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2702     GstVaapiPictureH264 *picture)
2703 {
2704     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2705     guint i, j, short_ref_count, long_ref_count;
2706
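    /* Rebuild the short_ref[]/long_ref[] arrays from the DPB, keeping only
       reference pictures that belong to the same view as the current
       picture. For frame decoding, only frame stores holding a complete
       frame are considered. */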
2707     short_ref_count = 0;
2708     long_ref_count  = 0;
2709     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2710         for (i = 0; i < priv->dpb_count; i++) {
2711             GstVaapiFrameStore * const fs = priv->dpb[i];
2712             GstVaapiPictureH264 *pic;
2713             if (!gst_vaapi_frame_store_has_frame(fs))
2714                 continue;
2715             pic = fs->buffers[0];
2716             if (pic->base.view_id != picture->base.view_id)
2717                 continue;
2718             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2719                 priv->short_ref[short_ref_count++] = pic;
2720             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2721                 priv->long_ref[long_ref_count++] = pic;
2722             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2723             pic->other_field = fs->buffers[1];
2724         }
2725     }
2726     else {
2727         for (i = 0; i < priv->dpb_count; i++) {
2728             GstVaapiFrameStore * const fs = priv->dpb[i];
2729             for (j = 0; j < fs->num_buffers; j++) {
2730                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2731                 if (pic->base.view_id != picture->base.view_id)
2732                     continue;
2733                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2734                     priv->short_ref[short_ref_count++] = pic;
2735                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2736                     priv->long_ref[long_ref_count++] = pic;
2737                 pic->structure = pic->base.structure;
2738                 pic->other_field = fs->buffers[j ^ 1];
2739             }
2740         }
2741     }
2742
2743     for (i = short_ref_count; i < priv->short_ref_count; i++)
2744         priv->short_ref[i] = NULL;
2745     priv->short_ref_count = short_ref_count;
2746
2747     for (i = long_ref_count; i < priv->long_ref_count; i++)
2748         priv->long_ref[i] = NULL;
2749     priv->long_ref_count = long_ref_count;
2750 }
2751
2752 static void
2753 init_picture_refs(
2754     GstVaapiDecoderH264 *decoder,
2755     GstVaapiPictureH264 *picture,
2756     GstH264SliceHdr     *slice_hdr
2757 )
2758 {
2759     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2760     guint i, num_refs;
2761
2762     init_picture_ref_lists(decoder, picture);
2763     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2764
2765     priv->RefPicList0_count = 0;
2766     priv->RefPicList1_count = 0;
2767
2768     switch (slice_hdr->type % 5) {
2769     case GST_H264_P_SLICE:
2770     case GST_H264_SP_SLICE:
2771         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2772         break;
2773     case GST_H264_B_SLICE:
2774         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2775         break;
2776     default:
2777         break;
2778     }
2779
2780     exec_picture_refs_modification(decoder, picture, slice_hdr);
2781
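    /* Clamp the final list sizes to num_ref_idx_lX_active_minus1 + 1,
       padding with NULL entries whenever the initialization process
       yielded fewer references than the slice header requests. */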
2782     switch (slice_hdr->type % 5) {
2783     case GST_H264_B_SLICE:
2784         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2785         for (i = priv->RefPicList1_count; i < num_refs; i++)
2786             priv->RefPicList1[i] = NULL;
2787         priv->RefPicList1_count = num_refs;
2788
2789         // fall-through
2790     case GST_H264_P_SLICE:
2791     case GST_H264_SP_SLICE:
2792         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2793         for (i = priv->RefPicList0_count; i < num_refs; i++)
2794             priv->RefPicList0[i] = NULL;
2795         priv->RefPicList0_count = num_refs;
2796         break;
2797     default:
2798         break;
2799     }
2800 }
2801
2802 static gboolean
2803 init_picture(
2804     GstVaapiDecoderH264 *decoder,
2805     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2806 {
2807     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2808     GstVaapiPicture * const base_picture = &picture->base;
2809     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2810
2811     priv->prev_frame_num        = priv->frame_num;
2812     priv->frame_num             = slice_hdr->frame_num;
2813     picture->frame_num          = priv->frame_num;
2814     picture->frame_num_wrap     = priv->frame_num;
2815     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2816     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2817     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2818     base_picture->view_id       = pi->view_id;
2819     base_picture->voc           = pi->voc;
2820
2821     /* Initialize extensions */
2822     switch (pi->nalu.extension_type) {
2823     case GST_H264_NAL_EXTENSION_MVC: {
2824         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2825
2826         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2827         if (mvc->inter_view_flag)
2828             GST_VAAPI_PICTURE_FLAG_SET(picture,
2829                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2830         if (mvc->anchor_pic_flag)
2831             GST_VAAPI_PICTURE_FLAG_SET(picture,
2832                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2833         break;
2834     }
2835     }
2836
2837     /* Reset decoder state for IDR pictures */
2838     if (pi->nalu.idr_pic_flag) {
2839         GST_DEBUG("<IDR>");
2840         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2841         dpb_flush(decoder, picture);
2842     }
2843
2844     /* Initialize picture structure */
2845     if (!slice_hdr->field_pic_flag)
2846         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2847     else {
2848         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2849         if (!slice_hdr->bottom_field_flag)
2850             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2851         else
2852             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2853     }
2854     picture->structure = base_picture->structure;
2855
2856     /* Initialize reference flags */
2857     if (pi->nalu.ref_idc) {
2858         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2859             &slice_hdr->dec_ref_pic_marking;
2860
2861         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2862             dec_ref_pic_marking->long_term_reference_flag)
2863             GST_VAAPI_PICTURE_FLAG_SET(picture,
2864                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2865         else
2866             GST_VAAPI_PICTURE_FLAG_SET(picture,
2867                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2868     }
2869
2870     init_picture_poc(decoder, picture, slice_hdr);
2871     return TRUE;
2872 }
2873
2874 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2875 static gboolean
2876 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2877 {
2878     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2879     GstH264SPS * const sps = get_sps(decoder);
2880     GstVaapiPictureH264 *ref_picture;
2881     guint i, m, max_num_ref_frames;
2882
2883     GST_DEBUG("reference picture marking process (sliding window)");
2884
2885     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2886         return TRUE;
2887
2888     max_num_ref_frames = sps->num_ref_frames;
2889     if (max_num_ref_frames == 0)
2890         max_num_ref_frames = 1;
2891     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2892         max_num_ref_frames <<= 1;
2893
2894     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2895         return TRUE;
2896     if (priv->short_ref_count < 1)
2897         return FALSE;
2898
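    /* Select the short-term reference picture with the smallest
       FrameNumWrap, i.e. the oldest one in decoding order, and mark it as
       "unused for reference". */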
2899     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2900         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2901         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2902             m = i;
2903     }
2904
2905     ref_picture = priv->short_ref[m];
2906     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2907     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2908
2909     /* Both fields need to be marked as "unused for reference", so
2910        remove the other field from the short_ref[] list as well */
2911     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2912         for (i = 0; i < priv->short_ref_count; i++) {
2913             if (priv->short_ref[i] == ref_picture->other_field) {
2914                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2915                 break;
2916             }
2917         }
2918     }
2919     return TRUE;
2920 }
2921
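/* Derives picNumX for MMCO 1 and 3: the picture number of the short-term
   reference targeted by difference_of_pic_nums_minus1 (see 8.2.5.4.1 and
   8.2.5.4.3). */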
2922 static inline gint32
2923 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2924 {
2925     gint32 pic_num;
2926
2927     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2928         pic_num = picture->frame_num_wrap;
2929     else
2930         pic_num = 2 * picture->frame_num_wrap + 1;
2931     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2932     return pic_num;
2933 }
2934
2935 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2936 static void
2937 exec_ref_pic_marking_adaptive_mmco_1(
2938     GstVaapiDecoderH264  *decoder,
2939     GstVaapiPictureH264  *picture,
2940     GstH264RefPicMarking *ref_pic_marking
2941 )
2942 {
2943     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2944     gint32 i, picNumX;
2945
2946     picNumX = get_picNumX(picture, ref_pic_marking);
2947     i = find_short_term_reference(decoder, picNumX);
2948     if (i < 0)
2949         return;
2950
2951     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2952         GST_VAAPI_PICTURE_IS_FRAME(picture));
2953     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2954 }
2955
2956 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2957 static void
2958 exec_ref_pic_marking_adaptive_mmco_2(
2959     GstVaapiDecoderH264  *decoder,
2960     GstVaapiPictureH264  *picture,
2961     GstH264RefPicMarking *ref_pic_marking
2962 )
2963 {
2964     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2965     gint32 i;
2966
2967     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2968     if (i < 0)
2969         return;
2970
2971     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2972         GST_VAAPI_PICTURE_IS_FRAME(picture));
2973     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2974 }
2975
2976 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2977 static void
2978 exec_ref_pic_marking_adaptive_mmco_3(
2979     GstVaapiDecoderH264  *decoder,
2980     GstVaapiPictureH264  *picture,
2981     GstH264RefPicMarking *ref_pic_marking
2982 )
2983 {
2984     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2985     GstVaapiPictureH264 *ref_picture, *other_field;
2986     gint32 i, picNumX;
2987
2988     for (i = 0; i < priv->long_ref_count; i++) {
2989         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2990             break;
2991     }
2992     if (i != priv->long_ref_count) {
2993         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2994         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2995     }
2996
2997     picNumX = get_picNumX(picture, ref_pic_marking);
2998     i = find_short_term_reference(decoder, picNumX);
2999     if (i < 0)
3000         return;
3001
3002     ref_picture = priv->short_ref[i];
3003     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3004     priv->long_ref[priv->long_ref_count++] = ref_picture;
3005
3006     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3007     gst_vaapi_picture_h264_set_reference(ref_picture,
3008         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3009         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3010
3011     /* Assign LongTermFrameIdx to the other field if it was also
3012        marked as "used for long-term reference" */
3013     other_field = ref_picture->other_field;
3014     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3015         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3016 }
3017
3018 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3019  * as "unused for reference" */
3020 static void
3021 exec_ref_pic_marking_adaptive_mmco_4(
3022     GstVaapiDecoderH264  *decoder,
3023     GstVaapiPictureH264  *picture,
3024     GstH264RefPicMarking *ref_pic_marking
3025 )
3026 {
3027     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3028     gint32 i, long_term_frame_idx;
3029
3030     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3031
3032     for (i = 0; i < priv->long_ref_count; i++) {
3033         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3034             continue;
3035         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3036         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3037         i--;
3038     }
3039 }
3040
3041 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3042 static void
3043 exec_ref_pic_marking_adaptive_mmco_5(
3044     GstVaapiDecoderH264  *decoder,
3045     GstVaapiPictureH264  *picture,
3046     GstH264RefPicMarking *ref_pic_marking
3047 )
3048 {
3049     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3050
3051     dpb_flush(decoder, picture);
3052
3053     priv->prev_pic_has_mmco5 = TRUE;
3054
3055     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3056     priv->frame_num = 0;
3057     priv->frame_num_offset = 0;
3058     picture->frame_num = 0;
3059
3060     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3061     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3062         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3063     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3064         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3065     picture->base.poc = 0;
3066 }
3067
3068 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3069 static void
3070 exec_ref_pic_marking_adaptive_mmco_6(
3071     GstVaapiDecoderH264  *decoder,
3072     GstVaapiPictureH264  *picture,
3073     GstH264RefPicMarking *ref_pic_marking
3074 )
3075 {
3076     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3077     GstVaapiPictureH264 *other_field;
3078     guint i;
3079
3080     for (i = 0; i < priv->long_ref_count; i++) {
3081         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3082             break;
3083     }
3084     if (i != priv->long_ref_count) {
3085         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3086         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3087     }
3088
3089     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3090     gst_vaapi_picture_h264_set_reference(picture,
3091         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3092         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3093
3094     /* Assign LongTermFrameIdx to the other field if it was also
3095        marked as "used for long-term reference" */
3096     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3097     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3098         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3099 }
3100
3101 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3102 static gboolean
3103 exec_ref_pic_marking_adaptive(
3104     GstVaapiDecoderH264     *decoder,
3105     GstVaapiPictureH264     *picture,
3106     GstH264DecRefPicMarking *dec_ref_pic_marking
3107 )
3108 {
3109     guint i;
3110
3111     GST_DEBUG("reference picture marking process (adaptive memory control)");
3112
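         /* The dispatch table below is indexed by
            memory_management_control_operation; value 0 is the end-of-list
            marker in the bitstream and has no handler. */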
3113     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3114         GstVaapiDecoderH264  *decoder,
3115         GstVaapiPictureH264  *picture,
3116         GstH264RefPicMarking *ref_pic_marking
3117     );
3118
3119     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3120         NULL,
3121         exec_ref_pic_marking_adaptive_mmco_1,
3122         exec_ref_pic_marking_adaptive_mmco_2,
3123         exec_ref_pic_marking_adaptive_mmco_3,
3124         exec_ref_pic_marking_adaptive_mmco_4,
3125         exec_ref_pic_marking_adaptive_mmco_5,
3126         exec_ref_pic_marking_adaptive_mmco_6,
3127     };
3128
3129     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3130         GstH264RefPicMarking * const ref_pic_marking =
3131             &dec_ref_pic_marking->ref_pic_marking[i];
3132
3133         const guint mmco = ref_pic_marking->memory_management_control_operation;
3134         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3135             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3136         else {
3137             GST_ERROR("unhandled MMCO %u", mmco);
3138             return FALSE;
3139         }
3140     }
3141     return TRUE;
3142 }
3143
3144 /* 8.2.5 - Execute reference picture marking process */
3145 static gboolean
3146 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3147 {
3148     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3149
3150     priv->prev_pic_has_mmco5 = FALSE;
3151     priv->prev_pic_structure = picture->structure;
3152
3153     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3154         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3155
3156     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3157         return TRUE;
3158
3159     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3160         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3161             &picture->last_slice_hdr->dec_ref_pic_marking;
3162         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3163             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3164                 return FALSE;
3165         }
3166         else {
3167             if (!exec_ref_pic_marking_sliding_window(decoder))
3168                 return FALSE;
3169         }
3170     }
3171     return TRUE;
3172 }
3173
3174 static void
3175 vaapi_init_picture(VAPictureH264 *pic)
3176 {
3177     pic->picture_id           = VA_INVALID_ID;
3178     pic->frame_idx            = 0;
3179     pic->flags                = VA_PICTURE_H264_INVALID;
3180     pic->TopFieldOrderCnt     = 0;
3181     pic->BottomFieldOrderCnt  = 0;
3182 }
3183
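     /* Translates a GstVaapiPictureH264 into a VAPictureH264 descriptor:
        reference flags, frame_idx (LongTermFrameIdx or frame_num) and the
        field/frame picture order counts. */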
3184 static void
3185 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3186     guint picture_structure)
3187 {
3188     if (!picture_structure)
3189         picture_structure = picture->structure;
3190
3191     pic->picture_id = picture->base.surface_id;
3192     pic->flags = 0;
3193
3194     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3195         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3196         pic->frame_idx = picture->long_term_frame_idx;
3197     }
3198     else {
3199         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3200             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3201         pic->frame_idx = picture->frame_num;
3202     }
3203
3204     switch (picture_structure) {
3205     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3206         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3207         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3208         break;
3209     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3210         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3211         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3212         pic->BottomFieldOrderCnt = 0;
3213         break;
3214     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3215         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3216         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3217         pic->TopFieldOrderCnt = 0;
3218         break;
3219     }
3220 }
3221
3222 static void
3223 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3224     GstVaapiPictureH264 *picture)
3225 {
3226     vaapi_fill_picture(pic, picture, 0);
3227
3228     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3229     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3230         /* The inter-view reference components and inter-view only
3231            reference components that are included in the reference
3232            picture lists are considered as not being marked as "used for
3233            short-term reference" or "used for long-term reference" */
3234         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3235                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3236     }
3237 }
3238
3239 static gboolean
3240 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3241 {
3242     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3243     GstVaapiPicture * const base_picture = &picture->base;
3244     GstH264PPS * const pps = get_pps(decoder);
3245     GstH264SPS * const sps = get_sps(decoder);
3246     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3247     guint i, n;
3248
3249     /* Fill in VAPictureParameterBufferH264 */
3250     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3251
3252     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3253         GstVaapiFrameStore * const fs = priv->dpb[i];
3254         if ((gst_vaapi_frame_store_has_reference(fs) &&
3255              fs->view_id == picture->base.view_id) ||
3256             (gst_vaapi_frame_store_has_inter_view(fs) &&
3257              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3258             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3259                 fs->buffers[0], fs->structure);
3260         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3261             break;
3262     }
3263     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3264         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3265
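     /* Copy identically-named syntax elements from the parsed SPS/PPS into
        the VA picture parameter buffer, either as a plain field (COPY_FIELD)
        or as a bitfield member (COPY_BFM). */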
3266 #define COPY_FIELD(s, f) \
3267     pic_param->f = (s)->f
3268
3269 #define COPY_BFM(a, s, f) \
3270     pic_param->a.bits.f = (s)->f
3271
3272     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3273     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3274     pic_param->frame_num                    = priv->frame_num;
3275
3276     COPY_FIELD(sps, bit_depth_luma_minus8);
3277     COPY_FIELD(sps, bit_depth_chroma_minus8);
3278     COPY_FIELD(sps, num_ref_frames);
3279     COPY_FIELD(pps, num_slice_groups_minus1);
3280     COPY_FIELD(pps, slice_group_map_type);
3281     COPY_FIELD(pps, slice_group_change_rate_minus1);
3282     COPY_FIELD(pps, pic_init_qp_minus26);
3283     COPY_FIELD(pps, pic_init_qs_minus26);
3284     COPY_FIELD(pps, chroma_qp_index_offset);
3285     COPY_FIELD(pps, second_chroma_qp_index_offset);
3286
3287     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3288     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3289     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3290
3291     COPY_BFM(seq_fields, sps, chroma_format_idc);
3292     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3293     COPY_BFM(seq_fields, sps, frame_mbs_only_flag); 
3294     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag); 
3295     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag); 
3296     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3297     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3298     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3299     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3300
3301     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3302     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3303     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3304
3305     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3306     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3307     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3308     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3309     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3310     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3311     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3312     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3313     return TRUE;
3314 }
3315
3316 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3317 static gboolean
3318 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3319 {
3320     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3321     GstH264PPS * const pps = slice_hdr->pps;
3322     GstH264SPS * const sps = pps->sequence;
3323     GstH264SliceHdr *prev_slice_hdr;
3324
3325     if (!prev_pi)
3326         return TRUE;
3327     prev_slice_hdr = &prev_pi->data.slice_hdr;
3328
3329 #define CHECK_EXPR(expr, field_name) do {              \
3330         if (!(expr)) {                                 \
3331             GST_DEBUG(field_name " differs in value"); \
3332             return TRUE;                               \
3333         }                                              \
3334     } while (0)
3335
3336 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3337     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3338
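         /* Each check below mirrors a condition from 7.4.1.2.4: as soon as
            one of the listed syntax elements differs from the previous
            slice, a new primary coded picture starts and TRUE is returned. */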
3339     /* view_id differs in value; the VOIdx ordering part of the condition
3340        is checked separately in is_new_access_unit() */
3341     CHECK_VALUE(pi, prev_pi, view_id);
3342
3343     /* frame_num differs in value, regardless of values inferred to be 0 */
3344     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3345
3346     /* pic_parameter_set_id differs in value */
3347     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3348
3349     /* field_pic_flag differs in value */
3350     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3351
3352     /* bottom_field_flag is present in both and differs in value */
3353     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3354         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3355
3356     /* nal_ref_idc differs in value, with one of the nal_ref_idc values equal to 0 */
3357     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3358                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3359
3360     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3361        value or delta_pic_order_cnt_bottom differs in value */
3362     if (sps->pic_order_cnt_type == 0) {
3363         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3364         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3365             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3366     }
3367
3368     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3369        differs in value or delta_pic_order_cnt[1] differs in value */
3370     else if (sps->pic_order_cnt_type == 1) {
3371         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3372         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3373     }
3374
3375     /* IdrPicFlag differs in value */
3376     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3377
3378     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3379     if (pi->nalu.idr_pic_flag)
3380         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3381
3382 #undef CHECK_EXPR
3383 #undef CHECK_VALUE
3384     return FALSE;
3385 }
3386
3387 /* Detection of a new access unit, assuming we are already in presence
3388    of a new picture */
3389 static inline gboolean
3390 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3391 {
3392     if (!prev_pi || prev_pi->view_id == pi->view_id)
3393         return TRUE;
3394     return pi->voc < prev_pi->voc;
3395 }
3396
3397 /* Finds the first field picture corresponding to the supplied picture */
3398 static GstVaapiPictureH264 *
3399 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3400 {
3401     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3402     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3403     GstVaapiFrameStore *fs;
3404
3405     if (!slice_hdr->field_pic_flag)
3406         return NULL;
3407
3408     fs = priv->prev_frames[pi->voc];
3409     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3410         return NULL;
3411
3412     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3413         return fs->buffers[0];
3414     return NULL;
3415 }
3416
3417 static GstVaapiDecoderStatus
3418 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3419 {
3420     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3421     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3422     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3423     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3424     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3425     GstVaapiPictureH264 *picture, *first_field;
3426     GstVaapiDecoderStatus status;
3427
3428     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3429     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3430
3431     /* Only decode base stream for MVC */
3432     switch (sps->profile_idc) {
3433     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3434     case GST_H264_PROFILE_STEREO_HIGH:
3435         if (0) {
3436             GST_DEBUG("drop picture from substream");
3437             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3438         }
3439         break;
3440     }
3441
3442     status = ensure_context(decoder, sps);
3443     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3444         return status;
3445
3446     priv->decoder_state = 0;
3447
3448     first_field = find_first_field(decoder, pi);
3449     if (first_field) {
3450         /* Re-use current picture where the first field was decoded */
3451         picture = gst_vaapi_picture_h264_new_field(first_field);
3452         if (!picture) {
3453             GST_ERROR("failed to allocate field picture");
3454             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3455         }
3456     }
3457     else {
3458         /* Create new picture */
3459         picture = gst_vaapi_picture_h264_new(decoder);
3460         if (!picture) {
3461             GST_ERROR("failed to allocate picture");
3462             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3463         }
3464     }
3465     gst_vaapi_picture_replace(&priv->current_picture, picture);
3466     gst_vaapi_picture_unref(picture);
3467
3468     /* Clear inter-view references list if this is the primary coded
3469        picture of the current access unit */
3470     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3471         g_ptr_array_set_size(priv->inter_views, 0);
3472
3473     /* Update cropping rectangle */
3474     if (sps->frame_cropping_flag) {
3475         GstVaapiRectangle crop_rect;
3476         crop_rect.x = sps->crop_rect_x;
3477         crop_rect.y = sps->crop_rect_y;
3478         crop_rect.width = sps->crop_rect_width;
3479         crop_rect.height = sps->crop_rect_height;
3480         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3481     }
3482
3483     status = ensure_quant_matrix(decoder, picture);
3484     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3485         GST_ERROR("failed to reset quantizer matrix");
3486         return status;
3487     }
3488
3489     if (!init_picture(decoder, picture, pi))
3490         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3491     if (!fill_picture(decoder, picture))
3492         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3493
3494     priv->decoder_state = pi->state;
3495     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3496 }
3497
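     /* Returns the bit offset of slice_data() relative to the start of the
        NAL unit (NAL header plus parsed slice header), with the emulation
        prevention bytes counted in the header subtracted out. */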
3498 static inline guint
3499 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3500 {
3501     guint epb_count;
3502
3503     epb_count = slice_hdr->n_emulation_prevention_bytes;
3504     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3505 }
3506
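     /* Fills the explicit prediction weight tables of the VA slice
        parameter. Weights are only emitted when explicit weighted prediction
        applies: one list for P/SP slices with weighted_pred_flag, two lists
        for B slices with weighted_bipred_idc equal to 1. */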
3507 static gboolean
3508 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3509     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3510 {
3511     VASliceParameterBufferH264 * const slice_param = slice->param;
3512     GstH264PPS * const pps = get_pps(decoder);
3513     GstH264SPS * const sps = get_sps(decoder);
3514     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3515     guint num_weight_tables = 0;
3516     gint i, j;
3517
3518     if (pps->weighted_pred_flag &&
3519         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3520         num_weight_tables = 1;
3521     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3522         num_weight_tables = 2;
3523     else
3524         num_weight_tables = 0;
3525
3526     slice_param->luma_log2_weight_denom   = 0;
3527     slice_param->chroma_log2_weight_denom = 0;
3528     slice_param->luma_weight_l0_flag      = 0;
3529     slice_param->chroma_weight_l0_flag    = 0;
3530     slice_param->luma_weight_l1_flag      = 0;
3531     slice_param->chroma_weight_l1_flag    = 0;
3532
3533     if (num_weight_tables < 1)
3534         return TRUE;
3535
3536     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3537     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3538
3539     slice_param->luma_weight_l0_flag = 1;
3540     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3541         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3542         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3543     }
3544
3545     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3546     if (slice_param->chroma_weight_l0_flag) {
3547         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3548             for (j = 0; j < 2; j++) {
3549                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3550                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3551             }
3552         }
3553     }
3554
3555     if (num_weight_tables < 2)
3556         return TRUE;
3557
3558     slice_param->luma_weight_l1_flag = 1;
3559     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3560         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3561         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3562     }
3563
3564     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3565     if (slice_param->chroma_weight_l1_flag) {
3566         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3567             for (j = 0; j < 2; j++) {
3568                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3569                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3570             }
3571         }
3572     }
3573     return TRUE;
3574 }
3575
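     /* Copies the reference picture lists built by init_picture_refs() into
        the VA slice parameter: RefPicList0 for P/B slices, RefPicList1 for B
        slices only; remaining entries are reset with vaapi_init_picture(). */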
3576 static gboolean
3577 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3578     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3579 {
3580     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3581     VASliceParameterBufferH264 * const slice_param = slice->param;
3582     guint i, num_ref_lists = 0;
3583
3584     slice_param->num_ref_idx_l0_active_minus1 = 0;
3585     slice_param->num_ref_idx_l1_active_minus1 = 0;
3586
3587     if (GST_H264_IS_B_SLICE(slice_hdr))
3588         num_ref_lists = 2;
3589     else if (GST_H264_IS_I_SLICE(slice_hdr))
3590         num_ref_lists = 0;
3591     else
3592         num_ref_lists = 1;
3593
3594     if (num_ref_lists < 1)
3595         return TRUE;
3596
3597     slice_param->num_ref_idx_l0_active_minus1 =
3598         slice_hdr->num_ref_idx_l0_active_minus1;
3599
3600     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3601         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3602             priv->RefPicList0[i]);
3603     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3604         vaapi_init_picture(&slice_param->RefPicList0[i]);
3605
3606     if (num_ref_lists < 2)
3607         return TRUE;
3608
3609     slice_param->num_ref_idx_l1_active_minus1 =
3610         slice_hdr->num_ref_idx_l1_active_minus1;
3611
3612     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3613         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3614             priv->RefPicList1[i]);
3615     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3616         vaapi_init_picture(&slice_param->RefPicList1[i]);
3617     return TRUE;
3618 }
3619
3620 static gboolean
3621 fill_slice(GstVaapiDecoderH264 *decoder,
3622     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3623 {
3624     VASliceParameterBufferH264 * const slice_param = slice->param;
3625     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3626
3627     /* Fill in VASliceParameterBufferH264 */
3628     slice_param->slice_data_bit_offset =
3629         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3630     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3631     slice_param->slice_type                     = slice_hdr->type % 5;
3632     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3633     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3634     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3635     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3636     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3637     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3638
3639     if (!fill_RefPicList(decoder, slice, slice_hdr))
3640         return FALSE;
3641     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3642         return FALSE;
3643     return TRUE;
3644 }
3645
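     /* Decodes a single slice NAL unit: maps the input buffer, wraps the
        slice payload into a GstVaapiSlice, builds the reference picture
        lists for the current picture and appends the slice to it. */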
3646 static GstVaapiDecoderStatus
3647 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3648 {
3649     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3650     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3651     GstVaapiPictureH264 * const picture = priv->current_picture;
3652     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3653     GstVaapiSlice *slice;
3654     GstBuffer * const buffer =
3655         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3656     GstMapInfo map_info;
3657
3658     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3659
3660     if (!is_valid_state(pi->state,
3661             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3662         GST_WARNING("failed to receive enough headers to decode slice");
3663         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3664     }
3665
3666     if (!ensure_pps(decoder, slice_hdr->pps)) {
3667         GST_ERROR("failed to activate PPS");
3668         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3669     }
3670
3671     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3672         GST_ERROR("failed to activate SPS");
3673         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3674     }
3675
3676     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3677         GST_ERROR("failed to map buffer");
3678         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3679     }
3680
3681     /* Check whether this is the first/last slice in the current access unit */
3682     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3683         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3684     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3685         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3686
3687     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3688         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3689     gst_buffer_unmap(buffer, &map_info);
3690     if (!slice) {
3691         GST_ERROR("failed to allocate slice");
3692         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3693     }
3694
3695     init_picture_refs(decoder, picture, slice_hdr);
3696     if (!fill_slice(decoder, slice, pi)) {
3697         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3698         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3699     }
3700
3701     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3702     picture->last_slice_hdr = slice_hdr;
3703     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3704     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3705 }
3706
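     /* Scans the adapter for the next 0x000001 start-code prefix within
        [ofs, ofs + size), matching a 32-bit window against the masked
        pattern 0x000001xx. */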
3707 static inline gint
3708 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3709 {
3710     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3711                                                      0xffffff00, 0x00000100,
3712                                                      ofs, size,
3713                                                      scp);
3714 }
3715
3716 static GstVaapiDecoderStatus
3717 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3718 {
3719     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3720     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3721     GstVaapiDecoderStatus status;
3722
3723     priv->decoder_state |= pi->state;
3724     switch (pi->nalu.type) {
3725     case GST_H264_NAL_SPS:
3726         status = decode_sps(decoder, unit);
3727         break;
3728     case GST_H264_NAL_SUBSET_SPS:
3729         status = decode_subset_sps(decoder, unit);
3730         break;
3731     case GST_H264_NAL_PPS:
3732         status = decode_pps(decoder, unit);
3733         break;
3734     case GST_H264_NAL_SLICE_EXT:
3735     case GST_H264_NAL_SLICE_IDR:
3736         /* fall-through. IDR specifics are handled in init_picture() */
3737     case GST_H264_NAL_SLICE:
3738         status = decode_slice(decoder, unit);
3739         break;
3740     case GST_H264_NAL_SEQ_END:
3741     case GST_H264_NAL_STREAM_END:
3742         status = decode_sequence_end(decoder);
3743         break;
3744     case GST_H264_NAL_SEI:
3745         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3746         break;
3747     default:
3748         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3749         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3750         break;
3751     }
3752     return status;
3753 }
3754
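     /* Parses avcC codec-data (ISO/IEC 14496-15): byte 4 carries
        lengthSizeMinusOne, byte 5 the SPS count, each parameter set NAL
        being prefixed by a 16-bit size; a PPS count byte and the PPS
        entries follow. */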
3755 static GstVaapiDecoderStatus
3756 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3757     const guchar *buf, guint buf_size)
3758 {
3759     GstVaapiDecoderH264 * const decoder =
3760         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3761     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3762     GstVaapiDecoderStatus status;
3763     GstVaapiDecoderUnit unit;
3764     GstVaapiParserInfoH264 *pi = NULL;
3765     GstH264ParserResult result;
3766     guint i, ofs, num_sps, num_pps;
3767
3768     unit.parsed_info = NULL;
3769
3770     if (buf_size < 8)
3771         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3772
3773     if (buf[0] != 1) {
3774         GST_ERROR("failed to decode codec-data, not in avcC format");
3775         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3776     }
3777
3778     priv->nal_length_size = (buf[4] & 0x03) + 1;
3779
3780     num_sps = buf[5] & 0x1f;
3781     ofs = 6;
3782
3783     for (i = 0; i < num_sps; i++) {
3784         pi = gst_vaapi_parser_info_h264_new();
3785         if (!pi)
3786             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3787         unit.parsed_info = pi;
3788
3789         result = gst_h264_parser_identify_nalu_avc(
3790             priv->parser,
3791             buf, ofs, buf_size, 2,
3792             &pi->nalu
3793         );
3794         if (result != GST_H264_PARSER_OK) {
3795             status = get_status(result);
3796             goto cleanup;
3797         }
3798
3799         status = parse_sps(decoder, &unit);
3800         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3801             goto cleanup;
3802         ofs = pi->nalu.offset + pi->nalu.size;
3803
3804         status = decode_sps(decoder, &unit);
3805         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3806             goto cleanup;
3807         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3808     }
3809
3810     num_pps = buf[ofs];
3811     ofs++;
3812
3813     for (i = 0; i < num_pps; i++) {
3814         pi = gst_vaapi_parser_info_h264_new();
3815         if (!pi)
3816             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3817         unit.parsed_info = pi;
3818
3819         result = gst_h264_parser_identify_nalu_avc(
3820             priv->parser,
3821             buf, ofs, buf_size, 2,
3822             &pi->nalu
3823         );
3824         if (result != GST_H264_PARSER_OK) {
3825             status = get_status(result);
3826             goto cleanup;
3827         }
3828
3829         status = parse_pps(decoder, &unit);
3830         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3831             goto cleanup;
3832         ofs = pi->nalu.offset + pi->nalu.size;
3833
3834         status = decode_pps(decoder, &unit);
3835         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3836             goto cleanup;
3837         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3838     }
3839
3840     priv->is_avcC = TRUE;
3841     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3842
3843 cleanup:
3844     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3845     return status;
3846 }
3847
3848 static GstVaapiDecoderStatus
3849 ensure_decoder(GstVaapiDecoderH264 *decoder)
3850 {
3851     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3852     GstVaapiDecoderStatus status;
3853
3854     if (!priv->is_opened) {
3855         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3856         if (!priv->is_opened)
3857             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3858
3859         status = gst_vaapi_decoder_decode_codec_data(
3860             GST_VAAPI_DECODER_CAST(decoder));
3861         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3862             return status;
3863     }
3864     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3865 }
3866
3867 static GstVaapiDecoderStatus
3868 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3869     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3870 {
3871     GstVaapiDecoderH264 * const decoder =
3872         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3873     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3874     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3875     GstVaapiParserInfoH264 *pi;
3876     GstVaapiDecoderStatus status;
3877     GstH264ParserResult result;
3878     guchar *buf;
3879     guint i, size, buf_size, nalu_size, flags;
3880     guint32 start_code;
3881     gint ofs, ofs2;
3882
3883     status = ensure_decoder(decoder);
3884     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3885         return status;
3886
3887     switch (priv->stream_alignment) {
3888     case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3889         size = gst_adapter_available_fast(adapter);
3890         break;
3891     default:
3892         size = gst_adapter_available(adapter);
3893         break;
3894     }
3895
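         /* In avcC mode each NAL unit is prefixed by a big-endian length of
            nal_length_size bytes; in byte-stream mode NAL boundaries are
            found by scanning for start codes. */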
3896     if (priv->is_avcC) {
3897         if (size < priv->nal_length_size)
3898             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3899
3900         buf = (guchar *)&start_code;
3901         g_assert(priv->nal_length_size <= sizeof(start_code));
3902         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3903
3904         nalu_size = 0;
3905         for (i = 0; i < priv->nal_length_size; i++)
3906             nalu_size = (nalu_size << 8) | buf[i];
3907
3908         buf_size = priv->nal_length_size + nalu_size;
3909         if (size < buf_size)
3910             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3911     }
3912     else {
3913         if (size < 4)
3914             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3915
3916         if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
3917             buf_size = size;
3918         else {
3919             ofs = scan_for_start_code(adapter, 0, size, NULL);
3920             if (ofs < 0)
3921                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3922
3923             if (ofs > 0) {
3924                 gst_adapter_flush(adapter, ofs);
3925                 size -= ofs;
3926             }
3927
3928             ofs2 = ps->input_offset2 - ofs - 4;
3929             if (ofs2 < 4)
3930                 ofs2 = 4;
3931
3932             ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3933                 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3934             if (ofs < 0) {
3935                 /* Assume the whole NAL unit is present if end-of-stream */
3936                 if (!at_eos) {
3937                     ps->input_offset2 = size;
3938                     return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3939                 }
3940                 ofs = size;
3941             }
3942             buf_size = ofs;
3943         }
3944     }
3945     ps->input_offset2 = 0;
3946
3947     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3948     if (!buf)
3949         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3950
3951     unit->size = buf_size;
3952
3953     pi = gst_vaapi_parser_info_h264_new();
3954     if (!pi)
3955         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3956
3957     gst_vaapi_decoder_unit_set_parsed_info(unit,
3958         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3959
3960     if (priv->is_avcC)
3961         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3962             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3963     else
3964         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3965             buf, 0, buf_size, &pi->nalu);
3966     status = get_status(result);
3967     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3968         return status;
3969
3970     switch (pi->nalu.type) {
3971     case GST_H264_NAL_SPS:
3972         status = parse_sps(decoder, unit);
3973         break;
3974     case GST_H264_NAL_SUBSET_SPS:
3975         status = parse_subset_sps(decoder, unit);
3976         break;
3977     case GST_H264_NAL_PPS:
3978         status = parse_pps(decoder, unit);
3979         break;
3980     case GST_H264_NAL_SEI:
3981         status = parse_sei(decoder, unit);
3982         break;
3983     case GST_H264_NAL_SLICE_EXT:
3984         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3985             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3986             break;
3987         }
3988         /* fall-through */
3989     case GST_H264_NAL_SLICE_IDR:
3990     case GST_H264_NAL_SLICE:
3991         status = parse_slice(decoder, unit);
3992         break;
3993     default:
3994         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3995         break;
3996     }
3997     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3998         return status;
3999
4000     flags = 0;
4001     switch (pi->nalu.type) {
4002     case GST_H264_NAL_AU_DELIMITER:
4003         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4004         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4005         /* fall-through */
4006     case GST_H264_NAL_FILLER_DATA:
4007         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4008         break;
4009     case GST_H264_NAL_STREAM_END:
4010         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4011         /* fall-through */
4012     case GST_H264_NAL_SEQ_END:
4013         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4014         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4015         break;
4016     case GST_H264_NAL_SPS:
4017     case GST_H264_NAL_SUBSET_SPS:
4018     case GST_H264_NAL_PPS:
4019     case GST_H264_NAL_SEI:
4020         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4021         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4022         break;
4023     case GST_H264_NAL_SLICE_EXT:
4024         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4025             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4026             break;
4027         }
4028         /* fall-through */
4029     case GST_H264_NAL_SLICE_IDR:
4030     case GST_H264_NAL_SLICE:
4031         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4032         if (is_new_picture(pi, priv->prev_slice_pi)) {
4033             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4034             if (is_new_access_unit(pi, priv->prev_slice_pi))
4035                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4036         }
4037         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4038         break;
4039     case GST_H264_NAL_SPS_EXT:
4040     case GST_H264_NAL_SLICE_AUX:
4041         /* skip SPS extension and auxiliary slice for now */
4042         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4043         break;
4044     case GST_H264_NAL_PREFIX_UNIT:
4045         /* skip Prefix NAL units for now */
4046         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4047             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4048             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4049         break;
4050     default:
4051         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4052             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4053                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4054         break;
4055     }
4056     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4057         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4058     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4059
4060     pi->nalu.data = NULL;
4061     pi->state = priv->parser_state;
4062     pi->flags = flags;
4063     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4064     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4065 }
4066
4067 static GstVaapiDecoderStatus
4068 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4069     GstVaapiDecoderUnit *unit)
4070 {
4071     GstVaapiDecoderH264 * const decoder =
4072         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4073     GstVaapiDecoderStatus status;
4074
4075     status = ensure_decoder(decoder);
4076     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4077         return status;
4078     return decode_unit(decoder, unit);
4079 }
4080
4081 static GstVaapiDecoderStatus
4082 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4083     GstVaapiDecoderUnit *unit)
4084 {
4085     GstVaapiDecoderH264 * const decoder =
4086         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4087
4088     return decode_picture(decoder, unit);
4089 }
4090
4091 static GstVaapiDecoderStatus
4092 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4093 {
4094     GstVaapiDecoderH264 * const decoder =
4095         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4096
4097     return decode_current_picture(decoder);
4098 }
4099
4100 static GstVaapiDecoderStatus
4101 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4102 {
4103     GstVaapiDecoderH264 * const decoder =
4104         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4105
4106     dpb_flush(decoder, NULL);
4107     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4108 }
4109
4110 static void
4111 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4112 {
4113     GstVaapiMiniObjectClass * const object_class =
4114         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4115     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4116
4117     object_class->size          = sizeof(GstVaapiDecoderH264);
4118     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4119
4120     decoder_class->create       = gst_vaapi_decoder_h264_create;
4121     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4122     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4123     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4124     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4125     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4126     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4127
4128     decoder_class->decode_codec_data =
4129         gst_vaapi_decoder_h264_decode_codec_data;
4130 }
4131
4132 static inline const GstVaapiDecoderClass *
4133 gst_vaapi_decoder_h264_class(void)
4134 {
4135     static GstVaapiDecoderH264Class g_class;
4136     static gsize g_class_init = FALSE;
4137
4138     if (g_once_init_enter(&g_class_init)) {
4139         gst_vaapi_decoder_h264_class_init(&g_class);
4140         g_once_init_leave(&g_class_init, TRUE);
4141     }
4142     return GST_VAAPI_DECODER_CLASS(&g_class);
4143 }
4144
4145 /**
4146  * gst_vaapi_decoder_h264_set_alignment:
4147  * @decoder: a #GstVaapiDecoderH264
4148  * @alignment: the #GstVaapiStreamAlignH264
4149  *
4150  * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4151  * of each buffer that is supplied to the decoder. This could be no
4152  * specific alignment, NAL unit boundaries, or access unit boundaries.
4153  */
4154 void
4155 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4156     GstVaapiStreamAlignH264 alignment)
4157 {
4158     g_return_if_fail(decoder != NULL);
4159
4160     decoder->priv.stream_alignment = alignment;
4161 }
4162
4163 /**
4164  * gst_vaapi_decoder_h264_new:
4165  * @display: a #GstVaapiDisplay
4166  * @caps: a #GstCaps holding codec information
4167  *
4168  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4169  * hold extra information like codec-data and picture coded size.
4170  *
4171  * Return value: the newly allocated #GstVaapiDecoder object
4172  */
4173 GstVaapiDecoder *
4174 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4175 {
4176     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4177 }