1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* This flag does not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit as we are only
69        interested in knowing if the current slice is the first one or
70        the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
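/* Note on the reference-flag encoding used by the macros below: a short-term
 * reference picture carries only the base REFERENCE flag, while a long-term
 * reference additionally carries REFERENCE2.  The IS_SHORT_TERM / IS_LONG_TERM
 * checks therefore mask with GST_VAAPI_PICTURE_FLAGS_REFERENCE and compare
 * against the exact combination, i.e. roughly:
 *
 *   short-term: (flags & FLAGS_REFERENCE) == FLAG_REFERENCE
 *   long-term:  (flags & FLAGS_REFERENCE) == (FLAG_REFERENCE | FLAG_REFERENCE2)
 */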
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
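/* Pairs the second decoded field with the first one already held in the frame
 * store.  Besides storing the extra buffer, this propagates the new field's
 * POC into the first field's field_poc[] slot and copies the opposite field
 * POC back, so both pictures end up with a complete
 * {TopFieldOrderCnt, BottomFieldOrderCnt} pair. */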
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
355
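/* Splits a frame-coded picture into two field pictures sharing the same
 * surface.  This is only invoked for non-progressive sequences (see
 * dpb_add()), so that each field can afterwards be tracked, referenced and
 * output on its own. */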
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
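/* Roughly speaking, parser_state and decoder_state in the private struct are
 * bitmasks of the GstH264VideoState values above: parser_state tracks which
 * headers were successfully parsed for the current picture, decoder_state
 * which ones were actually decoded.  A picture is only submitted once
 * GST_H264_VIDEO_STATE_VALID_PICTURE is reached (see
 * decode_current_picture()), otherwise the frame is dropped. */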
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiStreamAlignH264     stream_alignment;
449     GstVaapiPictureH264        *current_picture;
450     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
451     GstVaapiParserInfoH264     *active_sps;
452     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
453     GstVaapiParserInfoH264     *active_pps;
454     GstVaapiParserInfoH264     *prev_pi;
455     GstVaapiParserInfoH264     *prev_slice_pi;
456     GstVaapiFrameStore        **prev_frames;
457     guint                       prev_frames_alloc;
458     GstVaapiFrameStore        **dpb;
459     guint                       dpb_count;
460     guint                       dpb_size;
461     guint                       dpb_size_max;
462     guint                       max_views;
463     GstVaapiProfile             profile;
464     GstVaapiEntrypoint          entrypoint;
465     GstVaapiChromaType          chroma_type;
466     GPtrArray                  *inter_views;
467     GstVaapiPictureH264        *short_ref[32];
468     guint                       short_ref_count;
469     GstVaapiPictureH264        *long_ref[32];
470     guint                       long_ref_count;
471     GstVaapiPictureH264        *RefPicList0[32];
472     guint                       RefPicList0_count;
473     GstVaapiPictureH264        *RefPicList1[32];
474     guint                       RefPicList1_count;
475     guint                       nal_length_size;
476     guint                       mb_width;
477     guint                       mb_height;
478     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479     gint32                      poc_msb;                // PicOrderCntMsb
480     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
481     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
482     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
483     gint32                      frame_num_offset;       // FrameNumOffset
484     gint32                      frame_num;              // frame_num (from slice_header())
485     gint32                      prev_frame_num;         // prevFrameNum
486     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
487     gboolean                    prev_pic_structure;     // previous picture structure
488     guint                       is_opened               : 1;
489     guint                       is_avcC                 : 1;
490     guint                       has_context             : 1;
491     guint                       progressive_sequence    : 1;
492 };
493
494 /**
495  * GstVaapiDecoderH264:
496  *
497  * A decoder based on H264.
498  */
499 struct _GstVaapiDecoderH264 {
500     /*< private >*/
501     GstVaapiDecoder             parent_instance;
502     GstVaapiDecoderH264Private  priv;
503 };
504
505 /**
506  * GstVaapiDecoderH264Class:
507  *
508  * A decoder class based on H264.
509  */
510 struct _GstVaapiDecoderH264Class {
511     /*< private >*/
512     GstVaapiDecoderClass parent_class;
513 };
514
515 static gboolean
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
517
518 static gboolean
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520     GstVaapiPictureH264 *picture);
521
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524     GstVaapiFrameStore *fs)
525 {
526     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
527 }
528
529 /* Determines if the supplied profile is one of the MVC set */
530 static gboolean
531 is_mvc_profile(GstH264Profile profile)
532 {
533     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534         profile == GST_H264_PROFILE_STEREO_HIGH;
535 }
536
537 /* Determines the view_id from the supplied NAL unit */
538 static inline guint
539 get_view_id(GstH264NalUnit *nalu)
540 {
541     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
542 }
543
544 /* Determines the view order index (VOIdx) from the supplied view_id */
545 static gint
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
547 {
548     GstH264SPSExtMVC *mvc;
549     gint i;
550
551     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
552         return 0;
553
554     mvc = &sps->extension.mvc;
555     for (i = 0; i <= mvc->num_views_minus1; i++) {
556         if (mvc->view[i].view_id == view_id)
557             return i;
558     }
559     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
560     return -1;
561 }
562
563 /* Determines NumViews */
564 static guint
565 get_num_views(GstH264SPS *sps)
566 {
567     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568         sps->extension.mvc.num_views_minus1 : 0);
569 }
570
571 /* Get number of reference frames to use */
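/* Rough worked example (illustrative numbers, not taken from any particular
 * bitstream): for a 1920x1088 stream at level 4.1, Table A-1 gives
 * MaxDpbMbs = 32768 and PicSizeMbs = 120 * 68 = 8160, so
 * max_dec_frame_buffering = 32768 / 8160 = 4 frames, unless the VUI
 * bitstream_restriction data or sps->num_ref_frames overrides that value. */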
572 static guint
573 get_max_dec_frame_buffering(GstH264SPS *sps)
574 {
575     guint num_views, max_dpb_frames;
576     guint max_dec_frame_buffering, PicSizeMbs;
577     GstVaapiLevelH264 level;
578     const GstVaapiH264LevelLimits *level_limits;
579
580     /* Table A-1 - Level limits */
581     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582         level = GST_VAAPI_LEVEL_H264_L1b;
583     else
584         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586     if (G_UNLIKELY(!level_limits)) {
587         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588         max_dec_frame_buffering = 16;
589     }
590     else {
591         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592                       (sps->pic_height_in_map_units_minus1 + 1) *
593                       (sps->frame_mbs_only_flag ? 1 : 2));
594         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
595     }
596     if (is_mvc_profile(sps->profile_idc))
597         max_dec_frame_buffering <<= 1;
598
599     /* VUI parameters */
600     if (sps->vui_parameters_present_flag) {
601         GstH264VUIParams * const vui_params = &sps->vui_parameters;
602         if (vui_params->bitstream_restriction_flag)
603             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
604         else {
605             switch (sps->profile_idc) {
606             case 44:  // CAVLC 4:4:4 Intra profile
607             case GST_H264_PROFILE_SCALABLE_HIGH:
608             case GST_H264_PROFILE_HIGH:
609             case GST_H264_PROFILE_HIGH10:
610             case GST_H264_PROFILE_HIGH_422:
611             case GST_H264_PROFILE_HIGH_444:
612                 if (sps->constraint_set3_flag)
613                     max_dec_frame_buffering = 0;
614                 break;
615             }
616         }
617     }
618
619     num_views = get_num_views(sps);
620     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621     if (max_dec_frame_buffering > max_dpb_frames)
622         max_dec_frame_buffering = max_dpb_frames;
623     else if (max_dec_frame_buffering < sps->num_ref_frames)
624         max_dec_frame_buffering = sps->num_ref_frames;
625     return MAX(1, max_dec_frame_buffering);
626 }
627
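/* Removes entry @index by moving the last entry into its place: O(1) but does
 * not preserve array ordering.  An order-preserving variant is kept under the
 * disabled #else branch below, for use with strict DPB ordering debugging. */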
628 static void
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
630 {
631     gpointer * const entries = array;
632     guint num_entries = *array_length_ptr;
633
634     g_return_if_fail(index < num_entries);
635
636     if (index != --num_entries)
637         entries[index] = entries[num_entries];
638     entries[num_entries] = NULL;
639     *array_length_ptr = num_entries;
640 }
641
642 #if 1
643 static inline void
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
645 {
646     array_remove_index_fast(array, array_length_ptr, index);
647 }
648 #else
649 static void
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
651 {
652     gpointer * const entries = array;
653     const guint num_entries = *array_length_ptr - 1;
654     guint i;
655
656     g_return_if_fail(index <= num_entries);
657
658     for (i = index; i < num_entries; i++)
659         entries[i] = entries[i + 1];
660     entries[num_entries] = NULL;
661     *array_length_ptr = num_entries;
662 }
663 #endif
664
665 #define ARRAY_REMOVE_INDEX(array, index) \
666     array_remove_index(array, &array##_count, index)
667
668 static void
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
670 {
671     GstVaapiDecoderH264Private * const priv = &decoder->priv;
672     guint i, num_frames = --priv->dpb_count;
673
674     if (USE_STRICT_DPB_ORDERING) {
675         for (i = index; i < num_frames; i++)
676             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
677     }
678     else if (index != num_frames)
679         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
681 }
682
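/* Outputs a picture for display.  When a frame store is supplied,
 * fs->output_needed counts how many of its field pictures still await output;
 * the underlying frame (buffers[0]) is only pushed downstream once that
 * counter drops to zero, so paired fields are emitted as a single frame. */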
683 static gboolean
684 dpb_output(
685     GstVaapiDecoderH264 *decoder,
686     GstVaapiFrameStore  *fs,
687     GstVaapiPictureH264 *picture
688 )
689 {
690     picture->output_needed = FALSE;
691
692     if (fs) {
693         if (--fs->output_needed > 0)
694             return TRUE;
695         picture = fs->buffers[0];
696     }
697     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
698 }
699
700 static inline void
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
702 {
703     GstVaapiDecoderH264Private * const priv = &decoder->priv;
704     GstVaapiFrameStore * const fs = priv->dpb[i];
705
706     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707         dpb_remove_index(decoder, i);
708 }
709
710 /* Finds the frame store holding the supplied picture */
711 static gint
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
713 {
714     GstVaapiDecoderH264Private * const priv = &decoder->priv;
715     gint i, j;
716
717     for (i = 0; i < priv->dpb_count; i++) {
718         GstVaapiFrameStore * const fs = priv->dpb[i];
719         for (j = 0; j < fs->num_buffers; j++) {
720             if (fs->buffers[j] == picture)
721                 return i;
722         }
723     }
724     return -1;
725 }
726
727 /* Finds the picture with the lowest POC that needs to be output */
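/* Ties on POC (same access unit across views) are broken by the lowest view
 * order index (VOC), so view components come out in increasing view order. */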
728 static gint
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730     GstVaapiPictureH264 **found_picture_ptr)
731 {
732     GstVaapiDecoderH264Private * const priv = &decoder->priv;
733     GstVaapiPictureH264 *found_picture = NULL;
734     guint i, j, found_index;
735
736     for (i = 0; i < priv->dpb_count; i++) {
737         GstVaapiFrameStore * const fs = priv->dpb[i];
738         if (!fs->output_needed)
739             continue;
740         if (picture && picture->base.view_id != fs->view_id)
741             continue;
742         for (j = 0; j < fs->num_buffers; j++) {
743             GstVaapiPictureH264 * const pic = fs->buffers[j];
744             if (!pic->output_needed)
745                 continue;
746             if (!found_picture || found_picture->base.poc > pic->base.poc ||
747                 (found_picture->base.poc == pic->base.poc &&
748                  found_picture->base.voc > pic->base.voc))
749                 found_picture = pic, found_index = i;
750         }
751     }
752
753     if (found_picture_ptr)
754         *found_picture_ptr = found_picture;
755     return found_picture ? found_index : -1;
756 }
757
758 /* Finds the picture with the lowest VOC that needs to be output */
759 static gint
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761     GstVaapiPictureH264 **found_picture_ptr)
762 {
763     GstVaapiDecoderH264Private * const priv = &decoder->priv;
764     GstVaapiPictureH264 *found_picture = NULL;
765     guint i, j, found_index;
766
767     for (i = 0; i < priv->dpb_count; i++) {
768         GstVaapiFrameStore * const fs = priv->dpb[i];
769         if (!fs->output_needed || fs->view_id == picture->base.view_id)
770             continue;
771         for (j = 0; j < fs->num_buffers; j++) {
772             GstVaapiPictureH264 * const pic = fs->buffers[j];
773             if (!pic->output_needed || pic->base.poc != picture->base.poc)
774                 continue;
775             if (!found_picture || found_picture->base.voc > pic->base.voc)
776                 found_picture = pic, found_index = i;
777         }
778     }
779
780     if (found_picture_ptr)
781         *found_picture_ptr = found_picture;
782     return found_picture ? found_index : -1;
783 }
784
785 static gboolean
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787     GstVaapiPictureH264 *picture, guint voc)
788 {
789     GstVaapiDecoderH264Private * const priv = &decoder->priv;
790     GstVaapiPictureH264 *found_picture;
791     gint found_index;
792     gboolean success;
793
794     if (priv->max_views == 1)
795         return TRUE;
796
797     /* Emit all other view components that were in the same access
798        unit as the picture we have just found */
799     found_picture = picture;
800     for (;;) {
801         found_index = dpb_find_lowest_voc(decoder, found_picture,
802             &found_picture);
803         if (found_index < 0 || found_picture->base.voc >= voc)
804             break;
805         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806         dpb_evict(decoder, found_picture, found_index);
807         if (!success)
808             return FALSE;
809     }
810     return TRUE;
811 }
812
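/* "Bumping" process, loosely following C.4.5.3: find the picture with the
 * lowest POC that still needs to be output, output it, and evict its frame
 * store from the DPB if it is needed neither for output nor for reference.
 * For MVC streams, the other view components of the same access unit are
 * emitted around it so that views stay interleaved in output order. */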
813 static gboolean
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
815 {
816     GstVaapiDecoderH264Private * const priv = &decoder->priv;
817     GstVaapiPictureH264 *found_picture;
818     gint found_index;
819     gboolean success;
820
821     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
822     if (found_index < 0)
823         return FALSE;
824
825     if (picture && picture->base.poc != found_picture->base.poc)
826         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
827
828     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829     dpb_evict(decoder, found_picture, found_index);
830     if (priv->max_views == 1)
831         return success;
832
833     if (picture && picture->base.poc != found_picture->base.poc)
834         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
835     return success;
836 }
837
838 static void
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 {
841     GstVaapiDecoderH264Private * const priv = &decoder->priv;
842     guint i, n;
843
844     for (i = 0; i < priv->dpb_count; i++) {
845         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
846             continue;
847         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
848     }
849
850     for (i = 0, n = 0; i < priv->dpb_count; i++) {
851         if (priv->dpb[i])
852             priv->dpb[n++] = priv->dpb[i];
853     }
854     priv->dpb_count = n;
855
856     /* Clear previous frame buffers only if this is a "flush-all" operation,
857        or if the picture is the first one in the access unit */
858     if (priv->prev_frames && (!picture ||
859             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
860                 GST_VAAPI_PICTURE_FLAG_AU_START))) {
861         for (i = 0; i < priv->max_views; i++)
862             gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
863     }
864 }
865
866 static void
867 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
868 {
869     while (dpb_bump(decoder, picture))
870         ;
871     dpb_clear(decoder, picture);
872 }
873
874 static void
875 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
876 {
877     GstVaapiDecoderH264Private * const priv = &decoder->priv;
878     const gboolean is_last_picture = /* in the access unit */
879         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
880     guint i;
881
882     // Remove all unused inter-view only reference components of the current AU
883     i = 0;
884     while (i < priv->dpb_count) {
885         GstVaapiFrameStore * const fs = priv->dpb[i];
886         if (fs->view_id != picture->base.view_id &&
887             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
888             (is_last_picture ||
889              !is_inter_view_reference_for_next_frames(decoder, fs)))
890             dpb_remove_index(decoder, i);
891         else
892             i++;
893     }
894 }
895
896 static gboolean
897 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
898 {
899     GstVaapiDecoderH264Private * const priv = &decoder->priv;
900     GstVaapiFrameStore *fs;
901     guint i;
902
903     if (priv->max_views > 1)
904         dpb_prune_mvc(decoder, picture);
905
906     // Remove all unused pictures
907     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
908         i = 0;
909         while (i < priv->dpb_count) {
910             GstVaapiFrameStore * const fs = priv->dpb[i];
911             if (fs->view_id == picture->base.view_id &&
912                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
913                 dpb_remove_index(decoder, i);
914             else
915                 i++;
916         }
917     }
918
919     // Check if picture is the second field and the first field is still in DPB
920     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
921         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
922         const gint found_index = dpb_find_picture(decoder,
923             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
924         if (found_index >= 0)
925             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
926     }
927
928     // Create new frame store, and split fields if necessary
929     fs = gst_vaapi_frame_store_new(picture);
930     if (!fs)
931         return FALSE;
932     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
933     gst_vaapi_frame_store_unref(fs);
934
935     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
936         if (!gst_vaapi_frame_store_split_fields(fs))
937             return FALSE;
938     }
939
940     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
941     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
942         while (priv->dpb_count == priv->dpb_size) {
943             if (!dpb_bump(decoder, picture))
944                 return FALSE;
945         }
946     }
947
948     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
949     else {
950         const gboolean StoreInterViewOnlyRefFlag =
951             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
952                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
953             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
954                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
955         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
956             return TRUE;
957         while (priv->dpb_count == priv->dpb_size) {
958             if (!StoreInterViewOnlyRefFlag) {
959                 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
960                     return dpb_output(decoder, NULL, picture);
961             }
962             if (!dpb_bump(decoder, picture))
963                 return FALSE;
964         }
965     }
966
967     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
968     if (picture->output_flag) {
969         picture->output_needed = TRUE;
970         fs->output_needed++;
971     }
972     return TRUE;
973 }
974
975 static gboolean
976 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
977 {
978     GstVaapiDecoderH264Private * const priv = &decoder->priv;
979
980     if (dpb_size < priv->dpb_count)
981         return FALSE;
982
983     if (dpb_size > priv->dpb_size_max) {
984         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
985         if (!priv->dpb)
986             return FALSE;
987         memset(&priv->dpb[priv->dpb_size_max], 0,
988             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
989         priv->dpb_size_max = dpb_size;
990     }
991
992     if (priv->dpb_size < dpb_size)
993         priv->dpb_size = dpb_size;
994     else if (dpb_size < priv->dpb_count)
995         return FALSE;
996
997     GST_DEBUG("DPB size %u", priv->dpb_size);
998     return TRUE;
999 }
1000
1001 static void
1002 unref_inter_view(GstVaapiPictureH264 *picture)
1003 {
1004     if (!picture)
1005         return;
1006     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1007     gst_vaapi_picture_unref(picture);
1008 }
1009
1010 /* Resets MVC resources */
1011 static gboolean
1012 mvc_reset(GstVaapiDecoderH264 *decoder)
1013 {
1014     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1015     guint i;
1016
1017     // Resize array of inter-view references
1018     if (!priv->inter_views) {
1019         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1020             (GDestroyNotify)unref_inter_view);
1021         if (!priv->inter_views)
1022             return FALSE;
1023     }
1024
1025     // Resize array of previous frame buffers
1026     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1027         gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1028
1029     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1030         sizeof(*priv->prev_frames));
1031     if (!priv->prev_frames) {
1032         priv->prev_frames_alloc = 0;
1033         return FALSE;
1034     }
1035     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1036         priv->prev_frames[i] = NULL;
1037     priv->prev_frames_alloc = priv->max_views;
1038     return TRUE;
1039 }
1040
1041 static GstVaapiDecoderStatus
1042 get_status(GstH264ParserResult result)
1043 {
1044     GstVaapiDecoderStatus status;
1045
1046     switch (result) {
1047     case GST_H264_PARSER_OK:
1048         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1049         break;
1050     case GST_H264_PARSER_NO_NAL_END:
1051         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1052         break;
1053     case GST_H264_PARSER_ERROR:
1054         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1055         break;
1056     default:
1057         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1058         break;
1059     }
1060     return status;
1061 }
1062
1063 static void
1064 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1065 {
1066     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1067
1068     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1069     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1070     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1071
1072     dpb_clear(decoder, NULL);
1073
1074     if (priv->inter_views) {
1075         g_ptr_array_unref(priv->inter_views);
1076         priv->inter_views = NULL;
1077     }
1078
1079     if (priv->parser) {
1080         gst_h264_nal_parser_free(priv->parser);
1081         priv->parser = NULL;
1082     }
1083 }
1084
1085 static gboolean
1086 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1087 {
1088     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1089
1090     gst_vaapi_decoder_h264_close(decoder);
1091
1092     priv->parser = gst_h264_nal_parser_new();
1093     if (!priv->parser)
1094         return FALSE;
1095     return TRUE;
1096 }
1097
1098 static void
1099 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1100 {
1101     GstVaapiDecoderH264 * const decoder =
1102         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1103     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1104     guint i;
1105
1106     gst_vaapi_decoder_h264_close(decoder);
1107
1108     g_free(priv->dpb);
1109     priv->dpb = NULL;
1110     priv->dpb_size = 0;
1111
1112     g_free(priv->prev_frames);
1113     priv->prev_frames = NULL;
1114     priv->prev_frames_alloc = 0;
1115
1116     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1117         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1118     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1119
1120     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1121         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1122     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1123 }
1124
1125 static gboolean
1126 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1127 {
1128     GstVaapiDecoderH264 * const decoder =
1129         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1130     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1131
1132     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1133     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1134     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1135     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1136     priv->progressive_sequence  = TRUE;
1137     return TRUE;
1138 }
1139
1140 /* Activates the supplied PPS */
1141 static GstH264PPS *
1142 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1143 {
1144     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1145     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1146
1147     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1148     return pi ? &pi->data.pps : NULL;
1149 }
1150
1151 /* Returns the active PPS */
1152 static inline GstH264PPS *
1153 get_pps(GstVaapiDecoderH264 *decoder)
1154 {
1155     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1156
1157     return pi ? &pi->data.pps : NULL;
1158 }
1159
1160 /* Activate the supplied SPS */
1161 static GstH264SPS *
1162 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1163 {
1164     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1165     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1166
1167     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1168     return pi ? &pi->data.sps : NULL;
1169 }
1170
1171 /* Returns the active SPS */
1172 static inline GstH264SPS *
1173 get_sps(GstVaapiDecoderH264 *decoder)
1174 {
1175     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1176
1177     return pi ? &pi->data.sps : NULL;
1178 }
1179
1180 static void
1181 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1182     GstVaapiProfile profile)
1183 {
1184     guint n_profiles = *n_profiles_ptr;
1185
1186     profiles[n_profiles++] = profile;
1187     switch (profile) {
1188     case GST_VAAPI_PROFILE_H264_MAIN:
1189         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1190         break;
1191     default:
1192         break;
1193     }
1194     *n_profiles_ptr = n_profiles;
1195 }
1196
1197 /* Fills in compatible profiles for MVC decoding */
1198 static void
1199 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1200     guint *n_profiles_ptr, guint dpb_size)
1201 {
1202     const gchar * const vendor_string =
1203         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1204
1205     gboolean add_high_profile = FALSE;
1206     struct map {
1207         const gchar *str;
1208         guint str_len;
1209     };
1210     const struct map *m;
1211
1212     // Drivers that support slice level decoding
1213     if (vendor_string && dpb_size <= 16) {
1214         static const struct map drv_names[] = {
1215             { "Intel i965 driver", 17 },
1216             { NULL, 0 }
1217         };
1218         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1219             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1220                 add_high_profile = TRUE;
1221         }
1222     }
1223
1224     if (add_high_profile)
1225         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1226 }
1227
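/* Determines the VA profile to use: start from the exact profile signalled by
 * profile_idc, then append broader but compatible profiles (e.g. Main -> High,
 * Baseline with constraint_set1_flag -> Constrained Baseline / Main) and
 * return the first one the VA display actually supports for the decoder
 * entrypoint. */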
1228 static GstVaapiProfile
1229 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1230 {
1231     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1232     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1233     GstVaapiProfile profile, profiles[4];
1234     guint i, n_profiles = 0;
1235
1236     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1237     if (!profile)
1238         return GST_VAAPI_PROFILE_UNKNOWN;
1239
1240     fill_profiles(profiles, &n_profiles, profile);
1241     switch (profile) {
1242     case GST_VAAPI_PROFILE_H264_BASELINE:
1243         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1244             fill_profiles(profiles, &n_profiles,
1245                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1246             fill_profiles(profiles, &n_profiles,
1247                 GST_VAAPI_PROFILE_H264_MAIN);
1248         }
1249         break;
1250     case GST_VAAPI_PROFILE_H264_EXTENDED:
1251         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1252             fill_profiles(profiles, &n_profiles,
1253                 GST_VAAPI_PROFILE_H264_MAIN);
1254         }
1255         break;
1256     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1257         if (priv->max_views == 2) {
1258             fill_profiles(profiles, &n_profiles,
1259                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1260         }
1261         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1262         break;
1263     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1264         if (sps->frame_mbs_only_flag) {
1265             fill_profiles(profiles, &n_profiles,
1266                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1267         }
1268         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1269         break;
1270     default:
1271         break;
1272     }
1273
1274     /* If the preferred profile (profiles[0]) matches one that we already
1275        found, then just return it now instead of searching for it again */
1276     if (profiles[0] == priv->profile)
1277         return priv->profile;
1278
1279     for (i = 0; i < n_profiles; i++) {
1280         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1281             return profiles[i];
1282     }
1283     return GST_VAAPI_PROFILE_UNKNOWN;
1284 }
1285
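/* (Re)creates the VA decoding context whenever a parameter that affects
 * surface allocation changes: profile, chroma format, coded size in
 * macroblocks, or an increase of the required DPB size.  Otherwise the
 * existing context is kept and only the SPS-derived display settings
 * (interlacing, pixel aspect ratio) are refreshed. */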
1286 static GstVaapiDecoderStatus
1287 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1288 {
1289     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1290     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1291     GstVaapiContextInfo info;
1292     GstVaapiProfile profile;
1293     GstVaapiChromaType chroma_type;
1294     gboolean reset_context = FALSE;
1295     guint mb_width, mb_height, dpb_size;
1296
1297     dpb_size = get_max_dec_frame_buffering(sps);
1298     if (priv->dpb_size < dpb_size) {
1299         GST_DEBUG("DPB size increased");
1300         reset_context = TRUE;
1301     }
1302
1303     profile = get_profile(decoder, sps, dpb_size);
1304     if (!profile) {
1305         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1306         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1307     }
1308
1309     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1310         GST_DEBUG("profile changed");
1311         reset_context = TRUE;
1312         priv->profile = profile;
1313     }
1314
1315     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1316     if (!chroma_type) {
1317         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1318         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1319     }
1320
1321     if (priv->chroma_type != chroma_type) {
1322         GST_DEBUG("chroma format changed");
1323         reset_context     = TRUE;
1324         priv->chroma_type = chroma_type;
1325     }
1326
1327     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1328     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1329         !sps->frame_mbs_only_flag;
1330     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1331         GST_DEBUG("size changed");
1332         reset_context   = TRUE;
1333         priv->mb_width  = mb_width;
1334         priv->mb_height = mb_height;
1335     }
1336
1337     priv->progressive_sequence = sps->frame_mbs_only_flag;
1338     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1339
1340     gst_vaapi_decoder_set_pixel_aspect_ratio(
1341         base_decoder,
1342         sps->vui_parameters.par_n,
1343         sps->vui_parameters.par_d
1344     );
1345
1346     if (!reset_context && priv->has_context)
1347         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1348
1349     /* XXX: fix surface size when cropping is implemented */
1350     info.profile    = priv->profile;
1351     info.entrypoint = priv->entrypoint;
1352     info.chroma_type = priv->chroma_type;
1353     info.width      = sps->width;
1354     info.height     = sps->height;
1355     info.ref_frames = dpb_size;
1356
1357     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1358         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1359     priv->has_context = TRUE;
1360
1361     /* Reset DPB */
1362     if (!dpb_reset(decoder, dpb_size))
1363         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1364
1365     /* Reset MVC data */
1366     if (!mvc_reset(decoder))
1367         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1368     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1369 }
1370
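/* The codecparsers library stores scaling lists in zig-zag scan order, while
 * the VA-API IQ matrix buffer is filled in raster order here, hence the
 * *_get_raster_from_zigzag() conversion helpers used below. */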
1371 static void
1372 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1373     const GstH264SPS *sps)
1374 {
1375     guint i;
1376
1377     /* There are always 6 4x4 scaling lists */
1378     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1379     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1380
1381     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1382         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1383             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1384 }
1385
1386 static void
1387 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1388     const GstH264SPS *sps)
1389 {
1390     guint i, n;
1391
1392     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1393     if (!pps->transform_8x8_mode_flag)
1394         return;
1395
1396     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1397     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1398
1399     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1400     for (i = 0; i < n; i++) {
1401         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1402             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1403     }
1404 }
1405
1406 static GstVaapiDecoderStatus
1407 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1408 {
1409     GstVaapiPicture * const base_picture = &picture->base;
1410     GstH264PPS * const pps = get_pps(decoder);
1411     GstH264SPS * const sps = get_sps(decoder);
1412     VAIQMatrixBufferH264 *iq_matrix;
1413
1414     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1415     if (!base_picture->iq_matrix) {
1416         GST_ERROR("failed to allocate IQ matrix");
1417         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1418     }
1419     iq_matrix = base_picture->iq_matrix->param;
1420
1421     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1422        is not large enough to hold lists for 4:4:4 */
1423     if (sps->chroma_format_idc == 3)
1424         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1425
1426     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1427     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1428
1429     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1430 }
1431
1432 static inline gboolean
1433 is_valid_state(guint state, guint ref_state)
1434 {
1435     return (state & ref_state) == ref_state;
1436 }
1437
1438 static GstVaapiDecoderStatus
1439 decode_current_picture(GstVaapiDecoderH264 *decoder)
1440 {
1441     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1442     GstVaapiPictureH264 * const picture = priv->current_picture;
1443
1444     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1445         goto drop_frame;
1446     priv->decoder_state = 0;
1447
1448     if (!picture)
1449         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1450
1451     if (!exec_ref_pic_marking(decoder, picture))
1452         goto error;
1453     if (!dpb_add(decoder, picture))
1454         goto error;
1455     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1456         goto error;
1457     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1458     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1459
1460 error:
1461     /* XXX: fix for cases where first field failed to be decoded */
1462     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1463     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1464
1465 drop_frame:
1466     priv->decoder_state = 0;
1467     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1468 }
1469
1470 static GstVaapiDecoderStatus
1471 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1472 {
1473     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1474     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1475     GstH264SPS * const sps = &pi->data.sps;
1476     GstH264ParserResult result;
1477
1478     GST_DEBUG("parse SPS");
1479
1480     priv->parser_state = 0;
1481
1482     /* Variables that don't have inferred values per the H.264
1483        standard but that should get a default value anyway */
1484     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1485
1486     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1487     if (result != GST_H264_PARSER_OK)
1488         return get_status(result);
1489
1490     /* Reset defaults */
1491     priv->max_views = 1;
1492
1493     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1494     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1495 }
1496
1497 static GstVaapiDecoderStatus
1498 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1499 {
1500     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1501     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1502     GstH264SPS * const sps = &pi->data.sps;
1503     GstH264ParserResult result;
1504
1505     GST_DEBUG("parse subset SPS");
1506
1507     /* Variables that don't have inferred values per the H.264
1508        standard but that should get a default value anyway */
1509     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1510
1511     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1512         TRUE);
1513     if (result != GST_H264_PARSER_OK)
1514         return get_status(result);
1515
1516     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1517     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1518 }
1519
1520 static GstVaapiDecoderStatus
1521 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1522 {
1523     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1524     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1525     GstH264PPS * const pps = &pi->data.pps;
1526     GstH264ParserResult result;
1527
1528     GST_DEBUG("parse PPS");
1529
1530     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1531
1532     /* Variables that don't have inferred values per the H.264
1533        standard but that should get a default value anyway */
1534     pps->slice_group_map_type = 0;
1535     pps->slice_group_change_rate_minus1 = 0;
1536
1537     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1538     if (result != GST_H264_PARSER_OK)
1539         return get_status(result);
1540
1541     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1542     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1543 }
1544
1545 static GstVaapiDecoderStatus
1546 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1547 {
1548     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1549     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1550     GArray ** const sei_ptr = &pi->data.sei;
1551     GstH264ParserResult result;
1552
1553     GST_DEBUG("parse SEI");
1554
1555     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1556     if (result != GST_H264_PARSER_OK) {
1557         GST_WARNING("failed to parse SEI messages");
1558         return get_status(result);
1559     }
1560     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1561 }
1562
1563 static GstVaapiDecoderStatus
1564 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1565 {
1566     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1567     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1568     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1569     GstH264NalUnit * const nalu = &pi->nalu;
1570     GstH264SPS *sps;
1571     GstH264ParserResult result;
1572     guint num_views;
1573
1574     GST_DEBUG("parse slice");
1575
1576     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1577                            GST_H264_VIDEO_STATE_GOT_PPS);
1578
1579     /* Propagate Prefix NAL unit info, if necessary */
1580     switch (nalu->type) {
1581     case GST_H264_NAL_SLICE:
1582     case GST_H264_NAL_SLICE_IDR: {
1583         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1584         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1585             /* MVC sequences shall have a Prefix NAL unit immediately
1586                preceding this NAL unit */
1587             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1588             pi->nalu.extension = prev_pi->nalu.extension;
1589         }
1590         else {
1591             /* In the very unlikely case there is no Prefix NAL unit
1592                immediately preceding this NAL unit, try to infer some
1593                defaults (H.7.4.1.1) */
1594             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1595             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1596             nalu->idr_pic_flag = !mvc->non_idr_flag;
1597             mvc->priority_id = 0;
1598             mvc->view_id = 0;
1599             mvc->temporal_id = 0;
1600             mvc->anchor_pic_flag = 0;
1601             mvc->inter_view_flag = 1;
1602         }
1603         break;
1604     }
1605     }
1606
1607     /* Variables that don't have inferred values per the H.264
1608        standard but that should get a default value anyway */
1609     slice_hdr->cabac_init_idc = 0;
1610     slice_hdr->direct_spatial_mv_pred_flag = 0;
1611
1612     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1613         slice_hdr, TRUE, TRUE);
1614     if (result != GST_H264_PARSER_OK)
1615         return get_status(result);
1616
1617     sps = slice_hdr->pps->sequence;
1618
1619     /* Update MVC data */
1620     num_views = get_num_views(sps);
1621     if (priv->max_views < num_views) {
1622         priv->max_views = num_views;
1623         GST_DEBUG("maximum number of views changed to %u", num_views);
1624     }
1625     pi->view_id = get_view_id(&pi->nalu);
1626     pi->voc = get_view_order_index(sps, pi->view_id);
1627
1628     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1629     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1630 }
1631
1632 static GstVaapiDecoderStatus
1633 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1634 {
1635     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1636     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1637     GstH264SPS * const sps = &pi->data.sps;
1638
1639     GST_DEBUG("decode SPS");
1640
1641     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1642     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1643 }
1644
1645 static GstVaapiDecoderStatus
1646 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1647 {
1648     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1649     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1650     GstH264SPS * const sps = &pi->data.sps;
1651
1652     GST_DEBUG("decode subset SPS");
1653
1654     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1655     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1656 }
1657
1658 static GstVaapiDecoderStatus
1659 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1660 {
1661     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1662     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1663     GstH264PPS * const pps = &pi->data.pps;
1664
1665     GST_DEBUG("decode PPS");
1666
1667     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1668     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1669 }
1670
1671 static GstVaapiDecoderStatus
1672 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1673 {
1674     GstVaapiDecoderStatus status;
1675
1676     GST_DEBUG("decode sequence-end");
1677
1678     status = decode_current_picture(decoder);
1679     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1680         return status;
1681
1682     dpb_flush(decoder, NULL);
1683     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1684 }
1685
1686 /* 8.2.1.1 - Decoding process for picture order count type 0 */
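     /* e.g. with MaxPicOrderCntLsb = 256, a pic_order_cnt_lsb wrap from 250 down
        to 10 gives prev_poc_lsb - poc_lsb = 240 >= 128, so poc_msb grows by 256 */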
1687 static void
1688 init_picture_poc_0(
1689     GstVaapiDecoderH264 *decoder,
1690     GstVaapiPictureH264 *picture,
1691     GstH264SliceHdr     *slice_hdr
1692 )
1693 {
1694     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1695     GstH264SPS * const sps = get_sps(decoder);
1696     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1697     gint32 temp_poc;
1698
1699     GST_DEBUG("decode picture order count type 0");
1700
1701     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1702         priv->prev_poc_msb = 0;
1703         priv->prev_poc_lsb = 0;
1704     }
1705     else if (priv->prev_pic_has_mmco5) {
1706         priv->prev_poc_msb = 0;
1707         priv->prev_poc_lsb =
1708             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1709              0 : priv->field_poc[TOP_FIELD]);
1710     }
1711     else {
1712         priv->prev_poc_msb = priv->poc_msb;
1713         priv->prev_poc_lsb = priv->poc_lsb;
1714     }
1715
1716     // (8-3)
1717     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1718     if (priv->poc_lsb < priv->prev_poc_lsb &&
1719         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1720         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1721     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1722              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1723         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1724     else
1725         priv->poc_msb = priv->prev_poc_msb;
1726
1727     temp_poc = priv->poc_msb + priv->poc_lsb;
1728     switch (picture->structure) {
1729     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1730         // (8-4, 8-5)
1731         priv->field_poc[TOP_FIELD] = temp_poc;
1732         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1733             slice_hdr->delta_pic_order_cnt_bottom;
1734         break;
1735     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1736         // (8-4)
1737         priv->field_poc[TOP_FIELD] = temp_poc;
1738         break;
1739     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1740         // (8-5)
1741         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1742         break;
1743     }
1744 }
1745
1746 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1747 static void
1748 init_picture_poc_1(
1749     GstVaapiDecoderH264 *decoder,
1750     GstVaapiPictureH264 *picture,
1751     GstH264SliceHdr     *slice_hdr
1752 )
1753 {
1754     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1755     GstH264SPS * const sps = get_sps(decoder);
1756     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1757     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1758     guint i;
1759
1760     GST_DEBUG("decode picture order count type 1");
1761
1762     if (priv->prev_pic_has_mmco5)
1763         prev_frame_num_offset = 0;
1764     else
1765         prev_frame_num_offset = priv->frame_num_offset;
1766
1767     // (8-6)
1768     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1769         priv->frame_num_offset = 0;
1770     else if (priv->prev_frame_num > priv->frame_num)
1771         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1772     else
1773         priv->frame_num_offset = prev_frame_num_offset;
1774
1775     // (8-7)
1776     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1777         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1778     else
1779         abs_frame_num = 0;
1780     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1781         abs_frame_num = abs_frame_num - 1;
1782
1783     if (abs_frame_num > 0) {
1784         gint32 expected_delta_per_poc_cycle;
1785         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1786
1787         expected_delta_per_poc_cycle = 0;
1788         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1789             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1790
1791         // (8-8)
1792         poc_cycle_cnt = (abs_frame_num - 1) /
1793             sps->num_ref_frames_in_pic_order_cnt_cycle;
1794         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1795             sps->num_ref_frames_in_pic_order_cnt_cycle;
1796
1797         // (8-9)
1798         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1799         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1800             expected_poc += sps->offset_for_ref_frame[i];
1801     }
1802     else
1803         expected_poc = 0;
1804     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1805         expected_poc += sps->offset_for_non_ref_pic;
1806
1807     // (8-10)
1808     switch (picture->structure) {
1809     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1810         priv->field_poc[TOP_FIELD] = expected_poc +
1811             slice_hdr->delta_pic_order_cnt[0];
1812         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1813             sps->offset_for_top_to_bottom_field +
1814             slice_hdr->delta_pic_order_cnt[1];
1815         break;
1816     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1817         priv->field_poc[TOP_FIELD] = expected_poc +
1818             slice_hdr->delta_pic_order_cnt[0];
1819         break;
1820     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1821         priv->field_poc[BOTTOM_FIELD] = expected_poc + 
1822             sps->offset_for_top_to_bottom_field +
1823             slice_hdr->delta_pic_order_cnt[0];
1824         break;
1825     }
1826 }
1827
1828 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1829 static void
1830 init_picture_poc_2(
1831     GstVaapiDecoderH264 *decoder,
1832     GstVaapiPictureH264 *picture,
1833     GstH264SliceHdr     *slice_hdr
1834 )
1835 {
1836     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1837     GstH264SPS * const sps = get_sps(decoder);
1838     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1839     gint32 prev_frame_num_offset, temp_poc;
1840
1841     GST_DEBUG("decode picture order count type 2");
1842
1843     if (priv->prev_pic_has_mmco5)
1844         prev_frame_num_offset = 0;
1845     else
1846         prev_frame_num_offset = priv->frame_num_offset;
1847
1848     // (8-11)
1849     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1850         priv->frame_num_offset = 0;
1851     else if (priv->prev_frame_num > priv->frame_num)
1852         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1853     else
1854         priv->frame_num_offset = prev_frame_num_offset;
1855
1856     // (8-12)
1857     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1858         temp_poc = 0;
1859     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1860         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1861     else
1862         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1863
1864     // (8-13)
1865     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1866         priv->field_poc[TOP_FIELD] = temp_poc;
1867     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1868         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1869 }
1870
1871 /* 8.2.1 - Decoding process for picture order count */
1872 static void
1873 init_picture_poc(
1874     GstVaapiDecoderH264 *decoder,
1875     GstVaapiPictureH264 *picture,
1876     GstH264SliceHdr     *slice_hdr
1877 )
1878 {
1879     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1880     GstH264SPS * const sps = get_sps(decoder);
1881
1882     switch (sps->pic_order_cnt_type) {
1883     case 0:
1884         init_picture_poc_0(decoder, picture, slice_hdr);
1885         break;
1886     case 1:
1887         init_picture_poc_1(decoder, picture, slice_hdr);
1888         break;
1889     case 2:
1890         init_picture_poc_2(decoder, picture, slice_hdr);
1891         break;
1892     }
1893
1894     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1895         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1896     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1897         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1898     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1899 }
1900
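     /* qsort() comparison helpers used to order the reference picture lists below */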
1901 static int
1902 compare_picture_pic_num_dec(const void *a, const void *b)
1903 {
1904     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1905     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1906
1907     return picB->pic_num - picA->pic_num;
1908 }
1909
1910 static int
1911 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1912 {
1913     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1914     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1915
1916     return picA->long_term_pic_num - picB->long_term_pic_num;
1917 }
1918
1919 static int
1920 compare_picture_poc_dec(const void *a, const void *b)
1921 {
1922     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1923     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1924
1925     return picB->base.poc - picA->base.poc;
1926 }
1927
1928 static int
1929 compare_picture_poc_inc(const void *a, const void *b)
1930 {
1931     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1932     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1933
1934     return picA->base.poc - picB->base.poc;
1935 }
1936
1937 static int
1938 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1939 {
1940     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1941     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1942
1943     return picB->frame_num_wrap - picA->frame_num_wrap;
1944 }
1945
1946 static int
1947 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1948 {
1949     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1950     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1951
1952     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1953 }
1954
1955 /* 8.2.4.1 - Decoding process for picture numbers */
1956 static void
1957 init_picture_refs_pic_num(
1958     GstVaapiDecoderH264 *decoder,
1959     GstVaapiPictureH264 *picture,
1960     GstH264SliceHdr     *slice_hdr
1961 )
1962 {
1963     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1964     GstH264SPS * const sps = get_sps(decoder);
1965     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1966     guint i;
1967
1968     GST_DEBUG("decode picture numbers");
1969
1970     for (i = 0; i < priv->short_ref_count; i++) {
1971         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1972
1973         // (H.8.2)
1974         if (pic->base.view_id != picture->base.view_id)
1975             continue;
1976
1977         // (8-27)
1978         if (pic->frame_num > priv->frame_num)
1979             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1980         else
1981             pic->frame_num_wrap = pic->frame_num;
1982
1983         // (8-28, 8-30, 8-31)
1984         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1985             pic->pic_num = pic->frame_num_wrap;
1986         else {
1987             if (pic->structure == picture->structure)
1988                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1989             else
1990                 pic->pic_num = 2 * pic->frame_num_wrap;
1991         }
1992     }
1993
1994     for (i = 0; i < priv->long_ref_count; i++) {
1995         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1996
1997         // (H.8.2)
1998         if (pic->base.view_id != picture->base.view_id)
1999             continue;
2000
2001         // (8-29, 8-32, 8-33)
2002         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2003             pic->long_term_pic_num = pic->long_term_frame_idx;
2004         else {
2005             if (pic->structure == picture->structure)
2006                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2007             else
2008                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2009         }
2010     }
2011 }
2012
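     /* Sorts the first n entries of list with the given compare_picture_*() helper */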
2013 #define SORT_REF_LIST(list, n, compare_func) \
2014     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
2015
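     /* 8.2.4.2.5 - Alternately appends reference fields with the same parity as
        the current picture, then with the opposite parity, into RefPicList[] */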
2016 static void
2017 init_picture_refs_fields_1(
2018     guint                picture_structure,
2019     GstVaapiPictureH264 *RefPicList[32],
2020     guint               *RefPicList_count,
2021     GstVaapiPictureH264 *ref_list[32],
2022     guint                ref_list_count
2023 )
2024 {
2025     guint i, j, n;
2026
2027     i = 0;
2028     j = 0;
2029     n = *RefPicList_count;
2030     do {
2031         g_assert(n < 32);
2032         for (; i < ref_list_count; i++) {
2033             if (ref_list[i]->structure == picture_structure) {
2034                 RefPicList[n++] = ref_list[i++];
2035                 break;
2036             }
2037         }
2038         for (; j < ref_list_count; j++) {
2039             if (ref_list[j]->structure != picture_structure) {
2040                 RefPicList[n++] = ref_list[j++];
2041                 break;
2042             }
2043         }
2044     } while (i < ref_list_count || j < ref_list_count);
2045     *RefPicList_count = n;
2046 }
2047
2048 static inline void
2049 init_picture_refs_fields(
2050     GstVaapiPictureH264 *picture,
2051     GstVaapiPictureH264 *RefPicList[32],
2052     guint               *RefPicList_count,
2053     GstVaapiPictureH264 *short_ref[32],
2054     guint                short_ref_count,
2055     GstVaapiPictureH264 *long_ref[32],
2056     guint                long_ref_count
2057 )
2058 {
2059     guint n = 0;
2060
2061     /* 8.2.4.2.5 - reference picture lists in fields */
2062     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2063         short_ref, short_ref_count);
2064     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2065         long_ref, long_ref_count);
2066     *RefPicList_count = n;
2067 }
2068
2069 /* Finds the inter-view reference picture with the supplied view id */
2070 static GstVaapiPictureH264 *
2071 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2072 {
2073     GPtrArray * const inter_views = decoder->priv.inter_views;
2074     guint i;
2075
2076     for (i = 0; i < inter_views->len; i++) {
2077         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2078         if (picture->base.view_id == view_id)
2079             return picture;
2080     }
2081
2082     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2083         view_id);
2084     return NULL;
2085 }
2086
2087 /* Checks whether the view id exists in the supplied list of view ids */
2088 static gboolean
2089 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2090 {
2091     guint i;
2092
2093     for (i = 0; i < num_view_ids; i++) {
2094         if (view_ids[i] == view_id)
2095             return TRUE;
2096     }
2097     return FALSE;
2098 }
2099
2100 static gboolean
2101 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2102     gboolean is_anchor)
2103 {
2104     if (is_anchor)
2105         return (find_view_id(view_id, view->anchor_ref_l0,
2106                     view->num_anchor_refs_l0) ||
2107                 find_view_id(view_id, view->anchor_ref_l1,
2108                     view->num_anchor_refs_l1));
2109
2110     return (find_view_id(view_id, view->non_anchor_ref_l0,
2111                 view->num_non_anchor_refs_l0) ||
2112             find_view_id(view_id, view->non_anchor_ref_l1,
2113                 view->num_non_anchor_refs_l1));
2114 }
2115
2116 /* Checks whether the inter-view reference picture with the supplied
2117    view id is used for decoding the current view component picture */
2118 static gboolean
2119 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2120     guint16 view_id, GstVaapiPictureH264 *picture)
2121 {
2122     const GstH264SPS * const sps = get_sps(decoder);
2123     gboolean is_anchor;
2124
2125     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2126         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2127         return FALSE;
2128
2129     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2130     return find_view_id_in_view(view_id,
2131         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2132 }
2133
2134 /* Checks whether the supplied inter-view reference picture is used
2135    for decoding the next view component pictures */
2136 static gboolean
2137 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2138     GstVaapiPictureH264 *picture)
2139 {
2140     const GstH264SPS * const sps = get_sps(decoder);
2141     gboolean is_anchor;
2142     guint i, num_views;
2143
2144     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2145         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2146         return FALSE;
2147
2148     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2149     num_views = sps->extension.mvc.num_views_minus1 + 1;
2150     for (i = picture->base.voc + 1; i < num_views; i++) {
2151         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2152         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2153             return TRUE;
2154     }
2155     return FALSE;
2156 }
2157
2158 /* H.8.2.1 - Initialization process for inter-view prediction references */
2159 static void
2160 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2161     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2162     const guint16 *view_ids, guint num_view_ids)
2163 {
2164     guint j, n;
2165
2166     n = *ref_list_count_ptr;
2167     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2168         GstVaapiPictureH264 * const pic =
2169             find_inter_view_reference(decoder, view_ids[j]);
2170         if (pic)
2171             ref_list[n++] = pic;
2172     }
2173     *ref_list_count_ptr = n;
2174 }
2175
2176 static inline void
2177 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2178     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2179 {
2180     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2181     const GstH264SPS * const sps = get_sps(decoder);
2182     const GstH264SPSExtMVCView *view;
2183
2184     GST_DEBUG("initialize reference picture list for inter-view prediction");
2185
2186     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2187         return;
2188     view = &sps->extension.mvc.view[picture->base.voc];
2189
2190 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2191         init_picture_refs_mvc_1(decoder,                                \
2192             priv->RefPicList##ref_list,                                 \
2193             &priv->RefPicList##ref_list##_count,                        \
2194             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2195             view->view_list##_l##ref_list,                              \
2196             view->num_##view_list##s_l##ref_list);                      \
2197     } while (0)
2198
2199     if (list == 0) {
2200         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2201             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2202         else
2203             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2204     }
2205     else {
2206         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2207             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2208         else
2209             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2210     }
2211
2212 #undef INVOKE_INIT_PICTURE_REFS_MVC
2213 }
2214
2215 static void
2216 init_picture_refs_p_slice(
2217     GstVaapiDecoderH264 *decoder,
2218     GstVaapiPictureH264 *picture,
2219     GstH264SliceHdr     *slice_hdr
2220 )
2221 {
2222     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2223     GstVaapiPictureH264 **ref_list;
2224     guint i;
2225
2226     GST_DEBUG("decode reference picture list for P and SP slices");
2227
2228     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2229         /* 8.2.4.2.1 - P and SP slices in frames */
2230         if (priv->short_ref_count > 0) {
2231             ref_list = priv->RefPicList0;
2232             for (i = 0; i < priv->short_ref_count; i++)
2233                 ref_list[i] = priv->short_ref[i];
2234             SORT_REF_LIST(ref_list, i, pic_num_dec);
2235             priv->RefPicList0_count += i;
2236         }
2237
2238         if (priv->long_ref_count > 0) {
2239             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2240             for (i = 0; i < priv->long_ref_count; i++)
2241                 ref_list[i] = priv->long_ref[i];
2242             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2243             priv->RefPicList0_count += i;
2244         }
2245     }
2246     else {
2247         /* 8.2.4.2.2 - P and SP slices in fields */
2248         GstVaapiPictureH264 *short_ref[32];
2249         guint short_ref_count = 0;
2250         GstVaapiPictureH264 *long_ref[32];
2251         guint long_ref_count = 0;
2252
2253         if (priv->short_ref_count > 0) {
2254             for (i = 0; i < priv->short_ref_count; i++)
2255                 short_ref[i] = priv->short_ref[i];
2256             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2257             short_ref_count = i;
2258         }
2259
2260         if (priv->long_ref_count > 0) {
2261             for (i = 0; i < priv->long_ref_count; i++)
2262                 long_ref[i] = priv->long_ref[i];
2263             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2264             long_ref_count = i;
2265         }
2266
2267         init_picture_refs_fields(
2268             picture,
2269             priv->RefPicList0, &priv->RefPicList0_count,
2270             short_ref,          short_ref_count,
2271             long_ref,           long_ref_count
2272         );
2273     }
2274
2275     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2276         /* RefPicList0 */
2277         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2278     }
2279 }
2280
2281 static void
2282 init_picture_refs_b_slice(
2283     GstVaapiDecoderH264 *decoder,
2284     GstVaapiPictureH264 *picture,
2285     GstH264SliceHdr     *slice_hdr
2286 )
2287 {
2288     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2289     GstVaapiPictureH264 **ref_list;
2290     guint i, n;
2291
2292     GST_DEBUG("decode reference picture list for B slices");
2293
2294     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2295         /* 8.2.4.2.3 - B slices in frames */
2296
2297         /* RefPicList0 */
2298         if (priv->short_ref_count > 0) {
2299             // 1. Short-term references
2300             ref_list = priv->RefPicList0;
2301             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2302                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2303                     ref_list[n++] = priv->short_ref[i];
2304             }
2305             SORT_REF_LIST(ref_list, n, poc_dec);
2306             priv->RefPicList0_count += n;
2307
2308             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2309             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2310                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2311                     ref_list[n++] = priv->short_ref[i];
2312             }
2313             SORT_REF_LIST(ref_list, n, poc_inc);
2314             priv->RefPicList0_count += n;
2315         }
2316
2317         if (priv->long_ref_count > 0) {
2318             // 2. Long-term references
2319             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2320             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2321                 ref_list[n++] = priv->long_ref[i];
2322             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2323             priv->RefPicList0_count += n;
2324         }
2325
2326         /* RefPicList1 */
2327         if (priv->short_ref_count > 0) {
2328             // 1. Short-term references
2329             ref_list = priv->RefPicList1;
2330             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2331                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2332                     ref_list[n++] = priv->short_ref[i];
2333             }
2334             SORT_REF_LIST(ref_list, n, poc_inc);
2335             priv->RefPicList1_count += n;
2336
2337             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2338             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2339                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2340                     ref_list[n++] = priv->short_ref[i];
2341             }
2342             SORT_REF_LIST(ref_list, n, poc_dec);
2343             priv->RefPicList1_count += n;
2344         }
2345
2346         if (priv->long_ref_count > 0) {
2347             // 2. Long-term references
2348             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2349             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2350                 ref_list[n++] = priv->long_ref[i];
2351             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2352             priv->RefPicList1_count += n;
2353         }
2354     }
2355     else {
2356         /* 8.2.4.2.4 - B slices in fields */
2357         GstVaapiPictureH264 *short_ref0[32];
2358         guint short_ref0_count = 0;
2359         GstVaapiPictureH264 *short_ref1[32];
2360         guint short_ref1_count = 0;
2361         GstVaapiPictureH264 *long_ref[32];
2362         guint long_ref_count = 0;
2363
2364         /* refFrameList0ShortTerm */
2365         if (priv->short_ref_count > 0) {
2366             ref_list = short_ref0;
2367             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2368                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2369                     ref_list[n++] = priv->short_ref[i];
2370             }
2371             SORT_REF_LIST(ref_list, n, poc_dec);
2372             short_ref0_count += n;
2373
2374             ref_list = &short_ref0[short_ref0_count];
2375             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2376                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2377                     ref_list[n++] = priv->short_ref[i];
2378             }
2379             SORT_REF_LIST(ref_list, n, poc_inc);
2380             short_ref0_count += n;
2381         }
2382
2383         /* refFrameList1ShortTerm */
2384         if (priv->short_ref_count > 0) {
2385             ref_list = short_ref1;
2386             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2387                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2388                     ref_list[n++] = priv->short_ref[i];
2389             }
2390             SORT_REF_LIST(ref_list, n, poc_inc);
2391             short_ref1_count += n;
2392
2393             ref_list = &short_ref1[short_ref1_count];
2394             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2395                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2396                     ref_list[n++] = priv->short_ref[i];
2397             }
2398             SORT_REF_LIST(ref_list, n, poc_dec);
2399             short_ref1_count += n;
2400         }
2401
2402         /* refFrameListLongTerm */
2403         if (priv->long_ref_count > 0) {
2404             for (i = 0; i < priv->long_ref_count; i++)
2405                 long_ref[i] = priv->long_ref[i];
2406             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2407             long_ref_count = i;
2408         }
2409
2410         init_picture_refs_fields(
2411             picture,
2412             priv->RefPicList0, &priv->RefPicList0_count,
2413             short_ref0,         short_ref0_count,
2414             long_ref,           long_ref_count
2415         );
2416
2417         init_picture_refs_fields(
2418             picture,
2419             priv->RefPicList1, &priv->RefPicList1_count,
2420             short_ref1,         short_ref1_count,
2421             long_ref,           long_ref_count
2422         );
2423     }
2424
2425     /* If RefPicList1 has more than one entry and is identical to
2426        RefPicList0, swap its first two entries (8.2.4.2.3) */
2427     if (priv->RefPicList1_count > 1 &&
2428         priv->RefPicList1_count == priv->RefPicList0_count &&
2429         memcmp(priv->RefPicList0, priv->RefPicList1,
2430                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2431         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2432         priv->RefPicList1[0] = priv->RefPicList1[1];
2433         priv->RefPicList1[1] = tmp;
2434     }
2435
2436     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2437         /* RefPicList0 */
2438         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2439
2440         /* RefPicList1 */
2441         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2442     }
2443 }
2444
2445 #undef SORT_REF_LIST
2446
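     /* Finds the index in short_ref[] of the given PicNum, or -1 if not found */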
2447 static gint
2448 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2449 {
2450     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2451     guint i;
2452
2453     for (i = 0; i < priv->short_ref_count; i++) {
2454         if (priv->short_ref[i]->pic_num == pic_num)
2455             return i;
2456     }
2457     GST_ERROR("found no short-term reference picture with PicNum = %d",
2458               pic_num);
2459     return -1;
2460 }
2461
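     /* Finds the index in long_ref[] of the given LongTermPicNum, or -1 if not found */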
2462 static gint
2463 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2464 {
2465     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2466     guint i;
2467
2468     for (i = 0; i < priv->long_ref_count; i++) {
2469         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2470             return i;
2471     }
2472     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2473               long_term_pic_num);
2474     return -1;
2475 }
2476
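     /* 8.2.4.3 (and H.8.2.2.3 for MVC) - Applies the ref_pic_list_modification()
        syntax elements to a single reference picture list */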
2477 static void
2478 exec_picture_refs_modification_1(
2479     GstVaapiDecoderH264           *decoder,
2480     GstVaapiPictureH264           *picture,
2481     GstH264SliceHdr               *slice_hdr,
2482     guint                          list
2483 )
2484 {
2485     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2486     GstH264SPS * const sps = get_sps(decoder);
2487     GstH264RefPicListModification *ref_pic_list_modification;
2488     guint num_ref_pic_list_modifications;
2489     GstVaapiPictureH264 **ref_list;
2490     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2491     const guint16 *view_ids = NULL;
2492     guint i, j, n, num_refs, num_view_ids = 0;
2493     gint found_ref_idx;
2494     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2495
2496     GST_DEBUG("modification process of reference picture list %u", list);
2497
2498     if (list == 0) {
2499         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2500         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2501         ref_list                       = priv->RefPicList0;
2502         ref_list_count_ptr             = &priv->RefPicList0_count;
2503         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2504
2505         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2506             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2507             const GstH264SPSExtMVCView * const view =
2508                 &sps->extension.mvc.view[picture->base.voc];
2509             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2510                 view_ids = view->anchor_ref_l0;
2511                 num_view_ids = view->num_anchor_refs_l0;
2512             }
2513             else {
2514                 view_ids = view->non_anchor_ref_l0;
2515                 num_view_ids = view->num_non_anchor_refs_l0;
2516             }
2517         }
2518     }
2519     else {
2520         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2521         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2522         ref_list                       = priv->RefPicList1;
2523         ref_list_count_ptr             = &priv->RefPicList1_count;
2524         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2525
2526         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2527             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2528             const GstH264SPSExtMVCView * const view =
2529                 &sps->extension.mvc.view[picture->base.voc];
2530             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2531                 view_ids = view->anchor_ref_l1;
2532                 num_view_ids = view->num_anchor_refs_l1;
2533             }
2534             else {
2535                 view_ids = view->non_anchor_ref_l1;
2536                 num_view_ids = view->num_non_anchor_refs_l1;
2537             }
2538         }
2539     }
2540     ref_list_count = *ref_list_count_ptr;
2541
2542     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2543         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2544         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2545     }
2546     else {
2547         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2548         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2549     }
2550
2551     picNumPred = CurrPicNum;
2552     picViewIdxPred = -1;
2553
2554     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2555         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2556         if (l->modification_of_pic_nums_idc == 3)
2557             break;
2558
2559         /* 8.2.4.3.1 - Short-term reference pictures */
2560         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2561             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2562             gint32 picNum, picNumNoWrap;
2563
2564             // (8-34)
2565             if (l->modification_of_pic_nums_idc == 0) {
2566                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2567                 if (picNumNoWrap < 0)
2568                     picNumNoWrap += MaxPicNum;
2569             }
2570
2571             // (8-35)
2572             else {
2573                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2574                 if (picNumNoWrap >= MaxPicNum)
2575                     picNumNoWrap -= MaxPicNum;
2576             }
2577             picNumPred = picNumNoWrap;
2578
2579             // (8-36)
2580             picNum = picNumNoWrap;
2581             if (picNum > CurrPicNum)
2582                 picNum -= MaxPicNum;
2583
2584             // (8-37)
2585             for (j = num_refs; j > ref_list_idx; j--)
2586                 ref_list[j] = ref_list[j - 1];
2587             found_ref_idx = find_short_term_reference(decoder, picNum);
2588             ref_list[ref_list_idx++] =
2589                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2590             n = ref_list_idx;
2591             for (j = ref_list_idx; j <= num_refs; j++) {
2592                 gint32 PicNumF;
2593                 if (!ref_list[j])
2594                     continue;
2595                 PicNumF =
2596                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2597                     ref_list[j]->pic_num : MaxPicNum;
2598                 if (PicNumF != picNum ||
2599                     ref_list[j]->base.view_id != picture->base.view_id)
2600                     ref_list[n++] = ref_list[j];
2601             }
2602         }
2603
2604         /* 8.2.4.3.2 - Long-term reference pictures */
2605         else if (l->modification_of_pic_nums_idc == 2) {
2606
2607             for (j = num_refs; j > ref_list_idx; j--)
2608                 ref_list[j] = ref_list[j - 1];
2609             found_ref_idx =
2610                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2611             ref_list[ref_list_idx++] =
2612                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2613             n = ref_list_idx;
2614             for (j = ref_list_idx; j <= num_refs; j++) {
2615                 gint32 LongTermPicNumF;
2616                 if (!ref_list[j])
2617                     continue;
2618                 LongTermPicNumF =
2619                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2620                     ref_list[j]->long_term_pic_num : INT_MAX;
2621                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2622                     ref_list[j]->base.view_id != picture->base.view_id)
2623                     ref_list[n++] = ref_list[j];
2624             }
2625         }
2626
2627         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2628         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2629                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2630                  (l->modification_of_pic_nums_idc == 4 ||
2631                   l->modification_of_pic_nums_idc == 5)) {
2632             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2633             gint32 picViewIdx, targetViewId;
2634
2635             // (H-6)
2636             if (l->modification_of_pic_nums_idc == 4) {
2637                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2638                 if (picViewIdx < 0)
2639                     picViewIdx += num_view_ids;
2640             }
2641
2642             // (H-7)
2643             else {
2644                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2645                 if (picViewIdx >= num_view_ids)
2646                     picViewIdx -= num_view_ids;
2647             }
2648             picViewIdxPred = picViewIdx;
2649
2650             // (H-8, H-9)
2651             targetViewId = view_ids[picViewIdx];
2652
2653             // (H-10)
2654             for (j = num_refs; j > ref_list_idx; j--)
2655                 ref_list[j] = ref_list[j - 1];
2656             ref_list[ref_list_idx++] =
2657                 find_inter_view_reference(decoder, targetViewId);
2658             n = ref_list_idx;
2659             for (j = ref_list_idx; j <= num_refs; j++) {
2660                 if (!ref_list[j])
2661                     continue;
2662                 if (ref_list[j]->base.view_id != targetViewId ||
2663                     ref_list[j]->base.poc != picture->base.poc)
2664                     ref_list[n++] = ref_list[j];
2665             }
2666         }
2667     }
2668
2669 #if DEBUG
2670     for (i = 0; i < num_refs; i++)
2671         if (!ref_list[i])
2672             GST_ERROR("list %u entry %u is empty", list, i);
2673 #endif
2674     *ref_list_count_ptr = num_refs;
2675 }
2676
2677 /* 8.2.4.3 - Modification process for reference picture lists */
2678 static void
2679 exec_picture_refs_modification(
2680     GstVaapiDecoderH264 *decoder,
2681     GstVaapiPictureH264 *picture,
2682     GstH264SliceHdr     *slice_hdr
2683 )
2684 {
2685     GST_DEBUG("execute ref_pic_list_modification()");
2686
2687     /* RefPicList0 */
2688     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2689         slice_hdr->ref_pic_list_modification_flag_l0)
2690         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2691
2692     /* RefPicList1 */
2693     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2694         slice_hdr->ref_pic_list_modification_flag_l1)
2695         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2696 }
2697
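     /* Rebuilds short_ref[] and long_ref[] from the DPB, keeping only reference
        pictures that belong to the same view as the current picture */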
2698 static void
2699 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2700     GstVaapiPictureH264 *picture)
2701 {
2702     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2703     guint i, j, short_ref_count, long_ref_count;
2704
2705     short_ref_count = 0;
2706     long_ref_count  = 0;
2707     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2708         for (i = 0; i < priv->dpb_count; i++) {
2709             GstVaapiFrameStore * const fs = priv->dpb[i];
2710             GstVaapiPictureH264 *pic;
2711             if (!gst_vaapi_frame_store_has_frame(fs))
2712                 continue;
2713             pic = fs->buffers[0];
2714             if (pic->base.view_id != picture->base.view_id)
2715                 continue;
2716             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2717                 priv->short_ref[short_ref_count++] = pic;
2718             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2719                 priv->long_ref[long_ref_count++] = pic;
2720             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2721             pic->other_field = fs->buffers[1];
2722         }
2723     }
2724     else {
2725         for (i = 0; i < priv->dpb_count; i++) {
2726             GstVaapiFrameStore * const fs = priv->dpb[i];
2727             for (j = 0; j < fs->num_buffers; j++) {
2728                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2729                 if (pic->base.view_id != picture->base.view_id)
2730                     continue;
2731                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2732                     priv->short_ref[short_ref_count++] = pic;
2733                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2734                     priv->long_ref[long_ref_count++] = pic;
2735                 pic->structure = pic->base.structure;
2736                 pic->other_field = fs->buffers[j ^ 1];
2737             }
2738         }
2739     }
2740
2741     for (i = short_ref_count; i < priv->short_ref_count; i++)
2742         priv->short_ref[i] = NULL;
2743     priv->short_ref_count = short_ref_count;
2744
2745     for (i = long_ref_count; i < priv->long_ref_count; i++)
2746         priv->long_ref[i] = NULL;
2747     priv->long_ref_count = long_ref_count;
2748 }
2749
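     /* 8.2.4 - Builds the initial RefPicList0/RefPicList1 for the current slice,
        then applies any reference picture list modifications */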
2750 static void
2751 init_picture_refs(
2752     GstVaapiDecoderH264 *decoder,
2753     GstVaapiPictureH264 *picture,
2754     GstH264SliceHdr     *slice_hdr
2755 )
2756 {
2757     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2758     guint i, num_refs;
2759
2760     init_picture_ref_lists(decoder, picture);
2761     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2762
2763     priv->RefPicList0_count = 0;
2764     priv->RefPicList1_count = 0;
2765
2766     switch (slice_hdr->type % 5) {
2767     case GST_H264_P_SLICE:
2768     case GST_H264_SP_SLICE:
2769         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2770         break;
2771     case GST_H264_B_SLICE:
2772         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2773         break;
2774     default:
2775         break;
2776     }
2777
2778     exec_picture_refs_modification(decoder, picture, slice_hdr);
2779
2780     switch (slice_hdr->type % 5) {
2781     case GST_H264_B_SLICE:
2782         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2783         for (i = priv->RefPicList1_count; i < num_refs; i++)
2784             priv->RefPicList1[i] = NULL;
2785         priv->RefPicList1_count = num_refs;
2786
2787         // fall-through
2788     case GST_H264_P_SLICE:
2789     case GST_H264_SP_SLICE:
2790         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2791         for (i = priv->RefPicList0_count; i < num_refs; i++)
2792             priv->RefPicList0[i] = NULL;
2793         priv->RefPicList0_count = num_refs;
2794         break;
2795     default:
2796         break;
2797     }
2798 }
2799
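     /* Initializes the current picture from the slice header: frame numbering,
        picture structure, MVC and reference flags, and POC */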
2800 static gboolean
2801 init_picture(
2802     GstVaapiDecoderH264 *decoder,
2803     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2804 {
2805     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2806     GstVaapiPicture * const base_picture = &picture->base;
2807     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2808
2809     priv->prev_frame_num        = priv->frame_num;
2810     priv->frame_num             = slice_hdr->frame_num;
2811     picture->frame_num          = priv->frame_num;
2812     picture->frame_num_wrap     = priv->frame_num;
2813     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2814     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2815     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2816     base_picture->view_id       = pi->view_id;
2817     base_picture->voc           = pi->voc;
2818
2819     /* Initialize extensions */
2820     switch (pi->nalu.extension_type) {
2821     case GST_H264_NAL_EXTENSION_MVC: {
2822         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2823
2824         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2825         if (mvc->inter_view_flag)
2826             GST_VAAPI_PICTURE_FLAG_SET(picture,
2827                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2828         if (mvc->anchor_pic_flag)
2829             GST_VAAPI_PICTURE_FLAG_SET(picture,
2830                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2831         break;
2832     }
2833     }
2834
2835     /* Reset decoder state for IDR pictures */
2836     if (pi->nalu.idr_pic_flag) {
2837         GST_DEBUG("<IDR>");
2838         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2839         dpb_flush(decoder, picture);
2840     }
2841
2842     /* Initialize picture structure */
2843     if (!slice_hdr->field_pic_flag)
2844         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2845     else {
2846         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2847         if (!slice_hdr->bottom_field_flag)
2848             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2849         else
2850             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2851     }
2852     picture->structure = base_picture->structure;
2853
2854     /* Initialize reference flags */
2855     if (pi->nalu.ref_idc) {
2856         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2857             &slice_hdr->dec_ref_pic_marking;
2858
2859         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2860             dec_ref_pic_marking->long_term_reference_flag)
2861             GST_VAAPI_PICTURE_FLAG_SET(picture,
2862                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2863         else
2864             GST_VAAPI_PICTURE_FLAG_SET(picture,
2865                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2866     }
2867
2868     init_picture_poc(decoder, picture, slice_hdr);
2869     return TRUE;
2870 }
2871
2872 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2873 static gboolean
2874 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2875 {
2876     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2877     GstH264SPS * const sps = get_sps(decoder);
2878     GstVaapiPictureH264 *ref_picture;
2879     guint i, m, max_num_ref_frames;
2880
2881     GST_DEBUG("reference picture marking process (sliding window)");
2882
2883     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2884         return TRUE;
2885
2886     max_num_ref_frames = sps->num_ref_frames;
2887     if (max_num_ref_frames == 0)
2888         max_num_ref_frames = 1;
2889     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2890         max_num_ref_frames <<= 1;
2891
2892     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2893         return TRUE;
2894     if (priv->short_ref_count < 1)
2895         return FALSE;
2896
2897     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2898         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2899         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2900             m = i;
2901     }
2902
2903     ref_picture = priv->short_ref[m];
2904     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2905     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2906
2907     /* Both fields need to be marked as "unused for reference", so
2908        remove the other field from the short_ref[] list as well */
2909     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2910         for (i = 0; i < priv->short_ref_count; i++) {
2911             if (priv->short_ref[i] == ref_picture->other_field) {
2912                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2913                 break;
2914             }
2915         }
2916     }
2917     return TRUE;
2918 }
2919
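     /* Derives picNumX = CurrPicNum - (difference_of_pic_nums_minus1 + 1), as
        used by MMCO commands 1 and 3 (8.2.5.4.1, 8.2.5.4.3) */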
2920 static inline gint32
2921 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2922 {
2923     gint32 pic_num;
2924
2925     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2926         pic_num = picture->frame_num_wrap;
2927     else
2928         pic_num = 2 * picture->frame_num_wrap + 1;
2929     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2930     return pic_num;
2931 }
2932
2933 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2934 static void
2935 exec_ref_pic_marking_adaptive_mmco_1(
2936     GstVaapiDecoderH264  *decoder,
2937     GstVaapiPictureH264  *picture,
2938     GstH264RefPicMarking *ref_pic_marking
2939 )
2940 {
2941     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2942     gint32 i, picNumX;
2943
2944     picNumX = get_picNumX(picture, ref_pic_marking);
2945     i = find_short_term_reference(decoder, picNumX);
2946     if (i < 0)
2947         return;
2948
2949     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2950         GST_VAAPI_PICTURE_IS_FRAME(picture));
2951     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2952 }
2953
2954 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2955 static void
2956 exec_ref_pic_marking_adaptive_mmco_2(
2957     GstVaapiDecoderH264  *decoder,
2958     GstVaapiPictureH264  *picture,
2959     GstH264RefPicMarking *ref_pic_marking
2960 )
2961 {
2962     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2963     gint32 i;
2964
2965     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2966     if (i < 0)
2967         return;
2968
2969     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2970         GST_VAAPI_PICTURE_IS_FRAME(picture));
2971     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2972 }
2973
2974 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2975 static void
2976 exec_ref_pic_marking_adaptive_mmco_3(
2977     GstVaapiDecoderH264  *decoder,
2978     GstVaapiPictureH264  *picture,
2979     GstH264RefPicMarking *ref_pic_marking
2980 )
2981 {
2982     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2983     GstVaapiPictureH264 *ref_picture, *other_field;
2984     gint32 i, picNumX;
2985
2986     for (i = 0; i < priv->long_ref_count; i++) {
2987         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2988             break;
2989     }
2990     if (i != priv->long_ref_count) {
2991         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2992         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2993     }
2994
2995     picNumX = get_picNumX(picture, ref_pic_marking);
2996     i = find_short_term_reference(decoder, picNumX);
2997     if (i < 0)
2998         return;
2999
3000     ref_picture = priv->short_ref[i];
3001     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3002     priv->long_ref[priv->long_ref_count++] = ref_picture;
3003
3004     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3005     gst_vaapi_picture_h264_set_reference(ref_picture,
3006         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3007         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3008
3009     /* Assign LongTermFrameIdx to the other field if it was also
3010        marked as "used for long-term reference" */
3011     other_field = ref_picture->other_field;
3012     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3013         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3014 }
3015
3016 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3017  * as "unused for reference" */
3018 static void
3019 exec_ref_pic_marking_adaptive_mmco_4(
3020     GstVaapiDecoderH264  *decoder,
3021     GstVaapiPictureH264  *picture,
3022     GstH264RefPicMarking *ref_pic_marking
3023 )
3024 {
3025     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3026     gint32 i, long_term_frame_idx;
3027
3028     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3029
3030     for (i = 0; i < priv->long_ref_count; i++) {
3031         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3032             continue;
3033         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3034         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3035         i--;
3036     }
3037 }
3038
3039 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3040 static void
3041 exec_ref_pic_marking_adaptive_mmco_5(
3042     GstVaapiDecoderH264  *decoder,
3043     GstVaapiPictureH264  *picture,
3044     GstH264RefPicMarking *ref_pic_marking
3045 )
3046 {
3047     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3048
3049     dpb_flush(decoder, picture);
3050
3051     priv->prev_pic_has_mmco5 = TRUE;
3052
3053     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3054     priv->frame_num = 0;
3055     priv->frame_num_offset = 0;
3056     picture->frame_num = 0;
3057
3058     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3059     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3060         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3061     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3062         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3063     picture->base.poc = 0;
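     /* E.g. a frame with field_poc = { 8, 9 } and poc 8 is rebased to
        field_poc = { 0, 1 } and poc 0, as if it started a new sequence. */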
3064 }
3065
3066 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3067 static void
3068 exec_ref_pic_marking_adaptive_mmco_6(
3069     GstVaapiDecoderH264  *decoder,
3070     GstVaapiPictureH264  *picture,
3071     GstH264RefPicMarking *ref_pic_marking
3072 )
3073 {
3074     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3075     GstVaapiPictureH264 *other_field;
3076     guint i;
3077
3078     for (i = 0; i < priv->long_ref_count; i++) {
3079         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3080             break;
3081     }
3082     if (i != priv->long_ref_count) {
3083         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3084         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3085     }
3086
3087     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3088     gst_vaapi_picture_h264_set_reference(picture,
3089         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3090         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3091
3092     /* Assign LongTermFrameIdx to the other field if it was also
3093        marked as "used for long-term reference" */
3094     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3095     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3096         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3097 }
3098
3099 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3100 static gboolean
3101 exec_ref_pic_marking_adaptive(
3102     GstVaapiDecoderH264     *decoder,
3103     GstVaapiPictureH264     *picture,
3104     GstH264DecRefPicMarking *dec_ref_pic_marking
3105 )
3106 {
3107     guint i;
3108
3109     GST_DEBUG("reference picture marking process (adaptive memory control)");
3110
3111     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3112         GstVaapiDecoderH264  *decoder,
3113         GstVaapiPictureH264  *picture,
3114         GstH264RefPicMarking *ref_pic_marking
3115     );
3116
3117     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3118         NULL,
3119         exec_ref_pic_marking_adaptive_mmco_1,
3120         exec_ref_pic_marking_adaptive_mmco_2,
3121         exec_ref_pic_marking_adaptive_mmco_3,
3122         exec_ref_pic_marking_adaptive_mmco_4,
3123         exec_ref_pic_marking_adaptive_mmco_5,
3124         exec_ref_pic_marking_adaptive_mmco_6,
3125     };
3126
3127     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3128         GstH264RefPicMarking * const ref_pic_marking =
3129             &dec_ref_pic_marking->ref_pic_marking[i];
3130
3131         const guint mmco = ref_pic_marking->memory_management_control_operation;
3132         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3133             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3134         else {
3135             GST_ERROR("unhandled MMCO %u", mmco);
3136             return FALSE;
3137         }
3138     }
3139     return TRUE;
3140 }
3141
3142 /* 8.2.5 - Execute reference picture marking process */
3143 static gboolean
3144 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3145 {
3146     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3147
3148     priv->prev_pic_has_mmco5 = FALSE;
3149     priv->prev_pic_structure = picture->structure;
3150
3151     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3152         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3153
3154     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3155         return TRUE;
3156
3157     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3158         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3159             &picture->last_slice_hdr->dec_ref_pic_marking;
3160         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3161             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3162                 return FALSE;
3163         }
3164         else {
3165             if (!exec_ref_pic_marking_sliding_window(decoder))
3166                 return FALSE;
3167         }
3168     }
3169     return TRUE;
3170 }
3171
3172 static void
3173 vaapi_init_picture(VAPictureH264 *pic)
3174 {
3175     pic->picture_id           = VA_INVALID_ID;
3176     pic->frame_idx            = 0;
3177     pic->flags                = VA_PICTURE_H264_INVALID;
3178     pic->TopFieldOrderCnt     = 0;
3179     pic->BottomFieldOrderCnt  = 0;
3180 }
3181
3182 static void
3183 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3184     guint picture_structure)
3185 {
3186     if (!picture_structure)
3187         picture_structure = picture->structure;
3188
3189     pic->picture_id = picture->base.surface_id;
3190     pic->flags = 0;
3191
3192     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3193         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3194         pic->frame_idx = picture->long_term_frame_idx;
3195     }
3196     else {
3197         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3198             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3199         pic->frame_idx = picture->frame_num;
3200     }
3201
3202     switch (picture_structure) {
3203     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3204         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3205         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3206         break;
3207     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3208         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3209         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3210         pic->BottomFieldOrderCnt = 0;
3211         break;
3212     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3213         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3214         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3215         pic->TopFieldOrderCnt = 0;
3216         break;
3217     }
3218 }
3219
3220 static void
3221 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3222     GstVaapiPictureH264 *picture)
3223 {
3224     vaapi_fill_picture(pic, picture, 0);
3225
3226     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3227     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3228         /* The inter-view reference components and inter-view only
3229            reference components that are included in the reference
3230            picture lists are considered as not being marked as "used for
3231            short-term reference" or "used for long-term reference" */
3232         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3233                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3234     }
3235 }
3236
3237 static gboolean
3238 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3239 {
3240     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3241     GstVaapiPicture * const base_picture = &picture->base;
3242     GstH264PPS * const pps = get_pps(decoder);
3243     GstH264SPS * const sps = get_sps(decoder);
3244     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3245     guint i, n;
3246
3247     /* Fill in VAPictureParameterBufferH264 */
3248     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3249
3250     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3251         GstVaapiFrameStore * const fs = priv->dpb[i];
3252         if ((gst_vaapi_frame_store_has_reference(fs) &&
3253              fs->view_id == picture->base.view_id) ||
3254             (gst_vaapi_frame_store_has_inter_view(fs) &&
3255              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3256             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3257                 fs->buffers[0], fs->structure);
3258         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3259             break;
3260     }
3261     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3262         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3263
3264 #define COPY_FIELD(s, f) \
3265     pic_param->f = (s)->f
3266
3267 #define COPY_BFM(a, s, f) \
3268     pic_param->a.bits.f = (s)->f
3269
3270     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3271     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3272     pic_param->frame_num                    = priv->frame_num;
3273
3274     COPY_FIELD(sps, bit_depth_luma_minus8);
3275     COPY_FIELD(sps, bit_depth_chroma_minus8);
3276     COPY_FIELD(sps, num_ref_frames);
3277     COPY_FIELD(pps, num_slice_groups_minus1);
3278     COPY_FIELD(pps, slice_group_map_type);
3279     COPY_FIELD(pps, slice_group_change_rate_minus1);
3280     COPY_FIELD(pps, pic_init_qp_minus26);
3281     COPY_FIELD(pps, pic_init_qs_minus26);
3282     COPY_FIELD(pps, chroma_qp_index_offset);
3283     COPY_FIELD(pps, second_chroma_qp_index_offset);
3284
3285     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3286     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3287     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3288
3289     COPY_BFM(seq_fields, sps, chroma_format_idc);
3290     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3291     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3292     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3293     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3294     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3295     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3296     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3297     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3298
3299     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3300     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3301     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3302
3303     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3304     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3305     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3306     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3307     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3308     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3309     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3310     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3311     return TRUE;
3312 }
3313
3314 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3315 static gboolean
3316 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3317 {
3318     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3319     GstH264PPS * const pps = slice_hdr->pps;
3320     GstH264SPS * const sps = pps->sequence;
3321     GstH264SliceHdr *prev_slice_hdr;
3322
3323     if (!prev_pi)
3324         return TRUE;
3325     prev_slice_hdr = &prev_pi->data.slice_hdr;
3326
3327 #define CHECK_EXPR(expr, field_name) do {              \
3328         if (!(expr)) {                                 \
3329             GST_DEBUG(field_name " differs in value"); \
3330             return TRUE;                               \
3331         }                                              \
3332     } while (0)
3333
3334 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3335     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3336
3337     /* view_id differs in value (the VOIdx ordering part of the spec
3338        condition is checked separately in is_new_access_unit()) */
3339     CHECK_VALUE(pi, prev_pi, view_id);
3340
3341     /* frame_num differs in value, regardless of inferred values to 0 */
3342     /* frame_num differs in value, regardless of values inferred to be 0 */
3343
3344     /* pic_parameter_set_id differs in value */
3345     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3346
3347     /* field_pic_flag differs in value */
3348     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3349
3350     /* bottom_field_flag is present in both and differs in value */
3351     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3352         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3353
3354     /* nal_ref_idc differs in value, with one of the nal_ref_idc values being equal to 0 */
3355     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3356                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3357
3358     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3359        value or delta_pic_order_cnt_bottom differs in value */
3360     if (sps->pic_order_cnt_type == 0) {
3361         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3362         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3363             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3364     }
3365
3366     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3367        differs in value or delta_pic_order_cnt[1] differs in value */
3368     else if (sps->pic_order_cnt_type == 1) {
3369         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3370         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3371     }
3372
3373     /* IdrPicFlag differs in value */
3374     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3375
3376     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3377     if (pi->nalu.idr_pic_flag)
3378         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3379
3380 #undef CHECK_EXPR
3381 #undef CHECK_VALUE
3382     return FALSE;
3383 }
3384
3385 /* Detection of a new access unit, assuming we are already in the presence
3386    of a new picture */
3387 static inline gboolean
3388 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3389 {
3390     if (!prev_pi || prev_pi->view_id == pi->view_id)
3391         return TRUE;
3392     return pi->voc < prev_pi->voc;
3393 }
3394
3395 /* Finds the first field picture corresponding to the supplied picture */
3396 static GstVaapiPictureH264 *
3397 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3398 {
3399     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3400     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3401     GstVaapiFrameStore *fs;
3402
3403     if (!slice_hdr->field_pic_flag)
3404         return NULL;
3405
3406     fs = priv->prev_frames[pi->voc];
3407     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3408         return NULL;
3409
3410     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3411         return fs->buffers[0];
3412     return NULL;
3413 }
3414
3415 static GstVaapiDecoderStatus
3416 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3417 {
3418     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3419     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3420     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3421     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3422     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3423     GstVaapiPictureH264 *picture, *first_field;
3424     GstVaapiDecoderStatus status;
3425
3426     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3427     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3428
3429     /* Only decode base stream for MVC */
3430     switch (sps->profile_idc) {
3431     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3432     case GST_H264_PROFILE_STEREO_HIGH:
3433         if (0) {
3434             GST_DEBUG("drop picture from substream");
3435             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3436         }
3437         break;
3438     }
3439
3440     status = ensure_context(decoder, sps);
3441     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3442         return status;
3443
3444     priv->decoder_state = 0;
3445
3446     first_field = find_first_field(decoder, pi);
3447     if (first_field) {
3448         /* Re-use the current picture, in which the first field was already decoded */
3449         picture = gst_vaapi_picture_h264_new_field(first_field);
3450         if (!picture) {
3451             GST_ERROR("failed to allocate field picture");
3452             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3453         }
3454     }
3455     else {
3456         /* Create new picture */
3457         picture = gst_vaapi_picture_h264_new(decoder);
3458         if (!picture) {
3459             GST_ERROR("failed to allocate picture");
3460             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3461         }
3462     }
3463     gst_vaapi_picture_replace(&priv->current_picture, picture);
3464     gst_vaapi_picture_unref(picture);
3465
3466     /* Clear inter-view references list if this is the primary coded
3467        picture of the current access unit */
3468     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3469         g_ptr_array_set_size(priv->inter_views, 0);
3470
3471     /* Update cropping rectangle */
3472     if (sps->frame_cropping_flag) {
3473         GstVaapiRectangle crop_rect;
3474         crop_rect.x = sps->crop_rect_x;
3475         crop_rect.y = sps->crop_rect_y;
3476         crop_rect.width = sps->crop_rect_width;
3477         crop_rect.height = sps->crop_rect_height;
3478         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3479     }
3480
3481     status = ensure_quant_matrix(decoder, picture);
3482     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3483         GST_ERROR("failed to reset quantizer matrix");
3484         return status;
3485     }
3486
3487     if (!init_picture(decoder, picture, pi))
3488         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3489     if (!fill_picture(decoder, picture))
3490         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3491
3492     priv->decoder_state = pi->state;
3493     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3494 }
3495
3496 static inline guint
3497 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3498 {
3499     guint epb_count;
3500
3501     epb_count = slice_hdr->n_emulation_prevention_bytes;
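     /* For example, with a 1-byte NAL header, a 40-bit slice header and one
        emulation prevention byte inside that header, the offset is
        8 * 1 + 40 - 1 * 8 = 40 bits. */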
3502     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3503 }
3504
3505 static gboolean
3506 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3507     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3508 {
3509     VASliceParameterBufferH264 * const slice_param = slice->param;
3510     GstH264PPS * const pps = get_pps(decoder);
3511     GstH264SPS * const sps = get_sps(decoder);
3512     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3513     guint num_weight_tables = 0;
3514     gint i, j;
3515
3516     if (pps->weighted_pred_flag &&
3517         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3518         num_weight_tables = 1;
3519     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3520         num_weight_tables = 2;
3521     else
3522         num_weight_tables = 0;
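     /* Implicit weighted prediction (weighted_bipred_idc == 2) carries no
        explicit tables in the bitstream, so B slices also end up in this
        branch in that case. */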
3523
3524     slice_param->luma_log2_weight_denom   = 0;
3525     slice_param->chroma_log2_weight_denom = 0;
3526     slice_param->luma_weight_l0_flag      = 0;
3527     slice_param->chroma_weight_l0_flag    = 0;
3528     slice_param->luma_weight_l1_flag      = 0;
3529     slice_param->chroma_weight_l1_flag    = 0;
3530
3531     if (num_weight_tables < 1)
3532         return TRUE;
3533
3534     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3535     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3536
3537     slice_param->luma_weight_l0_flag = 1;
3538     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3539         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3540         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3541     }
3542
3543     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3544     if (slice_param->chroma_weight_l0_flag) {
3545         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3546             for (j = 0; j < 2; j++) {
3547                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3548                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3549             }
3550         }
3551     }
3552
3553     if (num_weight_tables < 2)
3554         return TRUE;
3555
3556     slice_param->luma_weight_l1_flag = 1;
3557     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3558         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3559         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3560     }
3561
3562     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3563     if (slice_param->chroma_weight_l1_flag) {
3564         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3565             for (j = 0; j < 2; j++) {
3566                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3567                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3568             }
3569         }
3570     }
3571     return TRUE;
3572 }
3573
3574 static gboolean
3575 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3576     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3577 {
3578     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3579     VASliceParameterBufferH264 * const slice_param = slice->param;
3580     guint i, num_ref_lists = 0;
3581
3582     slice_param->num_ref_idx_l0_active_minus1 = 0;
3583     slice_param->num_ref_idx_l1_active_minus1 = 0;
3584
3585     if (GST_H264_IS_B_SLICE(slice_hdr))
3586         num_ref_lists = 2;
3587     else if (GST_H264_IS_I_SLICE(slice_hdr))
3588         num_ref_lists = 0;
3589     else
3590         num_ref_lists = 1;
3591
3592     if (num_ref_lists < 1)
3593         return TRUE;
3594
3595     slice_param->num_ref_idx_l0_active_minus1 =
3596         slice_hdr->num_ref_idx_l0_active_minus1;
3597
3598     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3599         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3600             priv->RefPicList0[i]);
3601     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3602         vaapi_init_picture(&slice_param->RefPicList0[i]);
3603
3604     if (num_ref_lists < 2)
3605         return TRUE;
3606
3607     slice_param->num_ref_idx_l1_active_minus1 =
3608         slice_hdr->num_ref_idx_l1_active_minus1;
3609
3610     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3611         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3612             priv->RefPicList1[i]);
3613     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3614         vaapi_init_picture(&slice_param->RefPicList1[i]);
3615     return TRUE;
3616 }
3617
3618 static gboolean
3619 fill_slice(GstVaapiDecoderH264 *decoder,
3620     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3621 {
3622     VASliceParameterBufferH264 * const slice_param = slice->param;
3623     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3624
3625     /* Fill in VASliceParameterBufferH264 */
3626     slice_param->slice_data_bit_offset =
3627         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3628     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3629     slice_param->slice_type                     = slice_hdr->type % 5;
3630     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3631     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3632     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3633     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3634     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3635     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3636
3637     if (!fill_RefPicList(decoder, slice, slice_hdr))
3638         return FALSE;
3639     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3640         return FALSE;
3641     return TRUE;
3642 }
3643
3644 static GstVaapiDecoderStatus
3645 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3646 {
3647     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3648     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3649     GstVaapiPictureH264 * const picture = priv->current_picture;
3650     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3651     GstVaapiSlice *slice;
3652     GstBuffer * const buffer =
3653         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3654     GstMapInfo map_info;
3655
3656     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3657
3658     if (!is_valid_state(pi->state,
3659             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3660         GST_WARNING("failed to receive enough headers to decode slice");
3661         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3662     }
3663
3664     if (!ensure_pps(decoder, slice_hdr->pps)) {
3665         GST_ERROR("failed to activate PPS");
3666         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3667     }
3668
3669     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3670         GST_ERROR("failed to activate SPS");
3671         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3672     }
3673
3674     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3675         GST_ERROR("failed to map buffer");
3676         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3677     }
3678
3679     /* Check whether this is the first/last slice in the current access unit */
3680     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3681         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3682     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3683         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3684
3685     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3686         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3687     gst_buffer_unmap(buffer, &map_info);
3688     if (!slice) {
3689         GST_ERROR("failed to allocate slice");
3690         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3691     }
3692
3693     init_picture_refs(decoder, picture, slice_hdr);
3694     if (!fill_slice(decoder, slice, pi)) {
3695         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3696         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3697     }
3698
3699     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3700     picture->last_slice_hdr = slice_hdr;
3701     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3702     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3703 }
3704
3705 static inline gint
3706 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3707 {
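     /* The masked scan matches any 32-bit word of the form 00 00 01 xx,
        i.e. a three-byte Annex B start code followed by the first NAL byte. */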
3708     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3709                                                      0xffffff00, 0x00000100,
3710                                                      ofs, size,
3711                                                      scp);
3712 }
3713
3714 static GstVaapiDecoderStatus
3715 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3716 {
3717     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3718     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3719     GstVaapiDecoderStatus status;
3720
3721     priv->decoder_state |= pi->state;
3722     switch (pi->nalu.type) {
3723     case GST_H264_NAL_SPS:
3724         status = decode_sps(decoder, unit);
3725         break;
3726     case GST_H264_NAL_SUBSET_SPS:
3727         status = decode_subset_sps(decoder, unit);
3728         break;
3729     case GST_H264_NAL_PPS:
3730         status = decode_pps(decoder, unit);
3731         break;
3732     case GST_H264_NAL_SLICE_EXT:
3733     case GST_H264_NAL_SLICE_IDR:
3734         /* fall-through. IDR specifics are handled in init_picture() */
3735     case GST_H264_NAL_SLICE:
3736         status = decode_slice(decoder, unit);
3737         break;
3738     case GST_H264_NAL_SEQ_END:
3739     case GST_H264_NAL_STREAM_END:
3740         status = decode_sequence_end(decoder);
3741         break;
3742     case GST_H264_NAL_SEI:
3743         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3744         break;
3745     default:
3746         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3747         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3748         break;
3749     }
3750     return status;
3751 }
3752
3753 static GstVaapiDecoderStatus
3754 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3755     const guchar *buf, guint buf_size)
3756 {
3757     GstVaapiDecoderH264 * const decoder =
3758         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3759     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3760     GstVaapiDecoderStatus status;
3761     GstVaapiDecoderUnit unit;
3762     GstVaapiParserInfoH264 *pi = NULL;
3763     GstH264ParserResult result;
3764     guint i, ofs, num_sps, num_pps;
3765
3766     unit.parsed_info = NULL;
3767
3768     if (buf_size < 8)
3769         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3770
3771     if (buf[0] != 1) {
3772         GST_ERROR("failed to decode codec-data, not in avcC format");
3773         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3774     }
3775
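     /* avcC layout (ISO/IEC 14496-15): buf[0] = configurationVersion (1),
        buf[1..3] = profile / compatibility / level, the low 2 bits of buf[4]
        hold lengthSizeMinusOne and the low 5 bits of buf[5] the SPS count. */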
3776     priv->nal_length_size = (buf[4] & 0x03) + 1;
3777
3778     num_sps = buf[5] & 0x1f;
3779     ofs = 6;
3780
3781     for (i = 0; i < num_sps; i++) {
3782         pi = gst_vaapi_parser_info_h264_new();
3783         if (!pi)
3784             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3785         unit.parsed_info = pi;
3786
3787         result = gst_h264_parser_identify_nalu_avc(
3788             priv->parser,
3789             buf, ofs, buf_size, 2,
3790             &pi->nalu
3791         );
3792         if (result != GST_H264_PARSER_OK) {
3793             status = get_status(result);
3794             goto cleanup;
3795         }
3796
3797         status = parse_sps(decoder, &unit);
3798         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3799             goto cleanup;
3800         ofs = pi->nalu.offset + pi->nalu.size;
3801
3802         status = decode_sps(decoder, &unit);
3803         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3804             goto cleanup;
3805         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3806     }
3807
3808     num_pps = buf[ofs];
3809     ofs++;
3810
3811     for (i = 0; i < num_pps; i++) {
3812         pi = gst_vaapi_parser_info_h264_new();
3813         if (!pi)
3814             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3815         unit.parsed_info = pi;
3816
3817         result = gst_h264_parser_identify_nalu_avc(
3818             priv->parser,
3819             buf, ofs, buf_size, 2,
3820             &pi->nalu
3821         );
3822         if (result != GST_H264_PARSER_OK) {
3823             status = get_status(result);
3824             goto cleanup;
3825         }
3826
3827         status = parse_pps(decoder, &unit);
3828         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3829             goto cleanup;
3830         ofs = pi->nalu.offset + pi->nalu.size;
3831
3832         status = decode_pps(decoder, &unit);
3833         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3834             goto cleanup;
3835         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3836     }
3837
3838     priv->is_avcC = TRUE;
3839     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3840
3841 cleanup:
3842     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3843     return status;
3844 }
3845
3846 static GstVaapiDecoderStatus
3847 ensure_decoder(GstVaapiDecoderH264 *decoder)
3848 {
3849     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3850     GstVaapiDecoderStatus status;
3851
3852     if (!priv->is_opened) {
3853         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3854         if (!priv->is_opened)
3855             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3856
3857         status = gst_vaapi_decoder_decode_codec_data(
3858             GST_VAAPI_DECODER_CAST(decoder));
3859         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3860             return status;
3861     }
3862     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3863 }
3864
3865 static GstVaapiDecoderStatus
3866 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3867     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3868 {
3869     GstVaapiDecoderH264 * const decoder =
3870         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3871     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3872     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3873     GstVaapiParserInfoH264 *pi;
3874     GstVaapiDecoderStatus status;
3875     GstH264ParserResult result;
3876     guchar *buf;
3877     guint i, size, buf_size, nalu_size, flags;
3878     guint32 start_code;
3879     gint ofs, ofs2;
3880
3881     status = ensure_decoder(decoder);
3882     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3883         return status;
3884
3885     switch (priv->stream_alignment) {
3886     case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3887         size = gst_adapter_available_fast(adapter);
3888         break;
3889     default:
3890         size = gst_adapter_available(adapter);
3891         break;
3892     }
3893
3894     if (priv->is_avcC) {
3895         if (size < priv->nal_length_size)
3896             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3897
3898         buf = (guchar *)&start_code;
3899         g_assert(priv->nal_length_size <= sizeof(start_code));
3900         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3901
3902         nalu_size = 0;
3903         for (i = 0; i < priv->nal_length_size; i++)
3904             nalu_size = (nalu_size << 8) | buf[i];
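             /* The length prefix is big-endian: e.g. with nal_length_size = 4
                and prefix bytes 00 00 01 2a, nalu_size becomes 0x12a (298). */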
3905
3906         buf_size = priv->nal_length_size + nalu_size;
3907         if (size < buf_size)
3908             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3909     }
3910     else {
3911         if (size < 4)
3912             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3913
3914         if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
3915             buf_size = size;
3916         else {
3917             ofs = scan_for_start_code(adapter, 0, size, NULL);
3918             if (ofs < 0)
3919                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3920
3921             if (ofs > 0) {
3922                 gst_adapter_flush(adapter, ofs);
3923                 size -= ofs;
3924             }
3925
3926             ofs2 = ps->input_offset2 - ofs - 4;
3927             if (ofs2 < 4)
3928                 ofs2 = 4;
3929
3930             ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3931                 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3932             if (ofs < 0) {
3933                 /* Assume the whole NAL unit is present if end-of-stream */
3934                 if (!at_eos) {
3935                     ps->input_offset2 = size;
3936                     return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3937                 }
3938                 ofs = size;
3939             }
3940             buf_size = ofs;
3941         }
3942     }
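     /* A full NAL unit is now available, so reset the cached start-code
        scan offset used by the Annex B path above. */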
3943     ps->input_offset2 = 0;
3944
3945     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3946     if (!buf)
3947         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3948
3949     unit->size = buf_size;
3950
3951     pi = gst_vaapi_parser_info_h264_new();
3952     if (!pi)
3953         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3954
3955     gst_vaapi_decoder_unit_set_parsed_info(unit,
3956         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3957
3958     if (priv->is_avcC)
3959         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3960             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3961     else
3962         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3963             buf, 0, buf_size, &pi->nalu);
3964     status = get_status(result);
3965     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3966         return status;
3967
3968     switch (pi->nalu.type) {
3969     case GST_H264_NAL_SPS:
3970         status = parse_sps(decoder, unit);
3971         break;
3972     case GST_H264_NAL_SUBSET_SPS:
3973         status = parse_subset_sps(decoder, unit);
3974         break;
3975     case GST_H264_NAL_PPS:
3976         status = parse_pps(decoder, unit);
3977         break;
3978     case GST_H264_NAL_SEI:
3979         status = parse_sei(decoder, unit);
3980         break;
3981     case GST_H264_NAL_SLICE_EXT:
3982         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3983             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3984             break;
3985         }
3986         /* fall-through */
3987     case GST_H264_NAL_SLICE_IDR:
3988     case GST_H264_NAL_SLICE:
3989         status = parse_slice(decoder, unit);
3990         break;
3991     default:
3992         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3993         break;
3994     }
3995     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3996         return status;
3997
3998     flags = 0;
3999     switch (pi->nalu.type) {
4000     case GST_H264_NAL_AU_DELIMITER:
4001         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4002         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4003         /* fall-through */
4004     case GST_H264_NAL_FILLER_DATA:
4005         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4006         break;
4007     case GST_H264_NAL_STREAM_END:
4008         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4009         /* fall-through */
4010     case GST_H264_NAL_SEQ_END:
4011         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4012         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4013         break;
4014     case GST_H264_NAL_SPS:
4015     case GST_H264_NAL_SUBSET_SPS:
4016     case GST_H264_NAL_PPS:
4017     case GST_H264_NAL_SEI:
4018         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4019         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4020         break;
4021     case GST_H264_NAL_SLICE_EXT:
4022         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4023             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4024             break;
4025         }
4026         /* fall-through */
4027     case GST_H264_NAL_SLICE_IDR:
4028     case GST_H264_NAL_SLICE:
4029         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4030         if (is_new_picture(pi, priv->prev_slice_pi)) {
4031             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4032             if (is_new_access_unit(pi, priv->prev_slice_pi))
4033                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4034         }
4035         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4036         break;
4037     case GST_H264_NAL_SPS_EXT:
4038     case GST_H264_NAL_SLICE_AUX:
4039         /* skip SPS extension and auxiliary slice for now */
4040         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4041         break;
4042     case GST_H264_NAL_PREFIX_UNIT:
4043         /* skip Prefix NAL units for now */
4044         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4045             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4046             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4047         break;
4048     default:
4049         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4050             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4051                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4052         break;
4053     }
4054     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4055         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4056     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4057
4058     pi->nalu.data = NULL;
4059     pi->state = priv->parser_state;
4060     pi->flags = flags;
4061     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4062     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4063 }
4064
4065 static GstVaapiDecoderStatus
4066 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4067     GstVaapiDecoderUnit *unit)
4068 {
4069     GstVaapiDecoderH264 * const decoder =
4070         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4071     GstVaapiDecoderStatus status;
4072
4073     status = ensure_decoder(decoder);
4074     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4075         return status;
4076     return decode_unit(decoder, unit);
4077 }
4078
4079 static GstVaapiDecoderStatus
4080 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4081     GstVaapiDecoderUnit *unit)
4082 {
4083     GstVaapiDecoderH264 * const decoder =
4084         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4085
4086     return decode_picture(decoder, unit);
4087 }
4088
4089 static GstVaapiDecoderStatus
4090 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4091 {
4092     GstVaapiDecoderH264 * const decoder =
4093         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4094
4095     return decode_current_picture(decoder);
4096 }
4097
4098 static GstVaapiDecoderStatus
4099 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4100 {
4101     GstVaapiDecoderH264 * const decoder =
4102         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4103
4104     dpb_flush(decoder, NULL);
4105     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4106 }
4107
4108 static void
4109 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4110 {
4111     GstVaapiMiniObjectClass * const object_class =
4112         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4113     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4114
4115     object_class->size          = sizeof(GstVaapiDecoderH264);
4116     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4117
4118     decoder_class->create       = gst_vaapi_decoder_h264_create;
4119     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4120     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4121     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4122     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4123     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4124     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4125
4126     decoder_class->decode_codec_data =
4127         gst_vaapi_decoder_h264_decode_codec_data;
4128 }
4129
4130 static inline const GstVaapiDecoderClass *
4131 gst_vaapi_decoder_h264_class(void)
4132 {
4133     static GstVaapiDecoderH264Class g_class;
4134     static gsize g_class_init = FALSE;
4135
4136     if (g_once_init_enter(&g_class_init)) {
4137         gst_vaapi_decoder_h264_class_init(&g_class);
4138         g_once_init_leave(&g_class_init, TRUE);
4139     }
4140     return GST_VAAPI_DECODER_CLASS(&g_class);
4141 }
4142
4143 /**
4144  * gst_vaapi_decoder_h264_set_alignment:
4145  * @decoder: a #GstVaapiDecoderH264
4146  * @alignment: the #GstVaapiStreamAlignH264
4147  *
4148  * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4149  * of each buffer that is supplied to the decoder. This could be no
4150  * specific alignment, NAL unit boundaries, or access unit boundaries.
4151  */
4152 void
4153 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4154     GstVaapiStreamAlignH264 alignment)
4155 {
4156     g_return_if_fail(decoder != NULL);
4157
4158     decoder->priv.stream_alignment = alignment;
4159 }
4160
4161 /**
4162  * gst_vaapi_decoder_h264_new:
4163  * @display: a #GstVaapiDisplay
4164  * @caps: a #GstCaps holding codec information
4165  *
4166  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4167  * hold extra information like codec-data and picture coded size.
4168  *
4169  * Return value: the newly allocated #GstVaapiDecoder object
4170  */
4171 GstVaapiDecoder *
4172 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4173 {
4174     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4175 }