decoder: h264: fix the DPB compaction process.
[platform/upstream/gstreamer-vaapi.git] / gst-libs / gst / vaapi / gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* This flag does not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit as we are only
69        interested in knowing if the current slice is the first one or
70        the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
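/* Note (descriptive comment, not upstream): the reference status is encoded
   with two bits: FLAG_REFERENCE alone means "used for short-term reference",
   while FLAG_REFERENCE combined with FLAG_REFERENCE2 means "used for
   long-term reference". This is why the IS_*_REFERENCE() macros below
   compare the masked flags for equality rather than testing a single bit. */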
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
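/* A frame store is one entry of the decoded picture buffer (DPB): it holds
   either a complete frame (one buffer) or a complementary field pair (two
   buffers). output_needed counts how many of those pictures still have to
   be output for display. */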
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
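    /* Cross-propagate the picture order counts: the first field gets the
       new field's POC for the missing parity, and the new field inherits
       the first field's POC for the opposite parity, so both pictures end
       up with a complete {TopFieldOrderCnt, BottomFieldOrderCnt} pair. */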
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
355
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiStreamAlignH264     stream_alignment;
449     GstVaapiPictureH264        *current_picture;
450     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
451     GstVaapiParserInfoH264     *active_sps;
452     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
453     GstVaapiParserInfoH264     *active_pps;
454     GstVaapiParserInfoH264     *prev_pi;
455     GstVaapiParserInfoH264     *prev_slice_pi;
456     GstVaapiFrameStore        **prev_frames;
457     guint                       prev_frames_alloc;
458     GstVaapiFrameStore        **dpb;
459     guint                       dpb_count;
460     guint                       dpb_size;
461     guint                       dpb_size_max;
462     guint                       max_views;
463     GstVaapiProfile             profile;
464     GstVaapiEntrypoint          entrypoint;
465     GstVaapiChromaType          chroma_type;
466     GPtrArray                  *inter_views;
467     GstVaapiPictureH264        *short_ref[32];
468     guint                       short_ref_count;
469     GstVaapiPictureH264        *long_ref[32];
470     guint                       long_ref_count;
471     GstVaapiPictureH264        *RefPicList0[32];
472     guint                       RefPicList0_count;
473     GstVaapiPictureH264        *RefPicList1[32];
474     guint                       RefPicList1_count;
475     guint                       nal_length_size;
476     guint                       mb_width;
477     guint                       mb_height;
478     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479     gint32                      poc_msb;                // PicOrderCntMsb
480     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
481     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
482     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
483     gint32                      frame_num_offset;       // FrameNumOffset
484     gint32                      frame_num;              // frame_num (from slice_header())
485     gint32                      prev_frame_num;         // prevFrameNum
486     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
487     gboolean                    prev_pic_structure;     // previous picture structure
488     guint                       is_opened               : 1;
489     guint                       is_avcC                 : 1;
490     guint                       has_context             : 1;
491     guint                       progressive_sequence    : 1;
492 };
493
494 /**
495  * GstVaapiDecoderH264:
496  *
497  * A decoder based on H.264.
498  */
499 struct _GstVaapiDecoderH264 {
500     /*< private >*/
501     GstVaapiDecoder             parent_instance;
502     GstVaapiDecoderH264Private  priv;
503 };
504
505 /**
506  * GstVaapiDecoderH264Class:
507  *
508  * A decoder class based on H.264.
509  */
510 struct _GstVaapiDecoderH264Class {
511     /*< private >*/
512     GstVaapiDecoderClass parent_class;
513 };
514
515 static gboolean
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
517
518 static gboolean
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520     GstVaapiPictureH264 *picture);
521
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524     GstVaapiFrameStore *fs)
525 {
526     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
527 }
528
529 /* Determines if the supplied profile is one of the MVC set */
530 static gboolean
531 is_mvc_profile(GstH264Profile profile)
532 {
533     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534         profile == GST_H264_PROFILE_STEREO_HIGH;
535 }
536
537 /* Determines the view_id from the supplied NAL unit */
538 static inline guint
539 get_view_id(GstH264NalUnit *nalu)
540 {
541     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
542 }
543
544 /* Determines the view order index (VOIdx) from the supplied view_id */
545 static gint
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
547 {
548     GstH264SPSExtMVC *mvc;
549     gint i;
550
551     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
552         return 0;
553
554     mvc = &sps->extension.mvc;
555     for (i = 0; i <= mvc->num_views_minus1; i++) {
556         if (mvc->view[i].view_id == view_id)
557             return i;
558     }
559     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
560     return -1;
561 }
562
563 /* Determines NumViews */
564 static guint
565 get_num_views(GstH264SPS *sps)
566 {
567     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568         sps->extension.mvc.num_views_minus1 : 0);
569 }
570
571 /* Gets the maximum DPB size (max_dec_frame_buffering) to use */
572 static guint
573 get_max_dec_frame_buffering(GstH264SPS *sps)
574 {
575     guint num_views, max_dpb_frames;
576     guint max_dec_frame_buffering, PicSizeMbs;
577     GstVaapiLevelH264 level;
578     const GstVaapiH264LevelLimits *level_limits;
579
580     /* Table A-1 - Level limits */
581     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582         level = GST_VAAPI_LEVEL_H264_L1b;
583     else
584         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586     if (G_UNLIKELY(!level_limits)) {
587         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588         max_dec_frame_buffering = 16;
589     }
590     else {
591         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592                       (sps->pic_height_in_map_units_minus1 + 1) *
593                       (sps->frame_mbs_only_flag ? 1 : 2));
594         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
595     }
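    /* Worked example, for illustration only: a level 4.1 stream has
       MaxDpbMbs = 32768; at 1920x1088 (120 x 68 MBs, PicSizeMbs = 8160)
       this yields max_dec_frame_buffering = 32768 / 8160 = 4 frames. */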
596     if (is_mvc_profile(sps->profile_idc))
597         max_dec_frame_buffering <<= 1;
598
599     /* VUI parameters */
600     if (sps->vui_parameters_present_flag) {
601         GstH264VUIParams * const vui_params = &sps->vui_parameters;
602         if (vui_params->bitstream_restriction_flag)
603             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
604         else {
605             switch (sps->profile_idc) {
606             case 44:  // CAVLC 4:4:4 Intra profile
607             case GST_H264_PROFILE_SCALABLE_HIGH:
608             case GST_H264_PROFILE_HIGH:
609             case GST_H264_PROFILE_HIGH10:
610             case GST_H264_PROFILE_HIGH_422:
611             case GST_H264_PROFILE_HIGH_444:
612                 if (sps->constraint_set3_flag)
613                     max_dec_frame_buffering = 0;
614                 break;
615             }
616         }
617     }
618
619     num_views = get_num_views(sps);
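    /* The DPB is capped to 16 frames per view; for MVC the cap is scaled by
       ceil(log2(NumViews)), which is what g_bit_storage(num_views - 1)
       computes for num_views > 1 (cf. the Annex H MaxDpbFrames derivation). */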
620     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621     if (max_dec_frame_buffering > max_dpb_frames)
622         max_dec_frame_buffering = max_dpb_frames;
623     else if (max_dec_frame_buffering < sps->num_ref_frames)
624         max_dec_frame_buffering = sps->num_ref_frames;
625     return MAX(1, max_dec_frame_buffering);
626 }
627
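/* Removes the entry at @index by moving the last entry into its slot: O(1),
   but does not preserve ordering. For illustration, removing index 1 from
   {A, B, C, D} (length 4) yields {A, D, C} with the length updated to 3. */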
628 static void
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
630 {
631     gpointer * const entries = array;
632     guint num_entries = *array_length_ptr;
633
634     g_return_if_fail(index < num_entries);
635
636     if (index != --num_entries)
637         entries[index] = entries[num_entries];
638     entries[num_entries] = NULL;
639     *array_length_ptr = num_entries;
640 }
641
642 #if 1
643 static inline void
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
645 {
646     array_remove_index_fast(array, array_length_ptr, index);
647 }
648 #else
649 static void
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
651 {
652     gpointer * const entries = array;
653     const guint num_entries = *array_length_ptr - 1;
654     guint i;
655
656     g_return_if_fail(index <= num_entries);
657
658     for (i = index; i < num_entries; i++)
659         entries[i] = entries[i + 1];
660     entries[num_entries] = NULL;
661     *array_length_ptr = num_entries;
662 }
663 #endif
664
665 #define ARRAY_REMOVE_INDEX(array, index) \
666     array_remove_index(array, &array##_count, index)
667
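/* Removes the frame store at @index from the DPB. With
   USE_STRICT_DPB_ORDERING the remaining entries are shifted down so their
   order is preserved; otherwise the last entry is simply moved into the
   hole, which is cheaper but reorders the DPB. */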
668 static void
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
670 {
671     GstVaapiDecoderH264Private * const priv = &decoder->priv;
672     guint i, num_frames = --priv->dpb_count;
673
674     if (USE_STRICT_DPB_ORDERING) {
675         for (i = index; i < num_frames; i++)
676             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
677     }
678     else if (index != num_frames)
679         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
681 }
682
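/* Marks @picture as output. When a frame store is supplied, the underlying
   surface is pushed downstream only once all pictures of that frame store
   have been output, i.e. when its output_needed counter drops to zero. */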
683 static gboolean
684 dpb_output(
685     GstVaapiDecoderH264 *decoder,
686     GstVaapiFrameStore  *fs,
687     GstVaapiPictureH264 *picture
688 )
689 {
690     picture->output_needed = FALSE;
691
692     if (fs) {
693         if (--fs->output_needed > 0)
694             return TRUE;
695         picture = fs->buffers[0];
696     }
697     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
698 }
699
700 static inline void
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
702 {
703     GstVaapiDecoderH264Private * const priv = &decoder->priv;
704     GstVaapiFrameStore * const fs = priv->dpb[i];
705
706     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707         dpb_remove_index(decoder, i);
708 }
709
710 /* Finds the frame store holding the supplied picture */
711 static gint
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
713 {
714     GstVaapiDecoderH264Private * const priv = &decoder->priv;
715     gint i, j;
716
717     for (i = 0; i < priv->dpb_count; i++) {
718         GstVaapiFrameStore * const fs = priv->dpb[i];
719         for (j = 0; j < fs->num_buffers; j++) {
720             if (fs->buffers[j] == picture)
721                 return i;
722         }
723     }
724     return -1;
725 }
726
727 /* Finds the picture with the lowest POC that needs to be output */
728 static gint
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730     GstVaapiPictureH264 **found_picture_ptr)
731 {
732     GstVaapiDecoderH264Private * const priv = &decoder->priv;
733     GstVaapiPictureH264 *found_picture = NULL;
734     guint i, j, found_index;
735
736     for (i = 0; i < priv->dpb_count; i++) {
737         GstVaapiFrameStore * const fs = priv->dpb[i];
738         if (!fs->output_needed)
739             continue;
740         if (picture && picture->base.view_id != fs->view_id)
741             continue;
742         for (j = 0; j < fs->num_buffers; j++) {
743             GstVaapiPictureH264 * const pic = fs->buffers[j];
744             if (!pic->output_needed)
745                 continue;
746             if (!found_picture || found_picture->base.poc > pic->base.poc ||
747                 (found_picture->base.poc == pic->base.poc &&
748                  found_picture->base.voc > pic->base.voc))
749                 found_picture = pic, found_index = i;
750         }
751     }
752
753     if (found_picture_ptr)
754         *found_picture_ptr = found_picture;
755     return found_picture ? found_index : -1;
756 }
757
758 /* Finds the picture with the lowest VOC that needs to be output */
759 static gint
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761     GstVaapiPictureH264 **found_picture_ptr)
762 {
763     GstVaapiDecoderH264Private * const priv = &decoder->priv;
764     GstVaapiPictureH264 *found_picture = NULL;
765     guint i, j, found_index;
766
767     for (i = 0; i < priv->dpb_count; i++) {
768         GstVaapiFrameStore * const fs = priv->dpb[i];
769         if (!fs->output_needed || fs->view_id == picture->base.view_id)
770             continue;
771         for (j = 0; j < fs->num_buffers; j++) {
772             GstVaapiPictureH264 * const pic = fs->buffers[j];
773             if (!pic->output_needed || pic->base.poc != picture->base.poc)
774                 continue;
775             if (!found_picture || found_picture->base.voc > pic->base.voc)
776                 found_picture = pic, found_index = i;
777         }
778     }
779
780     if (found_picture_ptr)
781         *found_picture_ptr = found_picture;
782     return found_picture ? found_index : -1;
783 }
784
785 static gboolean
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787     GstVaapiPictureH264 *picture, guint voc)
788 {
789     GstVaapiDecoderH264Private * const priv = &decoder->priv;
790     GstVaapiPictureH264 *found_picture;
791     gint found_index;
792     gboolean success;
793
794     if (priv->max_views == 1)
795         return TRUE;
796
797     /* Emit all other view components that were in the same access
798        unit as the picture we have just found */
799     found_picture = picture;
800     for (;;) {
801         found_index = dpb_find_lowest_voc(decoder, found_picture,
802             &found_picture);
803         if (found_index < 0 || found_picture->base.voc >= voc)
804             break;
805         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806         dpb_evict(decoder, found_picture, found_index);
807         if (!success)
808             return FALSE;
809     }
810     return TRUE;
811 }
812
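/* "Bumping" process (cf. C.4.5.3): outputs the picture with the lowest POC
   that still needs to be output, evicts its frame store when it is no
   longer needed, and, for MVC streams, also emits the remaining view
   components belonging to the same access unit. */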
813 static gboolean
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
815 {
816     GstVaapiDecoderH264Private * const priv = &decoder->priv;
817     GstVaapiPictureH264 *found_picture;
818     gint found_index;
819     gboolean success;
820
821     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
822     if (found_index < 0)
823         return FALSE;
824
825     if (picture && picture->base.poc != found_picture->base.poc)
826         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
827
828     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829     dpb_evict(decoder, found_picture, found_index);
830     if (priv->max_views == 1)
831         return success;
832
833     if (picture && picture->base.poc != found_picture->base.poc)
834         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
835     return success;
836 }
837
838 static void
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 {
841     GstVaapiDecoderH264Private * const priv = &decoder->priv;
842     guint i, n;
843
844     for (i = 0; i < priv->dpb_count; i++) {
845         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
846             continue;
847         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
848     }
849
850     /* Compact the resulting DPB, i.e. remove holes */
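    /* When clearing a single view (picture != NULL), only the matching
       entries were dropped above, leaving NULL holes at arbitrary
       positions; shift the surviving frame stores down so that
       dpb[0..dpb_count) stays contiguous. */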
851     for (i = 0, n = 0; i < priv->dpb_count; i++) {
852         if (priv->dpb[i]) {
853             if (i != n) {
854                 priv->dpb[n] = priv->dpb[i];
855                 priv->dpb[i] = NULL;
856             }
857             n++;
858         }
859     }
860     priv->dpb_count = n;
861
862     /* Clear previous frame buffers only if this is a "flush-all" operation,
863        or if the picture is the first one in the access unit */
864     if (priv->prev_frames && (!picture ||
865             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866                 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867         for (i = 0; i < priv->max_views; i++)
868             gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
869     }
870 }
871
872 static void
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
874 {
875     while (dpb_bump(decoder, picture))
876         ;
877     dpb_clear(decoder, picture);
878 }
879
880 static void
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
882 {
883     GstVaapiDecoderH264Private * const priv = &decoder->priv;
884     const gboolean is_last_picture = /* in the access unit */
885         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
886     guint i;
887
888     // Remove all unused inter-view only reference components of the current AU
889     i = 0;
890     while (i < priv->dpb_count) {
891         GstVaapiFrameStore * const fs = priv->dpb[i];
892         if (fs->view_id != picture->base.view_id &&
893             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
894             (is_last_picture ||
895              !is_inter_view_reference_for_next_frames(decoder, fs)))
896             dpb_remove_index(decoder, i);
897         else
898             i++;
899     }
900 }
901
902 static gboolean
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
904 {
905     GstVaapiDecoderH264Private * const priv = &decoder->priv;
906     GstVaapiFrameStore *fs;
907     guint i;
908
909     if (priv->max_views > 1)
910         dpb_prune_mvc(decoder, picture);
911
912     // Remove all unused pictures
913     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
914         i = 0;
915         while (i < priv->dpb_count) {
916             GstVaapiFrameStore * const fs = priv->dpb[i];
917             if (fs->view_id == picture->base.view_id &&
918                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919                 dpb_remove_index(decoder, i);
920             else
921                 i++;
922         }
923     }
924
925     // Check if picture is the second field and the first field is still in DPB
926     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928         const gint found_index = dpb_find_picture(decoder,
929             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930         if (found_index >= 0)
931             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
932     }
933
934     // Create new frame store, and split fields if necessary
935     fs = gst_vaapi_frame_store_new(picture);
936     if (!fs)
937         return FALSE;
938     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
939     gst_vaapi_frame_store_unref(fs);
940
941     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
942         if (!gst_vaapi_frame_store_split_fields(fs))
943             return FALSE;
944     }
945
946     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
947     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
948         while (priv->dpb_count == priv->dpb_size) {
949             if (!dpb_bump(decoder, picture))
950                 return FALSE;
951         }
952     }
953
954     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
955     else {
956         const gboolean StoreInterViewOnlyRefFlag =
957             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
958                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
959             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
960                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
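        /* A non-reference picture that is neither meant for output nor
           needed as an inter-view only reference for the rest of the
           access unit does not need to be stored at all. */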
961         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
962             return TRUE;
963         while (priv->dpb_count == priv->dpb_size) {
964             GstVaapiPictureH264 *found_picture;
965             if (!StoreInterViewOnlyRefFlag) {
966                 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
967                     found_picture->base.poc > picture->base.poc)
968                     return dpb_output(decoder, NULL, picture);
969             }
970             if (!dpb_bump(decoder, picture))
971                 return FALSE;
972         }
973     }
974
975     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
976     if (picture->output_flag) {
977         picture->output_needed = TRUE;
978         fs->output_needed++;
979     }
980     return TRUE;
981 }
982
983 static gboolean
984 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
985 {
986     GstVaapiDecoderH264Private * const priv = &decoder->priv;
987
988     if (dpb_size < priv->dpb_count)
989         return FALSE;
990
991     if (dpb_size > priv->dpb_size_max) {
992         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
993         if (!priv->dpb)
994             return FALSE;
995         memset(&priv->dpb[priv->dpb_size_max], 0,
996             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
997         priv->dpb_size_max = dpb_size;
998     }
999
1000     if (priv->dpb_size < dpb_size)
1001         priv->dpb_size = dpb_size;
1002     else if (dpb_size < priv->dpb_count)
1003         return FALSE;
1004
1005     GST_DEBUG("DPB size %u", priv->dpb_size);
1006     return TRUE;
1007 }
1008
1009 static void
1010 unref_inter_view(GstVaapiPictureH264 *picture)
1011 {
1012     if (!picture)
1013         return;
1014     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1015     gst_vaapi_picture_unref(picture);
1016 }
1017
1018 /* Resets MVC resources */
1019 static gboolean
1020 mvc_reset(GstVaapiDecoderH264 *decoder)
1021 {
1022     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1023     guint i;
1024
1025     // Create the array of inter-view references, if not already done
1026     if (!priv->inter_views) {
1027         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1028             (GDestroyNotify)unref_inter_view);
1029         if (!priv->inter_views)
1030             return FALSE;
1031     }
1032
1033     // Resize array of previous frame buffers
1034     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1035         gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1036
1037     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1038         sizeof(*priv->prev_frames));
1039     if (!priv->prev_frames) {
1040         priv->prev_frames_alloc = 0;
1041         return FALSE;
1042     }
1043     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1044         priv->prev_frames[i] = NULL;
1045     priv->prev_frames_alloc = priv->max_views;
1046     return TRUE;
1047 }
1048
1049 static GstVaapiDecoderStatus
1050 get_status(GstH264ParserResult result)
1051 {
1052     GstVaapiDecoderStatus status;
1053
1054     switch (result) {
1055     case GST_H264_PARSER_OK:
1056         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1057         break;
1058     case GST_H264_PARSER_NO_NAL_END:
1059         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1060         break;
1061     case GST_H264_PARSER_ERROR:
1062         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1063         break;
1064     default:
1065         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1066         break;
1067     }
1068     return status;
1069 }
1070
1071 static void
1072 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1073 {
1074     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1075
1076     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1077     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1078     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1079
1080     dpb_clear(decoder, NULL);
1081
1082     if (priv->inter_views) {
1083         g_ptr_array_unref(priv->inter_views);
1084         priv->inter_views = NULL;
1085     }
1086
1087     if (priv->parser) {
1088         gst_h264_nal_parser_free(priv->parser);
1089         priv->parser = NULL;
1090     }
1091 }
1092
1093 static gboolean
1094 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1095 {
1096     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1097
1098     gst_vaapi_decoder_h264_close(decoder);
1099
1100     priv->parser = gst_h264_nal_parser_new();
1101     if (!priv->parser)
1102         return FALSE;
1103     return TRUE;
1104 }
1105
1106 static void
1107 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1108 {
1109     GstVaapiDecoderH264 * const decoder =
1110         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1111     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1112     guint i;
1113
1114     gst_vaapi_decoder_h264_close(decoder);
1115
1116     g_free(priv->dpb);
1117     priv->dpb = NULL;
1118     priv->dpb_size = 0;
1119
1120     g_free(priv->prev_frames);
1121     priv->prev_frames = NULL;
1122     priv->prev_frames_alloc = 0;
1123
1124     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1125         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1126     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1127
1128     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1129         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1130     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1131 }
1132
1133 static gboolean
1134 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1135 {
1136     GstVaapiDecoderH264 * const decoder =
1137         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1138     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1139
1140     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1141     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1142     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1143     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1144     priv->progressive_sequence  = TRUE;
1145     return TRUE;
1146 }
1147
1148 /* Activates the supplied PPS */
1149 static GstH264PPS *
1150 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1151 {
1152     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1153     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1154
1155     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1156     return pi ? &pi->data.pps : NULL;
1157 }
1158
1159 /* Returns the active PPS */
1160 static inline GstH264PPS *
1161 get_pps(GstVaapiDecoderH264 *decoder)
1162 {
1163     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1164
1165     return pi ? &pi->data.pps : NULL;
1166 }
1167
1168 /* Activates the supplied SPS */
1169 static GstH264SPS *
1170 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1171 {
1172     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1173     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1174
1175     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1176     return pi ? &pi->data.sps : NULL;
1177 }
1178
1179 /* Returns the active SPS */
1180 static inline GstH264SPS *
1181 get_sps(GstVaapiDecoderH264 *decoder)
1182 {
1183     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1184
1185     return pi ? &pi->data.sps : NULL;
1186 }
1187
1188 static void
1189 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1190     GstVaapiProfile profile)
1191 {
1192     guint n_profiles = *n_profiles_ptr;
1193
1194     profiles[n_profiles++] = profile;
1195     switch (profile) {
1196     case GST_VAAPI_PROFILE_H264_MAIN:
1197         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1198         break;
1199     default:
1200         break;
1201     }
1202     *n_profiles_ptr = n_profiles;
1203 }
1204
1205 /* Fills in compatible profiles for MVC decoding */
1206 static void
1207 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1208     guint *n_profiles_ptr, guint dpb_size)
1209 {
1210     const gchar * const vendor_string =
1211         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1212
1213     gboolean add_high_profile = FALSE;
1214     struct map {
1215         const gchar *str;
1216         guint str_len;
1217     };
1218     const struct map *m;
1219
1220     // Drivers that support slice level decoding
1221     if (vendor_string && dpb_size <= 16) {
1222         static const struct map drv_names[] = {
1223             { "Intel i965 driver", 17 },
1224             { NULL, 0 }
1225         };
1226         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1227             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1228                 add_high_profile = TRUE;
1229         }
1230     }
1231
1232     if (add_high_profile)
1233         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1234 }
1235
1236 static GstVaapiProfile
1237 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1238 {
1239     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1240     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1241     GstVaapiProfile profile, profiles[4];
1242     guint i, n_profiles = 0;
1243
1244     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1245     if (!profile)
1246         return GST_VAAPI_PROFILE_UNKNOWN;
1247
1248     fill_profiles(profiles, &n_profiles, profile);
1249     switch (profile) {
1250     case GST_VAAPI_PROFILE_H264_BASELINE:
1251         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1252             fill_profiles(profiles, &n_profiles,
1253                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1254             fill_profiles(profiles, &n_profiles,
1255                 GST_VAAPI_PROFILE_H264_MAIN);
1256         }
1257         break;
1258     case GST_VAAPI_PROFILE_H264_EXTENDED:
1259         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1260             fill_profiles(profiles, &n_profiles,
1261                 GST_VAAPI_PROFILE_H264_MAIN);
1262         }
1263         break;
1264     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1265         if (priv->max_views == 2) {
1266             fill_profiles(profiles, &n_profiles,
1267                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1268         }
1269         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1270         break;
1271     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1272         if (sps->frame_mbs_only_flag) {
1273             fill_profiles(profiles, &n_profiles,
1274                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1275         }
1276         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1277         break;
1278     default:
1279         break;
1280     }
1281
1282     /* If the preferred profile (profiles[0]) matches one that we already
1283        found, then just return it now instead of searching for it again */
1284     if (profiles[0] == priv->profile)
1285         return priv->profile;
1286
1287     for (i = 0; i < n_profiles; i++) {
1288         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1289             return profiles[i];
1290     }
1291     return GST_VAAPI_PROFILE_UNKNOWN;
1292 }
1293
1294 static GstVaapiDecoderStatus
1295 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1296 {
1297     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1298     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1299     GstVaapiContextInfo info;
1300     GstVaapiProfile profile;
1301     GstVaapiChromaType chroma_type;
1302     gboolean reset_context = FALSE;
1303     guint mb_width, mb_height, dpb_size;
1304
1305     dpb_size = get_max_dec_frame_buffering(sps);
1306     if (priv->dpb_size < dpb_size) {
1307         GST_DEBUG("DPB size increased");
1308         reset_context = TRUE;
1309     }
1310
1311     profile = get_profile(decoder, sps, dpb_size);
1312     if (!profile) {
1313         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1314         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1315     }
1316
1317     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1318         GST_DEBUG("profile changed");
1319         reset_context = TRUE;
1320         priv->profile = profile;
1321     }
1322
1323     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1324     if (!chroma_type) {
1325         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1326         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1327     }
1328
1329     if (priv->chroma_type != chroma_type) {
1330         GST_DEBUG("chroma format changed");
1331         reset_context     = TRUE;
1332         priv->chroma_type = chroma_type;
1333     }
1334
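    /* When frame_mbs_only_flag == 0, pic_height_in_map_units counts field
       rows (half the frame height in MBs), hence
       FrameHeightInMbs = (2 - frame_mbs_only_flag) * PicHeightInMapUnits. */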
1335     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1336     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1337         !sps->frame_mbs_only_flag;
1338     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1339         GST_DEBUG("size changed");
1340         reset_context   = TRUE;
1341         priv->mb_width  = mb_width;
1342         priv->mb_height = mb_height;
1343     }
1344
1345     priv->progressive_sequence = sps->frame_mbs_only_flag;
1346     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1347
1348     gst_vaapi_decoder_set_pixel_aspect_ratio(
1349         base_decoder,
1350         sps->vui_parameters.par_n,
1351         sps->vui_parameters.par_d
1352     );
1353
1354     if (!reset_context && priv->has_context)
1355         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1356
1357     /* XXX: fix surface size when cropping is implemented */
1358     info.profile    = priv->profile;
1359     info.entrypoint = priv->entrypoint;
1360     info.chroma_type = priv->chroma_type;
1361     info.width      = sps->width;
1362     info.height     = sps->height;
1363     info.ref_frames = dpb_size;
1364
1365     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1366         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1367     priv->has_context = TRUE;
1368
1369     /* Reset DPB */
1370     if (!dpb_reset(decoder, dpb_size))
1371         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1372
1373     /* Reset MVC data */
1374     if (!mvc_reset(decoder))
1375         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1376     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1377 }
1378
1379 static void
1380 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1381     const GstH264SPS *sps)
1382 {
1383     guint i;
1384
1385     /* There are always 6 4x4 scaling lists */
1386     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1387     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1388
1389     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1390         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1391             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1392 }
1393
1394 static void
1395 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1396     const GstH264SPS *sps)
1397 {
1398     guint i, n;
1399
1400     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1401     if (!pps->transform_8x8_mode_flag)
1402         return;
1403
1404     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1405     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1406
1407     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1408     for (i = 0; i < n; i++) {
1409         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1410             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1411     }
1412 }
1413
1414 static GstVaapiDecoderStatus
1415 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1416 {
1417     GstVaapiPicture * const base_picture = &picture->base;
1418     GstH264PPS * const pps = get_pps(decoder);
1419     GstH264SPS * const sps = get_sps(decoder);
1420     VAIQMatrixBufferH264 *iq_matrix;
1421
1422     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1423     if (!base_picture->iq_matrix) {
1424         GST_ERROR("failed to allocate IQ matrix");
1425         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1426     }
1427     iq_matrix = base_picture->iq_matrix->param;
1428
1429     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1430        is not large enough to hold lists for 4:4:4 */
1431     if (sps->chroma_format_idc == 3)
1432         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1433
1434     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1435     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1436
1437     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1438 }
1439
1440 static inline gboolean
1441 is_valid_state(guint state, guint ref_state)
1442 {
1443     return (state & ref_state) == ref_state;
1444 }
1445
1446 static GstVaapiDecoderStatus
1447 decode_current_picture(GstVaapiDecoderH264 *decoder)
1448 {
1449     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1450     GstVaapiPictureH264 * const picture = priv->current_picture;
1451
1452     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1453         goto drop_frame;
1454     priv->decoder_state = 0;
1455
1456     if (!picture)
1457         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1458
1459     if (!exec_ref_pic_marking(decoder, picture))
1460         goto error;
1461     if (!dpb_add(decoder, picture))
1462         goto error;
1463     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1464         goto error;
1465     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1466     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1467
1468 error:
1469     /* XXX: fix for cases where first field failed to be decoded */
1470     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1471     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1472
1473 drop_frame:
1474     priv->decoder_state = 0;
1475     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1476 }
1477
1478 static GstVaapiDecoderStatus
1479 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1480 {
1481     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1482     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1483     GstH264SPS * const sps = &pi->data.sps;
1484     GstH264ParserResult result;
1485
1486     GST_DEBUG("parse SPS");
1487
1488     priv->parser_state = 0;
1489
1490     /* Variables that don't have inferred values per the H.264
1491        standard but that should get a default value anyway */
1492     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1493
1494     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1495     if (result != GST_H264_PARSER_OK)
1496         return get_status(result);
1497
1498     /* Reset defaults */
1499     priv->max_views = 1;
1500
1501     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1502     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1503 }
1504
1505 static GstVaapiDecoderStatus
1506 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1507 {
1508     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1509     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1510     GstH264SPS * const sps = &pi->data.sps;
1511     GstH264ParserResult result;
1512
1513     GST_DEBUG("parse subset SPS");
1514
1515     /* Variables that don't have inferred values per the H.264
1516        standard but that should get a default value anyway */
1517     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1518
1519     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1520         TRUE);
1521     if (result != GST_H264_PARSER_OK)
1522         return get_status(result);
1523
1524     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1525     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1526 }
1527
1528 static GstVaapiDecoderStatus
1529 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1530 {
1531     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1532     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1533     GstH264PPS * const pps = &pi->data.pps;
1534     GstH264ParserResult result;
1535
1536     GST_DEBUG("parse PPS");
1537
1538     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1539
1540     /* Variables that don't have inferred values per the H.264
1541        standard but that should get a default value anyway */
1542     pps->slice_group_map_type = 0;
1543     pps->slice_group_change_rate_minus1 = 0;
1544
1545     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1546     if (result != GST_H264_PARSER_OK)
1547         return get_status(result);
1548
1549     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1550     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1551 }
1552
1553 static GstVaapiDecoderStatus
1554 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1555 {
1556     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1557     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1558     GArray ** const sei_ptr = &pi->data.sei;
1559     GstH264ParserResult result;
1560
1561     GST_DEBUG("parse SEI");
1562
1563     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1564     if (result != GST_H264_PARSER_OK) {
1565         GST_WARNING("failed to parse SEI messages");
1566         return get_status(result);
1567     }
1568     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1569 }
1570
1571 static GstVaapiDecoderStatus
1572 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1573 {
1574     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1575     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1576     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1577     GstH264NalUnit * const nalu = &pi->nalu;
1578     GstH264SPS *sps;
1579     GstH264ParserResult result;
1580     guint num_views;
1581
1582     GST_DEBUG("parse slice");
1583
1584     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1585                            GST_H264_VIDEO_STATE_GOT_PPS);
1586
1587     /* Propagate Prefix NAL unit info, if necessary */
1588     switch (nalu->type) {
1589     case GST_H264_NAL_SLICE:
1590     case GST_H264_NAL_SLICE_IDR: {
1591         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1592         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1593             /* MVC sequences shall have a Prefix NAL unit immediately
1594                preceding this NAL unit */
1595             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1596             pi->nalu.extension = prev_pi->nalu.extension;
1597         }
1598         else {
1599             /* In the very unlikely case there is no Prefix NAL unit
1600                immediately preceding this NAL unit, try to infer some
1601                defaults (H.7.4.1.1) */
1602             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1603             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1604             nalu->idr_pic_flag = !mvc->non_idr_flag;
1605             mvc->priority_id = 0;
1606             mvc->view_id = 0;
1607             mvc->temporal_id = 0;
1608             mvc->anchor_pic_flag = 0;
1609             mvc->inter_view_flag = 1;
1610         }
1611         break;
1612     }
1613     }
1614
1615     /* Variables that don't have inferred values per the H.264
1616        standard but that should get a default value anyway */
1617     slice_hdr->cabac_init_idc = 0;
1618     slice_hdr->direct_spatial_mv_pred_flag = 0;
1619
1620     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1621         slice_hdr, TRUE, TRUE);
1622     if (result != GST_H264_PARSER_OK)
1623         return get_status(result);
1624
1625     sps = slice_hdr->pps->sequence;
1626
1627     /* Update MVC data */
1628     num_views = get_num_views(sps);
1629     if (priv->max_views < num_views) {
1630         priv->max_views = num_views;
1631         GST_DEBUG("maximum number of views changed to %u", num_views);
1632     }
1633     pi->view_id = get_view_id(&pi->nalu);
1634     pi->voc = get_view_order_index(sps, pi->view_id);
1635
1636     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1637     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1638 }
1639
1640 static GstVaapiDecoderStatus
1641 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1642 {
1643     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1644     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1645     GstH264SPS * const sps = &pi->data.sps;
1646
1647     GST_DEBUG("decode SPS");
1648
1649     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1650     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1651 }
1652
1653 static GstVaapiDecoderStatus
1654 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1655 {
1656     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1657     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1658     GstH264SPS * const sps = &pi->data.sps;
1659
1660     GST_DEBUG("decode subset SPS");
1661
1662     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1663     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1664 }
1665
1666 static GstVaapiDecoderStatus
1667 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1668 {
1669     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1670     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1671     GstH264PPS * const pps = &pi->data.pps;
1672
1673     GST_DEBUG("decode PPS");
1674
1675     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1676     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1677 }
1678
1679 static GstVaapiDecoderStatus
1680 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1681 {
1682     GstVaapiDecoderStatus status;
1683
1684     GST_DEBUG("decode sequence-end");
1685
1686     status = decode_current_picture(decoder);
1687     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1688         return status;
1689
1690     dpb_flush(decoder, NULL);
1691     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1692 }
1693
1694 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1695 static void
1696 init_picture_poc_0(
1697     GstVaapiDecoderH264 *decoder,
1698     GstVaapiPictureH264 *picture,
1699     GstH264SliceHdr     *slice_hdr
1700 )
1701 {
1702     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1703     GstH264SPS * const sps = get_sps(decoder);
1704     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1705     gint32 temp_poc;
1706
1707     GST_DEBUG("decode picture order count type 0");
1708
1709     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1710         priv->prev_poc_msb = 0;
1711         priv->prev_poc_lsb = 0;
1712     }
1713     else if (priv->prev_pic_has_mmco5) {
1714         priv->prev_poc_msb = 0;
1715         priv->prev_poc_lsb =
1716             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1717              0 : priv->field_poc[TOP_FIELD]);
1718     }
1719     else {
1720         priv->prev_poc_msb = priv->poc_msb;
1721         priv->prev_poc_lsb = priv->poc_lsb;
1722     }
1723
1724     // (8-3)
1725     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1726     if (priv->poc_lsb < priv->prev_poc_lsb &&
1727         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1728         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1729     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1730              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1731         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1732     else
1733         priv->poc_msb = priv->prev_poc_msb;
1734
1735     temp_poc = priv->poc_msb + priv->poc_lsb;
1736     switch (picture->structure) {
1737     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1738         // (8-4, 8-5)
1739         priv->field_poc[TOP_FIELD] = temp_poc;
1740         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1741             slice_hdr->delta_pic_order_cnt_bottom;
1742         break;
1743     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1744         // (8-4)
1745         priv->field_poc[TOP_FIELD] = temp_poc;
1746         break;
1747     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1748         // (8-5)
1749         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1750         break;
1751     }
1752 }
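
/* Worked example for the POC type 0 computation above (illustrative values):
   with log2_max_pic_order_cnt_lsb_minus4 = 0, MaxPicOrderCntLsb = 16. If the
   previous reference picture left prev_poc_lsb = 14 and prev_poc_msb = 0, and
   the current slice signals pic_order_cnt_lsb = 2, then 2 < 14 and
   (14 - 2) >= 16 / 2, so (8-3) wraps poc_msb up to 0 + 16 = 16 and
   temp_poc = 16 + 2 = 18. For a frame picture, both field POCs then derive
   from temp_poc per (8-4) and (8-5) */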
1753
1754 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1755 static void
1756 init_picture_poc_1(
1757     GstVaapiDecoderH264 *decoder,
1758     GstVaapiPictureH264 *picture,
1759     GstH264SliceHdr     *slice_hdr
1760 )
1761 {
1762     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1763     GstH264SPS * const sps = get_sps(decoder);
1764     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1765     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1766     guint i;
1767
1768     GST_DEBUG("decode picture order count type 1");
1769
1770     if (priv->prev_pic_has_mmco5)
1771         prev_frame_num_offset = 0;
1772     else
1773         prev_frame_num_offset = priv->frame_num_offset;
1774
1775     // (8-6)
1776     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1777         priv->frame_num_offset = 0;
1778     else if (priv->prev_frame_num > priv->frame_num)
1779         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1780     else
1781         priv->frame_num_offset = prev_frame_num_offset;
1782
1783     // (8-7)
1784     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1785         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1786     else
1787         abs_frame_num = 0;
1788     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1789         abs_frame_num = abs_frame_num - 1;
1790
1791     if (abs_frame_num > 0) {
1792         gint32 expected_delta_per_poc_cycle;
1793         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1794
1795         expected_delta_per_poc_cycle = 0;
1796         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1797             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1798
1799         // (8-8)
1800         poc_cycle_cnt = (abs_frame_num - 1) /
1801             sps->num_ref_frames_in_pic_order_cnt_cycle;
1802         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1803             sps->num_ref_frames_in_pic_order_cnt_cycle;
1804
1805         // (8-9)
1806         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1807         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1808             expected_poc += sps->offset_for_ref_frame[i];
1809     }
1810     else
1811         expected_poc = 0;
1812     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1813         expected_poc += sps->offset_for_non_ref_pic;
1814
1815     // (8-10)
1816     switch (picture->structure) {
1817     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1818         priv->field_poc[TOP_FIELD] = expected_poc +
1819             slice_hdr->delta_pic_order_cnt[0];
1820         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1821             sps->offset_for_top_to_bottom_field +
1822             slice_hdr->delta_pic_order_cnt[1];
1823         break;
1824     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1825         priv->field_poc[TOP_FIELD] = expected_poc +
1826             slice_hdr->delta_pic_order_cnt[0];
1827         break;
1828     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1829         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1830             sps->offset_for_top_to_bottom_field +
1831             slice_hdr->delta_pic_order_cnt[0];
1832         break;
1833     }
1834 }
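
/* Worked example for POC type 1 (illustrative values): with
   num_ref_frames_in_pic_order_cnt_cycle = 2 and offset_for_ref_frame[] =
   { 2, 3 }, expected_delta_per_poc_cycle = 5. For a reference picture with
   abs_frame_num = 5, (8-8) gives poc_cycle_cnt = (5 - 1) / 2 = 2 and
   frame_num_in_poc_cycle = 0, so (8-9) yields expected_poc = 2 * 5 + 2 = 12,
   before the per-structure deltas of (8-10) are applied */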
1835
1836 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1837 static void
1838 init_picture_poc_2(
1839     GstVaapiDecoderH264 *decoder,
1840     GstVaapiPictureH264 *picture,
1841     GstH264SliceHdr     *slice_hdr
1842 )
1843 {
1844     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1845     GstH264SPS * const sps = get_sps(decoder);
1846     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1847     gint32 prev_frame_num_offset, temp_poc;
1848
1849     GST_DEBUG("decode picture order count type 2");
1850
1851     if (priv->prev_pic_has_mmco5)
1852         prev_frame_num_offset = 0;
1853     else
1854         prev_frame_num_offset = priv->frame_num_offset;
1855
1856     // (8-11)
1857     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1858         priv->frame_num_offset = 0;
1859     else if (priv->prev_frame_num > priv->frame_num)
1860         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1861     else
1862         priv->frame_num_offset = prev_frame_num_offset;
1863
1864     // (8-12)
1865     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1866         temp_poc = 0;
1867     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1868         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1869     else
1870         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1871
1872     // (8-13)
1873     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1874         priv->field_poc[TOP_FIELD] = temp_poc;
1875     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1876         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1877 }
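
/* Worked example for POC type 2 (illustrative values): with
   frame_num_offset = 0 and frame_num = 3, (8-12) gives temp_poc = 6 for a
   reference picture and temp_poc = 5 for a non-reference picture; (8-13)
   then assigns this value to both field POCs for a frame picture */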
1878
1879 /* 8.2.1 - Decoding process for picture order count */
1880 static void
1881 init_picture_poc(
1882     GstVaapiDecoderH264 *decoder,
1883     GstVaapiPictureH264 *picture,
1884     GstH264SliceHdr     *slice_hdr
1885 )
1886 {
1887     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1888     GstH264SPS * const sps = get_sps(decoder);
1889
1890     switch (sps->pic_order_cnt_type) {
1891     case 0:
1892         init_picture_poc_0(decoder, picture, slice_hdr);
1893         break;
1894     case 1:
1895         init_picture_poc_1(decoder, picture, slice_hdr);
1896         break;
1897     case 2:
1898         init_picture_poc_2(decoder, picture, slice_hdr);
1899         break;
1900     }
1901
1902     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1903         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1904     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1905         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1906     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1907 }
1908
1909 static int
1910 compare_picture_pic_num_dec(const void *a, const void *b)
1911 {
1912     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1913     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1914
1915     return picB->pic_num - picA->pic_num;
1916 }
1917
1918 static int
1919 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1920 {
1921     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1922     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1923
1924     return picA->long_term_pic_num - picB->long_term_pic_num;
1925 }
1926
1927 static int
1928 compare_picture_poc_dec(const void *a, const void *b)
1929 {
1930     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1931     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1932
1933     return picB->base.poc - picA->base.poc;
1934 }
1935
1936 static int
1937 compare_picture_poc_inc(const void *a, const void *b)
1938 {
1939     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1940     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1941
1942     return picA->base.poc - picB->base.poc;
1943 }
1944
1945 static int
1946 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1947 {
1948     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1949     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1950
1951     return picB->frame_num_wrap - picA->frame_num_wrap;
1952 }
1953
1954 static int
1955 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1956 {
1957     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1958     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1959
1960     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1961 }
1962
1963 /* 8.2.4.1 - Decoding process for picture numbers */
1964 static void
1965 init_picture_refs_pic_num(
1966     GstVaapiDecoderH264 *decoder,
1967     GstVaapiPictureH264 *picture,
1968     GstH264SliceHdr     *slice_hdr
1969 )
1970 {
1971     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1972     GstH264SPS * const sps = get_sps(decoder);
1973     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1974     guint i;
1975
1976     GST_DEBUG("decode picture numbers");
1977
1978     for (i = 0; i < priv->short_ref_count; i++) {
1979         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1980
1981         // (H.8.2)
1982         if (pic->base.view_id != picture->base.view_id)
1983             continue;
1984
1985         // (8-27)
1986         if (pic->frame_num > priv->frame_num)
1987             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1988         else
1989             pic->frame_num_wrap = pic->frame_num;
1990
1991         // (8-28, 8-30, 8-31)
1992         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1993             pic->pic_num = pic->frame_num_wrap;
1994         else {
1995             if (pic->structure == picture->structure)
1996                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1997             else
1998                 pic->pic_num = 2 * pic->frame_num_wrap;
1999         }
2000     }
2001
2002     for (i = 0; i < priv->long_ref_count; i++) {
2003         GstVaapiPictureH264 * const pic = priv->long_ref[i];
2004
2005         // (H.8.2)
2006         if (pic->base.view_id != picture->base.view_id)
2007             continue;
2008
2009         // (8-29, 8-32, 8-33)
2010         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2011             pic->long_term_pic_num = pic->long_term_frame_idx;
2012         else {
2013             if (pic->structure == picture->structure)
2014                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2015             else
2016                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2017         }
2018     }
2019 }
2020
2021 #define SORT_REF_LIST(list, n, compare_func) \
2022     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
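/* For instance, SORT_REF_LIST(priv->RefPicList0, n, poc_dec) expands to
   qsort(priv->RefPicList0, n, sizeof(*(priv->RefPicList0)),
   compare_picture_poc_dec), i.e. the list is sorted by decreasing POC using
   the comparison helpers above */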
2023
2024 static void
2025 init_picture_refs_fields_1(
2026     guint                picture_structure,
2027     GstVaapiPictureH264 *RefPicList[32],
2028     guint               *RefPicList_count,
2029     GstVaapiPictureH264 *ref_list[32],
2030     guint                ref_list_count
2031 )
2032 {
2033     guint i, j, n;
2034
2035     i = 0;
2036     j = 0;
2037     n = *RefPicList_count;
2038     do {
2039         g_assert(n < 32);
2040         for (; i < ref_list_count; i++) {
2041             if (ref_list[i]->structure == picture_structure) {
2042                 RefPicList[n++] = ref_list[i++];
2043                 break;
2044             }
2045         }
2046         for (; j < ref_list_count; j++) {
2047             if (ref_list[j]->structure != picture_structure) {
2048                 RefPicList[n++] = ref_list[j++];
2049                 break;
2050             }
2051         }
2052     } while (i < ref_list_count || j < ref_list_count);
2053     *RefPicList_count = n;
2054 }
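
/* Illustrative example of the interleaving above: with picture_structure =
   TOP_FIELD and ref_list[] = { B0, T1, B2 } (hypothetical fields in frame
   order), the loop alternately picks the next same-parity field and the next
   opposite-parity field, yielding RefPicList[] = { T1, B0, B2 } */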
2055
2056 static inline void
2057 init_picture_refs_fields(
2058     GstVaapiPictureH264 *picture,
2059     GstVaapiPictureH264 *RefPicList[32],
2060     guint               *RefPicList_count,
2061     GstVaapiPictureH264 *short_ref[32],
2062     guint                short_ref_count,
2063     GstVaapiPictureH264 *long_ref[32],
2064     guint                long_ref_count
2065 )
2066 {
2067     guint n = 0;
2068
2069     /* 8.2.4.2.5 - reference picture lists in fields */
2070     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2071         short_ref, short_ref_count);
2072     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2073         long_ref, long_ref_count);
2074     *RefPicList_count = n;
2075 }
2076
2077 /* Finds the inter-view reference picture with the supplied view id */
2078 static GstVaapiPictureH264 *
2079 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2080 {
2081     GPtrArray * const inter_views = decoder->priv.inter_views;
2082     guint i;
2083
2084     for (i = 0; i < inter_views->len; i++) {
2085         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2086         if (picture->base.view_id == view_id)
2087             return picture;
2088     }
2089
2090     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2091         view_id);
2092     return NULL;
2093 }
2094
2095 /* Checks whether the view id exists in the supplied list of view ids */
2096 static gboolean
2097 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2098 {
2099     guint i;
2100
2101     for (i = 0; i < num_view_ids; i++) {
2102         if (view_ids[i] == view_id)
2103             return TRUE;
2104     }
2105     return FALSE;
2106 }
2107
2108 static gboolean
2109 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2110     gboolean is_anchor)
2111 {
2112     if (is_anchor)
2113         return (find_view_id(view_id, view->anchor_ref_l0,
2114                     view->num_anchor_refs_l0) ||
2115                 find_view_id(view_id, view->anchor_ref_l1,
2116                     view->num_anchor_refs_l1));
2117
2118     return (find_view_id(view_id, view->non_anchor_ref_l0,
2119                 view->num_non_anchor_refs_l0) ||
2120             find_view_id(view_id, view->non_anchor_ref_l1,
2121                 view->num_non_anchor_refs_l1));
2122 }
2123
2124 /* Checks whether the inter-view reference picture with the supplied
2125    view id is used for decoding the current view component picture */
2126 static gboolean
2127 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2128     guint16 view_id, GstVaapiPictureH264 *picture)
2129 {
2130     const GstH264SPS * const sps = get_sps(decoder);
2131     gboolean is_anchor;
2132
2133     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2134         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2135         return FALSE;
2136
2137     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2138     return find_view_id_in_view(view_id,
2139         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2140 }
2141
2142 /* Checks whether the supplied inter-view reference picture is used
2143    for decoding the next view component pictures */
2144 static gboolean
2145 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2146     GstVaapiPictureH264 *picture)
2147 {
2148     const GstH264SPS * const sps = get_sps(decoder);
2149     gboolean is_anchor;
2150     guint i, num_views;
2151
2152     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2153         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2154         return FALSE;
2155
2156     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2157     num_views = sps->extension.mvc.num_views_minus1 + 1;
2158     for (i = picture->base.voc + 1; i < num_views; i++) {
2159         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2160         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2161             return TRUE;
2162     }
2163     return FALSE;
2164 }
2165
2166 /* H.8.2.1 - Initialization process for inter-view prediction references */
2167 static void
2168 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2169     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2170     const guint16 *view_ids, guint num_view_ids)
2171 {
2172     guint j, n;
2173
2174     n = *ref_list_count_ptr;
2175     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2176         GstVaapiPictureH264 * const pic =
2177             find_inter_view_reference(decoder, view_ids[j]);
2178         if (pic)
2179             ref_list[n++] = pic;
2180     }
2181     *ref_list_count_ptr = n;
2182 }
2183
2184 static inline void
2185 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2186     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2187 {
2188     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2189     const GstH264SPS * const sps = get_sps(decoder);
2190     const GstH264SPSExtMVCView *view;
2191
2192     GST_DEBUG("initialize reference picture list for inter-view prediction");
2193
2194     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2195         return;
2196     view = &sps->extension.mvc.view[picture->base.voc];
2197
2198 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2199         init_picture_refs_mvc_1(decoder,                                \
2200             priv->RefPicList##ref_list,                                 \
2201             &priv->RefPicList##ref_list##_count,                        \
2202             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2203             view->view_list##_l##ref_list,                              \
2204             view->num_##view_list##s_l##ref_list);                      \
2205     } while (0)
2206
2207     if (list == 0) {
2208         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2209             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2210         else
2211             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2212     }
2213     else {
2214         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2215             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2216         else
2217             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2218     }
2219
2220 #undef INVOKE_INIT_PICTURE_REFS_MVC
2221 }
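
/* For reference, INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref) above expands to:
       init_picture_refs_mvc_1(decoder,
           priv->RefPicList0,
           &priv->RefPicList0_count,
           slice_hdr->num_ref_idx_l0_active_minus1 + 1,
           view->anchor_ref_l0,
           view->num_anchor_refs_l0);
   i.e. the inter-view references listed in the SPS MVC extension for the
   current view order index are appended to RefPicList0, up to the number of
   active references */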
2222
2223 static void
2224 init_picture_refs_p_slice(
2225     GstVaapiDecoderH264 *decoder,
2226     GstVaapiPictureH264 *picture,
2227     GstH264SliceHdr     *slice_hdr
2228 )
2229 {
2230     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2231     GstVaapiPictureH264 **ref_list;
2232     guint i;
2233
2234     GST_DEBUG("decode reference picture list for P and SP slices");
2235
2236     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2237         /* 8.2.4.2.1 - P and SP slices in frames */
2238         if (priv->short_ref_count > 0) {
2239             ref_list = priv->RefPicList0;
2240             for (i = 0; i < priv->short_ref_count; i++)
2241                 ref_list[i] = priv->short_ref[i];
2242             SORT_REF_LIST(ref_list, i, pic_num_dec);
2243             priv->RefPicList0_count += i;
2244         }
2245
2246         if (priv->long_ref_count > 0) {
2247             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2248             for (i = 0; i < priv->long_ref_count; i++)
2249                 ref_list[i] = priv->long_ref[i];
2250             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2251             priv->RefPicList0_count += i;
2252         }
2253     }
2254     else {
2255         /* 8.2.4.2.2 - P and SP slices in fields */
2256         GstVaapiPictureH264 *short_ref[32];
2257         guint short_ref_count = 0;
2258         GstVaapiPictureH264 *long_ref[32];
2259         guint long_ref_count = 0;
2260
2261         if (priv->short_ref_count > 0) {
2262             for (i = 0; i < priv->short_ref_count; i++)
2263                 short_ref[i] = priv->short_ref[i];
2264             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2265             short_ref_count = i;
2266         }
2267
2268         if (priv->long_ref_count > 0) {
2269             for (i = 0; i < priv->long_ref_count; i++)
2270                 long_ref[i] = priv->long_ref[i];
2271             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2272             long_ref_count = i;
2273         }
2274
2275         init_picture_refs_fields(
2276             picture,
2277             priv->RefPicList0, &priv->RefPicList0_count,
2278             short_ref,          short_ref_count,
2279             long_ref,           long_ref_count
2280         );
2281     }
2282
2283     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2284         /* RefPicList0 */
2285         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2286     }
2287 }
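
/* Net effect for a P slice in a frame (illustrative values): with short-term
   PicNum values { 3, 1, 5 } and long-term LongTermPicNum values { 0, 2 },
   the code above builds RefPicList0 as PicNum { 5, 3, 1 } followed by
   LongTermPicNum { 0, 2 }, i.e. short-term references by decreasing PicNum,
   then long-term references by increasing LongTermPicNum (8.2.4.2.1) */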
2288
2289 static void
2290 init_picture_refs_b_slice(
2291     GstVaapiDecoderH264 *decoder,
2292     GstVaapiPictureH264 *picture,
2293     GstH264SliceHdr     *slice_hdr
2294 )
2295 {
2296     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2297     GstVaapiPictureH264 **ref_list;
2298     guint i, n;
2299
2300     GST_DEBUG("decode reference picture list for B slices");
2301
2302     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2303         /* 8.2.4.2.3 - B slices in frames */
2304
2305         /* RefPicList0 */
2306         if (priv->short_ref_count > 0) {
2307             // 1. Short-term references
2308             ref_list = priv->RefPicList0;
2309             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2310                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2311                     ref_list[n++] = priv->short_ref[i];
2312             }
2313             SORT_REF_LIST(ref_list, n, poc_dec);
2314             priv->RefPicList0_count += n;
2315
2316             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2317             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2318                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2319                     ref_list[n++] = priv->short_ref[i];
2320             }
2321             SORT_REF_LIST(ref_list, n, poc_inc);
2322             priv->RefPicList0_count += n;
2323         }
2324
2325         if (priv->long_ref_count > 0) {
2326             // 2. Long-term references
2327             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2328             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2329                 ref_list[n++] = priv->long_ref[i];
2330             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2331             priv->RefPicList0_count += n;
2332         }
2333
2334         /* RefPicList1 */
2335         if (priv->short_ref_count > 0) {
2336             // 1. Short-term references
2337             ref_list = priv->RefPicList1;
2338             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2339                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2340                     ref_list[n++] = priv->short_ref[i];
2341             }
2342             SORT_REF_LIST(ref_list, n, poc_inc);
2343             priv->RefPicList1_count += n;
2344
2345             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2346             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2347                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2348                     ref_list[n++] = priv->short_ref[i];
2349             }
2350             SORT_REF_LIST(ref_list, n, poc_dec);
2351             priv->RefPicList1_count += n;
2352         }
2353
2354         if (priv->long_ref_count > 0) {
2355             // 2. Long-term references
2356             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2357             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2358                 ref_list[n++] = priv->long_ref[i];
2359             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2360             priv->RefPicList1_count += n;
2361         }
2362     }
2363     else {
2364         /* 8.2.4.2.4 - B slices in fields */
2365         GstVaapiPictureH264 *short_ref0[32];
2366         guint short_ref0_count = 0;
2367         GstVaapiPictureH264 *short_ref1[32];
2368         guint short_ref1_count = 0;
2369         GstVaapiPictureH264 *long_ref[32];
2370         guint long_ref_count = 0;
2371
2372         /* refFrameList0ShortTerm */
2373         if (priv->short_ref_count > 0) {
2374             ref_list = short_ref0;
2375             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2376                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2377                     ref_list[n++] = priv->short_ref[i];
2378             }
2379             SORT_REF_LIST(ref_list, n, poc_dec);
2380             short_ref0_count += n;
2381
2382             ref_list = &short_ref0[short_ref0_count];
2383             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2384                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2385                     ref_list[n++] = priv->short_ref[i];
2386             }
2387             SORT_REF_LIST(ref_list, n, poc_inc);
2388             short_ref0_count += n;
2389         }
2390
2391         /* refFrameList1ShortTerm */
2392         if (priv->short_ref_count > 0) {
2393             ref_list = short_ref1;
2394             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2395                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2396                     ref_list[n++] = priv->short_ref[i];
2397             }
2398             SORT_REF_LIST(ref_list, n, poc_inc);
2399             short_ref1_count += n;
2400
2401             ref_list = &short_ref1[short_ref1_count];
2402             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2403                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2404                     ref_list[n++] = priv->short_ref[i];
2405             }
2406             SORT_REF_LIST(ref_list, n, poc_dec);
2407             short_ref1_count += n;
2408         }
2409
2410         /* refFrameListLongTerm */
2411         if (priv->long_ref_count > 0) {
2412             for (i = 0; i < priv->long_ref_count; i++)
2413                 long_ref[i] = priv->long_ref[i];
2414             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2415             long_ref_count = i;
2416         }
2417
2418         init_picture_refs_fields(
2419             picture,
2420             priv->RefPicList0, &priv->RefPicList0_count,
2421             short_ref0,         short_ref0_count,
2422             long_ref,           long_ref_count
2423         );
2424
2425         init_picture_refs_fields(
2426             picture,
2427             priv->RefPicList1, &priv->RefPicList1_count,
2428             short_ref1,         short_ref1_count,
2429             long_ref,           long_ref_count
2430         );
2431     }
2432
2433     /* Check whether RefPicList1 is identical to RefPicList0, then
2434        swap if necessary */
2435     if (priv->RefPicList1_count > 1 &&
2436         priv->RefPicList1_count == priv->RefPicList0_count &&
2437         memcmp(priv->RefPicList0, priv->RefPicList1,
2438                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2439         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2440         priv->RefPicList1[0] = priv->RefPicList1[1];
2441         priv->RefPicList1[1] = tmp;
2442     }
2443
2444     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2445         /* RefPicList0 */
2446         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2447
2448         /* RefPicList1 */
2449         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2450     }
2451 }
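
/* Net effect for a B slice in a frame (illustrative values): with current
   POC = 4 and short-term reference POCs { 0, 2, 6, 8 }, the code above
   yields RefPicList0 = POC { 2, 0, 6, 8 } and RefPicList1 = POC { 6, 8, 2, 0 },
   each followed by any long-term references in increasing LongTermPicNum
   order. If both lists come out identical with more than one entry, the
   first two entries of RefPicList1 are swapped */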
2452
2453 #undef SORT_REF_LIST
2454
2455 static gint
2456 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2457 {
2458     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2459     guint i;
2460
2461     for (i = 0; i < priv->short_ref_count; i++) {
2462         if (priv->short_ref[i]->pic_num == pic_num)
2463             return i;
2464     }
2465     GST_ERROR("found no short-term reference picture with PicNum = %d",
2466               pic_num);
2467     return -1;
2468 }
2469
2470 static gint
2471 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2472 {
2473     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2474     guint i;
2475
2476     for (i = 0; i < priv->long_ref_count; i++) {
2477         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2478             return i;
2479     }
2480     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2481               long_term_pic_num);
2482     return -1;
2483 }
2484
2485 static void
2486 exec_picture_refs_modification_1(
2487     GstVaapiDecoderH264           *decoder,
2488     GstVaapiPictureH264           *picture,
2489     GstH264SliceHdr               *slice_hdr,
2490     guint                          list
2491 )
2492 {
2493     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2494     GstH264SPS * const sps = get_sps(decoder);
2495     GstH264RefPicListModification *ref_pic_list_modification;
2496     guint num_ref_pic_list_modifications;
2497     GstVaapiPictureH264 **ref_list;
2498     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2499     const guint16 *view_ids = NULL;
2500     guint i, j, n, num_refs, num_view_ids = 0;
2501     gint found_ref_idx;
2502     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2503
2504     GST_DEBUG("modification process of reference picture list %u", list);
2505
2506     if (list == 0) {
2507         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2508         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2509         ref_list                       = priv->RefPicList0;
2510         ref_list_count_ptr             = &priv->RefPicList0_count;
2511         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2512
2513         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2514             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2515             const GstH264SPSExtMVCView * const view =
2516                 &sps->extension.mvc.view[picture->base.voc];
2517             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2518                 view_ids = view->anchor_ref_l0;
2519                 num_view_ids = view->num_anchor_refs_l0;
2520             }
2521             else {
2522                 view_ids = view->non_anchor_ref_l0;
2523                 num_view_ids = view->num_non_anchor_refs_l0;
2524             }
2525         }
2526     }
2527     else {
2528         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2529         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2530         ref_list                       = priv->RefPicList1;
2531         ref_list_count_ptr             = &priv->RefPicList1_count;
2532         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2533
2534         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2535             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2536             const GstH264SPSExtMVCView * const view =
2537                 &sps->extension.mvc.view[picture->base.voc];
2538             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2539                 view_ids = view->anchor_ref_l1;
2540                 num_view_ids = view->num_anchor_refs_l1;
2541             }
2542             else {
2543                 view_ids = view->non_anchor_ref_l1;
2544                 num_view_ids = view->num_non_anchor_refs_l1;
2545             }
2546         }
2547     }
2548     ref_list_count = *ref_list_count_ptr;
2549
2550     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2551         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2552         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2553     }
2554     else {
2555         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2556         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2557     }
2558
2559     picNumPred = CurrPicNum;
2560     picViewIdxPred = -1;
2561
2562     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2563         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2564         if (l->modification_of_pic_nums_idc == 3)
2565             break;
2566
2567         /* 8.2.4.3.1 - Short-term reference pictures */
2568         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2569             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2570             gint32 picNum, picNumNoWrap;
2571
2572             // (8-34)
2573             if (l->modification_of_pic_nums_idc == 0) {
2574                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2575                 if (picNumNoWrap < 0)
2576                     picNumNoWrap += MaxPicNum;
2577             }
2578
2579             // (8-35)
2580             else {
2581                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2582                 if (picNumNoWrap >= MaxPicNum)
2583                     picNumNoWrap -= MaxPicNum;
2584             }
2585             picNumPred = picNumNoWrap;
2586
2587             // (8-36)
2588             picNum = picNumNoWrap;
2589             if (picNum > CurrPicNum)
2590                 picNum -= MaxPicNum;
2591
2592             // (8-37)
2593             for (j = num_refs; j > ref_list_idx; j--)
2594                 ref_list[j] = ref_list[j - 1];
2595             found_ref_idx = find_short_term_reference(decoder, picNum);
2596             ref_list[ref_list_idx++] =
2597                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2598             n = ref_list_idx;
2599             for (j = ref_list_idx; j <= num_refs; j++) {
2600                 gint32 PicNumF;
2601                 if (!ref_list[j])
2602                     continue;
2603                 PicNumF =
2604                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2605                     ref_list[j]->pic_num : MaxPicNum;
2606                 if (PicNumF != picNum ||
2607                     ref_list[j]->base.view_id != picture->base.view_id)
2608                     ref_list[n++] = ref_list[j];
2609             }
2610         }
2611
2612         /* 8.2.4.3.2 - Long-term reference pictures */
2613         else if (l->modification_of_pic_nums_idc == 2) {
2614
2615             for (j = num_refs; j > ref_list_idx; j--)
2616                 ref_list[j] = ref_list[j - 1];
2617             found_ref_idx =
2618                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2619             ref_list[ref_list_idx++] =
2620                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2621             n = ref_list_idx;
2622             for (j = ref_list_idx; j <= num_refs; j++) {
2623                 gint32 LongTermPicNumF;
2624                 if (!ref_list[j])
2625                     continue;
2626                 LongTermPicNumF =
2627                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2628                     ref_list[j]->long_term_pic_num : INT_MAX;
2629                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2630                     ref_list[j]->base.view_id != picture->base.view_id)
2631                     ref_list[n++] = ref_list[j];
2632             }
2633         }
2634
2635         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2636         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2637                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2638                  (l->modification_of_pic_nums_idc == 4 ||
2639                   l->modification_of_pic_nums_idc == 5)) {
2640             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2641             gint32 picViewIdx, targetViewId;
2642
2643             // (H-6)
2644             if (l->modification_of_pic_nums_idc == 4) {
2645                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2646                 if (picViewIdx < 0)
2647                     picViewIdx += num_view_ids;
2648             }
2649
2650             // (H-7)
2651             else {
2652                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2653                 if (picViewIdx >= num_view_ids)
2654                     picViewIdx -= num_view_ids;
2655             }
2656             picViewIdxPred = picViewIdx;
2657
2658             // (H-8, H-9)
2659             targetViewId = view_ids[picViewIdx];
2660
2661             // (H-10)
2662             for (j = num_refs; j > ref_list_idx; j--)
2663                 ref_list[j] = ref_list[j - 1];
2664             ref_list[ref_list_idx++] =
2665                 find_inter_view_reference(decoder, targetViewId);
2666             n = ref_list_idx;
2667             for (j = ref_list_idx; j <= num_refs; j++) {
2668                 if (!ref_list[j])
2669                     continue;
2670                 if (ref_list[j]->base.view_id != targetViewId ||
2671                     ref_list[j]->base.poc != picture->base.poc)
2672                     ref_list[n++] = ref_list[j];
2673             }
2674         }
2675     }
2676
2677 #if DEBUG
2678     for (i = 0; i < num_refs; i++)
2679         if (!ref_list[i])
2680             GST_ERROR("list %u entry %u is empty", list, i);
2681 #endif
2682     *ref_list_count_ptr = num_refs;
2683 }
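
/* Worked example for the short-term case above (illustrative values): for a
   frame with log2_max_frame_num_minus4 = 0, MaxPicNum = 16 and
   CurrPicNum = frame_num = 3. A modification_of_pic_nums_idc of 0 with
   abs_diff_pic_num_minus1 = 4 gives picNumNoWrap = 3 - 5 + 16 = 14 per
   (8-34), then picNum = 14 - 16 = -2 per (8-36) since 14 > CurrPicNum. The
   short-term reference with PicNum = -2 is inserted at the current
   modification index and the remaining entries are shifted per (8-37) */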
2684
2685 /* 8.2.4.3 - Modification process for reference picture lists */
2686 static void
2687 exec_picture_refs_modification(
2688     GstVaapiDecoderH264 *decoder,
2689     GstVaapiPictureH264 *picture,
2690     GstH264SliceHdr     *slice_hdr
2691 )
2692 {
2693     GST_DEBUG("execute ref_pic_list_modification()");
2694
2695     /* RefPicList0 */
2696     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2697         slice_hdr->ref_pic_list_modification_flag_l0)
2698         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2699
2700     /* RefPicList1 */
2701     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2702         slice_hdr->ref_pic_list_modification_flag_l1)
2703         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2704 }
2705
2706 static void
2707 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2708     GstVaapiPictureH264 *picture)
2709 {
2710     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2711     guint i, j, short_ref_count, long_ref_count;
2712
2713     short_ref_count = 0;
2714     long_ref_count  = 0;
2715     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2716         for (i = 0; i < priv->dpb_count; i++) {
2717             GstVaapiFrameStore * const fs = priv->dpb[i];
2718             GstVaapiPictureH264 *pic;
2719             if (!gst_vaapi_frame_store_has_frame(fs))
2720                 continue;
2721             pic = fs->buffers[0];
2722             if (pic->base.view_id != picture->base.view_id)
2723                 continue;
2724             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2725                 priv->short_ref[short_ref_count++] = pic;
2726             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2727                 priv->long_ref[long_ref_count++] = pic;
2728             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2729             pic->other_field = fs->buffers[1];
2730         }
2731     }
2732     else {
2733         for (i = 0; i < priv->dpb_count; i++) {
2734             GstVaapiFrameStore * const fs = priv->dpb[i];
2735             for (j = 0; j < fs->num_buffers; j++) {
2736                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2737                 if (pic->base.view_id != picture->base.view_id)
2738                     continue;
2739                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2740                     priv->short_ref[short_ref_count++] = pic;
2741                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2742                     priv->long_ref[long_ref_count++] = pic;
2743                 pic->structure = pic->base.structure;
2744                 pic->other_field = fs->buffers[j ^ 1];
2745             }
2746         }
2747     }
2748
2749     for (i = short_ref_count; i < priv->short_ref_count; i++)
2750         priv->short_ref[i] = NULL;
2751     priv->short_ref_count = short_ref_count;
2752
2753     for (i = long_ref_count; i < priv->long_ref_count; i++)
2754         priv->long_ref[i] = NULL;
2755     priv->long_ref_count = long_ref_count;
2756 }
2757
2758 static void
2759 init_picture_refs(
2760     GstVaapiDecoderH264 *decoder,
2761     GstVaapiPictureH264 *picture,
2762     GstH264SliceHdr     *slice_hdr
2763 )
2764 {
2765     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2766     guint i, num_refs;
2767
2768     init_picture_ref_lists(decoder, picture);
2769     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2770
2771     priv->RefPicList0_count = 0;
2772     priv->RefPicList1_count = 0;
2773
2774     switch (slice_hdr->type % 5) {
2775     case GST_H264_P_SLICE:
2776     case GST_H264_SP_SLICE:
2777         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2778         break;
2779     case GST_H264_B_SLICE:
2780         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2781         break;
2782     default:
2783         break;
2784     }
2785
2786     exec_picture_refs_modification(decoder, picture, slice_hdr);
2787
2788     switch (slice_hdr->type % 5) {
2789     case GST_H264_B_SLICE:
2790         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2791         for (i = priv->RefPicList1_count; i < num_refs; i++)
2792             priv->RefPicList1[i] = NULL;
2793         priv->RefPicList1_count = num_refs;
2794
2795         // fall-through
2796     case GST_H264_P_SLICE:
2797     case GST_H264_SP_SLICE:
2798         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2799         for (i = priv->RefPicList0_count; i < num_refs; i++)
2800             priv->RefPicList0[i] = NULL;
2801         priv->RefPicList0_count = num_refs;
2802         break;
2803     default:
2804         break;
2805     }
2806 }
2807
2808 static gboolean
2809 init_picture(
2810     GstVaapiDecoderH264 *decoder,
2811     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2812 {
2813     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2814     GstVaapiPicture * const base_picture = &picture->base;
2815     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2816
2817     priv->prev_frame_num        = priv->frame_num;
2818     priv->frame_num             = slice_hdr->frame_num;
2819     picture->frame_num          = priv->frame_num;
2820     picture->frame_num_wrap     = priv->frame_num;
2821     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2822     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2823     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2824     base_picture->view_id       = pi->view_id;
2825     base_picture->voc           = pi->voc;
2826
2827     /* Initialize extensions */
2828     switch (pi->nalu.extension_type) {
2829     case GST_H264_NAL_EXTENSION_MVC: {
2830         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2831
2832         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2833         if (mvc->inter_view_flag)
2834             GST_VAAPI_PICTURE_FLAG_SET(picture,
2835                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2836         if (mvc->anchor_pic_flag)
2837             GST_VAAPI_PICTURE_FLAG_SET(picture,
2838                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2839         break;
2840     }
2841     }
2842
2843     /* Reset decoder state for IDR pictures */
2844     if (pi->nalu.idr_pic_flag) {
2845         GST_DEBUG("<IDR>");
2846         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2847         dpb_flush(decoder, picture);
2848     }
2849
2850     /* Initialize picture structure */
2851     if (!slice_hdr->field_pic_flag)
2852         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2853     else {
2854         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2855         if (!slice_hdr->bottom_field_flag)
2856             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2857         else
2858             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2859     }
2860     picture->structure = base_picture->structure;
2861
2862     /* Initialize reference flags */
2863     if (pi->nalu.ref_idc) {
2864         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2865             &slice_hdr->dec_ref_pic_marking;
2866
2867         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2868             dec_ref_pic_marking->long_term_reference_flag)
2869             GST_VAAPI_PICTURE_FLAG_SET(picture,
2870                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2871         else
2872             GST_VAAPI_PICTURE_FLAG_SET(picture,
2873                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2874     }
2875
2876     init_picture_poc(decoder, picture, slice_hdr);
2877     return TRUE;
2878 }
2879
2880 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2881 static gboolean
2882 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2883 {
2884     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2885     GstH264SPS * const sps = get_sps(decoder);
2886     GstVaapiPictureH264 *ref_picture;
2887     guint i, m, max_num_ref_frames;
2888
2889     GST_DEBUG("reference picture marking process (sliding window)");
2890
2891     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2892         return TRUE;
2893
2894     max_num_ref_frames = sps->num_ref_frames;
2895     if (max_num_ref_frames == 0)
2896         max_num_ref_frames = 1;
2897     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2898         max_num_ref_frames <<= 1;
2899
2900     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2901         return TRUE;
2902     if (priv->short_ref_count < 1)
2903         return FALSE;
2904
2905     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2906         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2907         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2908             m = i;
2909     }
2910
2911     ref_picture = priv->short_ref[m];
2912     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2913     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2914
2915     /* Both fields need to be marked as "unused for reference", so
2916        remove the other field from the short_ref[] list as well */
2917     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2918         for (i = 0; i < priv->short_ref_count; i++) {
2919             if (priv->short_ref[i] == ref_picture->other_field) {
2920                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2921                 break;
2922             }
2923         }
2924     }
2925     return TRUE;
2926 }
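
/* Illustrative example for the sliding window above: with
   sps->num_ref_frames = 4 and a field picture, max_num_ref_frames becomes 8.
   Once short_ref_count + long_ref_count reaches 8, the short-term reference
   with the smallest FrameNumWrap (e.g. index 1 for frame_num_wrap values
   { 7, 3, 5 }) is marked as unused for reference and removed, together with
   its other field when decoding fields */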
2927
2928 static inline gint32
2929 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2930 {
2931     gint32 pic_num;
2932
2933     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2934         pic_num = picture->frame_num_wrap;
2935     else
2936         pic_num = 2 * picture->frame_num_wrap + 1;
2937     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2938     return pic_num;
2939 }
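
/* Example (illustrative values): with frame_num_wrap = 10 and
   difference_of_pic_nums_minus1 = 3, picNumX = 10 - 4 = 6 for a frame
   picture and picNumX = (2 * 10 + 1) - 4 = 17 for a field picture */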
2940
2941 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2942 static void
2943 exec_ref_pic_marking_adaptive_mmco_1(
2944     GstVaapiDecoderH264  *decoder,
2945     GstVaapiPictureH264  *picture,
2946     GstH264RefPicMarking *ref_pic_marking
2947 )
2948 {
2949     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2950     gint32 i, picNumX;
2951
2952     picNumX = get_picNumX(picture, ref_pic_marking);
2953     i = find_short_term_reference(decoder, picNumX);
2954     if (i < 0)
2955         return;
2956
2957     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2958         GST_VAAPI_PICTURE_IS_FRAME(picture));
2959     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2960 }
2961
2962 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2963 static void
2964 exec_ref_pic_marking_adaptive_mmco_2(
2965     GstVaapiDecoderH264  *decoder,
2966     GstVaapiPictureH264  *picture,
2967     GstH264RefPicMarking *ref_pic_marking
2968 )
2969 {
2970     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2971     gint32 i;
2972
2973     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2974     if (i < 0)
2975         return;
2976
2977     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2978         GST_VAAPI_PICTURE_IS_FRAME(picture));
2979     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2980 }
2981
2982 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
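/* MMCO 3: if another long-term reference already holds the target
   long_term_frame_idx, it is unmarked first; the short-term picture
   with PicNum equal to picNumX is then moved from short_ref[] to
   long_ref[] and flagged as a long-term reference. */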
2983 static void
2984 exec_ref_pic_marking_adaptive_mmco_3(
2985     GstVaapiDecoderH264  *decoder,
2986     GstVaapiPictureH264  *picture,
2987     GstH264RefPicMarking *ref_pic_marking
2988 )
2989 {
2990     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2991     GstVaapiPictureH264 *ref_picture, *other_field;
2992     gint32 i, picNumX;
2993
2994     for (i = 0; i < priv->long_ref_count; i++) {
2995         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2996             break;
2997     }
2998     if (i != priv->long_ref_count) {
2999         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3000         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3001     }
3002
3003     picNumX = get_picNumX(picture, ref_pic_marking);
3004     i = find_short_term_reference(decoder, picNumX);
3005     if (i < 0)
3006         return;
3007
3008     ref_picture = priv->short_ref[i];
3009     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3010     priv->long_ref[priv->long_ref_count++] = ref_picture;
3011
3012     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3013     gst_vaapi_picture_h264_set_reference(ref_picture,
3014         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3015         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3016
3017     /* Assign LongTermFrameIdx to the other field if it was also
3018        marked as "used for long-term reference" */
3019     other_field = ref_picture->other_field;
3020     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3021         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3022 }
3023
3024 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3025  * as "unused for reference" */
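/* MMCO 4: long_ref[] entries with LongTermFrameIdx greater than
   max_long_term_frame_idx_plus1 - 1 are unmarked and removed; the
   loop index is decremented after each removal to account for the
   array elements shifting down. */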
3026 static void
3027 exec_ref_pic_marking_adaptive_mmco_4(
3028     GstVaapiDecoderH264  *decoder,
3029     GstVaapiPictureH264  *picture,
3030     GstH264RefPicMarking *ref_pic_marking
3031 )
3032 {
3033     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3034     gint32 i, long_term_frame_idx;
3035
3036     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3037
3038     for (i = 0; i < priv->long_ref_count; i++) {
3039         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3040             continue;
3041         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3042         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3043         i--;
3044     }
3045 }
3046
3047 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
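/* MMCO 5: the DPB is flushed, frame_num and frame_num_offset are
   reset to 0, and the field POCs of the current picture are rebased
   so that its picture order count becomes 0; prev_pic_has_mmco5 is
   recorded for the POC and frame_num derivations of the next picture
   (7.4.3, 8.2.1). */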
3048 static void
3049 exec_ref_pic_marking_adaptive_mmco_5(
3050     GstVaapiDecoderH264  *decoder,
3051     GstVaapiPictureH264  *picture,
3052     GstH264RefPicMarking *ref_pic_marking
3053 )
3054 {
3055     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3056
3057     dpb_flush(decoder, picture);
3058
3059     priv->prev_pic_has_mmco5 = TRUE;
3060
3061     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3062     priv->frame_num = 0;
3063     priv->frame_num_offset = 0;
3064     picture->frame_num = 0;
3065
3066     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3067     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3068         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3069     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3070         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3071     picture->base.poc = 0;
3072 }
3073
3074 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3075 static void
3076 exec_ref_pic_marking_adaptive_mmco_6(
3077     GstVaapiDecoderH264  *decoder,
3078     GstVaapiPictureH264  *picture,
3079     GstH264RefPicMarking *ref_pic_marking
3080 )
3081 {
3082     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3083     GstVaapiPictureH264 *other_field;
3084     guint i;
3085
3086     for (i = 0; i < priv->long_ref_count; i++) {
3087         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3088             break;
3089     }
3090     if (i != priv->long_ref_count) {
3091         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3092         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3093     }
3094
3095     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3096     gst_vaapi_picture_h264_set_reference(picture,
3097         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3098         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3099
3100     /* Assign LongTermFrameIdx to the other field if it was also
3101        marked as "used for long-term reference" */
3102     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3103     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3104         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3105 }
3106
3107 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
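/* Each memory_management_control_operation value indexes the
   mmco_funcs[] dispatch table below; values outside the table, or
   the end-of-list value 0, are reported as errors. */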
3108 static gboolean
3109 exec_ref_pic_marking_adaptive(
3110     GstVaapiDecoderH264     *decoder,
3111     GstVaapiPictureH264     *picture,
3112     GstH264DecRefPicMarking *dec_ref_pic_marking
3113 )
3114 {
3115     guint i;
3116
3117     GST_DEBUG("reference picture marking process (adaptive memory control)");
3118
3119     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3120         GstVaapiDecoderH264  *decoder,
3121         GstVaapiPictureH264  *picture,
3122         GstH264RefPicMarking *ref_pic_marking
3123     );
3124
3125     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3126         NULL,
3127         exec_ref_pic_marking_adaptive_mmco_1,
3128         exec_ref_pic_marking_adaptive_mmco_2,
3129         exec_ref_pic_marking_adaptive_mmco_3,
3130         exec_ref_pic_marking_adaptive_mmco_4,
3131         exec_ref_pic_marking_adaptive_mmco_5,
3132         exec_ref_pic_marking_adaptive_mmco_6,
3133     };
3134
3135     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3136         GstH264RefPicMarking * const ref_pic_marking =
3137             &dec_ref_pic_marking->ref_pic_marking[i];
3138
3139         const guint mmco = ref_pic_marking->memory_management_control_operation;
3140         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3141             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3142         else {
3143             GST_ERROR("unhandled MMCO %u", mmco);
3144             return FALSE;
3145         }
3146     }
3147     return TRUE;
3148 }
3149
3150 /* 8.2.5 - Execute reference picture marking process */
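/* Entry point for reference picture marking: non-reference pictures
   are skipped, IDR pictures already received their marking during
   picture initialization (see init_picture()), and other reference
   pictures use either the adaptive process or the sliding window,
   depending on adaptive_ref_pic_marking_mode_flag. */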
3151 static gboolean
3152 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3153 {
3154     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3155
3156     priv->prev_pic_has_mmco5 = FALSE;
3157     priv->prev_pic_structure = picture->structure;
3158
3159     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3160         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3161
3162     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3163         return TRUE;
3164
3165     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3166         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3167             &picture->last_slice_hdr->dec_ref_pic_marking;
3168         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3169             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3170                 return FALSE;
3171         }
3172         else {
3173             if (!exec_ref_pic_marking_sliding_window(decoder))
3174                 return FALSE;
3175         }
3176     }
3177     return TRUE;
3178 }
3179
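/* Resets a VAPictureH264 entry to the "invalid / unused" state
   expected by VA-API (VA_INVALID_ID surface, invalid picture flags). */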
3180 static void
3181 vaapi_init_picture(VAPictureH264 *pic)
3182 {
3183     pic->picture_id           = VA_INVALID_ID;
3184     pic->frame_idx            = 0;
3185     pic->flags                = VA_PICTURE_H264_INVALID;
3186     pic->TopFieldOrderCnt     = 0;
3187     pic->BottomFieldOrderCnt  = 0;
3188 }
3189
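/* Translates a GstVaapiPictureH264 into a VAPictureH264: frame_idx
   carries LongTermFrameIdx for long-term references and frame_num
   otherwise, and TopFieldOrderCnt / BottomFieldOrderCnt are filled
   according to the requested picture structure (defaulting to the
   picture's own structure). */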
3190 static void
3191 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3192     guint picture_structure)
3193 {
3194     if (!picture_structure)
3195         picture_structure = picture->structure;
3196
3197     pic->picture_id = picture->base.surface_id;
3198     pic->flags = 0;
3199
3200     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3201         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3202         pic->frame_idx = picture->long_term_frame_idx;
3203     }
3204     else {
3205         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3206             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3207         pic->frame_idx = picture->frame_num;
3208     }
3209
3210     switch (picture_structure) {
3211     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3212         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3213         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3214         break;
3215     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3216         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3217         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3218         pic->BottomFieldOrderCnt = 0;
3219         break;
3220     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3221         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3222         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3223         pic->TopFieldOrderCnt = 0;
3224         break;
3225     }
3226 }
3227
3228 static void
3229 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3230     GstVaapiPictureH264 *picture)
3231 {
3232     vaapi_fill_picture(pic, picture, 0);
3233
3234     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3235     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3236         /* The inter-view reference components and inter-view only
3237            reference components that are included in the reference
3238            picture lists are considered as not being marked as "used for
3239            short-term reference" or "used for long-term reference" */
3240         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3241                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3242     }
3243 }
3244
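/* Builds the VAPictureParameterBufferH264 for the current picture:
   CurrPic, the ReferenceFrames[] array gathered from DPB frame stores
   that are references for the current view (or inter-view references
   for MVC), and the SPS/PPS derived fields copied below. */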
3245 static gboolean
3246 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3247 {
3248     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3249     GstVaapiPicture * const base_picture = &picture->base;
3250     GstH264PPS * const pps = get_pps(decoder);
3251     GstH264SPS * const sps = get_sps(decoder);
3252     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3253     guint i, n;
3254
3255     /* Fill in VAPictureParameterBufferH264 */
3256     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3257
3258     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3259         GstVaapiFrameStore * const fs = priv->dpb[i];
3260         if ((gst_vaapi_frame_store_has_reference(fs) &&
3261              fs->view_id == picture->base.view_id) ||
3262             (gst_vaapi_frame_store_has_inter_view(fs) &&
3263              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3264             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3265                 fs->buffers[0], fs->structure);
3266         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3267             break;
3268     }
3269     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3270         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3271
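/* Helper macros: copy field f from the parsed SPS/PPS structure s
   into pic_param, either directly (COPY_FIELD) or into the named
   bitfield union a (COPY_BFM). */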
3272 #define COPY_FIELD(s, f) \
3273     pic_param->f = (s)->f
3274
3275 #define COPY_BFM(a, s, f) \
3276     pic_param->a.bits.f = (s)->f
3277
3278     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3279     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3280     pic_param->frame_num                    = priv->frame_num;
3281
3282     COPY_FIELD(sps, bit_depth_luma_minus8);
3283     COPY_FIELD(sps, bit_depth_chroma_minus8);
3284     COPY_FIELD(sps, num_ref_frames);
3285     COPY_FIELD(pps, num_slice_groups_minus1);
3286     COPY_FIELD(pps, slice_group_map_type);
3287     COPY_FIELD(pps, slice_group_change_rate_minus1);
3288     COPY_FIELD(pps, pic_init_qp_minus26);
3289     COPY_FIELD(pps, pic_init_qs_minus26);
3290     COPY_FIELD(pps, chroma_qp_index_offset);
3291     COPY_FIELD(pps, second_chroma_qp_index_offset);
3292
3293     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3294     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3295     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3296
3297     COPY_BFM(seq_fields, sps, chroma_format_idc);
3298     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3299     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3300     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3301     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3302     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3303     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3304     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3305     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3306
3307     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3308     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3309     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3310
3311     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3312     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3313     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3314     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3315     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3316     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3317     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3318     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3319     return TRUE;
3320 }
3321
3322 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
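/* Returns TRUE as soon as one of the conditions of 7.4.1.2.4 holds,
   i.e. the incoming slice starts a new primary coded picture; the
   CHECK_EXPR()/CHECK_VALUE() macros below log which field differs
   between the current and previous slice headers. */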
3323 static gboolean
3324 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3325 {
3326     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3327     GstH264PPS * const pps = slice_hdr->pps;
3328     GstH264SPS * const sps = pps->sequence;
3329     GstH264SliceHdr *prev_slice_hdr;
3330
3331     if (!prev_pi)
3332         return TRUE;
3333     prev_slice_hdr = &prev_pi->data.slice_hdr;
3334
3335 #define CHECK_EXPR(expr, field_name) do {              \
3336         if (!(expr)) {                                 \
3337             GST_DEBUG(field_name " differs in value"); \
3338             return TRUE;                               \
3339         }                                              \
3340     } while (0)
3341
3342 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3343     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3344
3345     /* view_id differs in value and VOIdx of current slice_hdr is less
3346        than the VOIdx of the prev_slice_hdr */
3347     CHECK_VALUE(pi, prev_pi, view_id);
3348
3349     /* frame_num differs in value, regardless of inferred values to 0 */
3350     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3351
3352     /* pic_parameter_set_id differs in value */
3353     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3354
3355     /* field_pic_flag differs in value */
3356     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3357
3358     /* bottom_field_flag is present in both and differs in value */
3359     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3360         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3361
3362     /* nal_ref_idc differs in value with one of the nal_ref_idc values equal to 0 */
3363     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3364                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3365
3366     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3367        value or delta_pic_order_cnt_bottom differs in value */
3368     if (sps->pic_order_cnt_type == 0) {
3369         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3370         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3371             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3372     }
3373
3374     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3375        differs in value or delta_pic_order_cnt[1] differs in value */
3376     else if (sps->pic_order_cnt_type == 1) {
3377         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3378         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3379     }
3380
3381     /* IdrPicFlag differs in value */
3382     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3383
3384     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3385     if (pi->nalu.idr_pic_flag)
3386         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3387
3388 #undef CHECK_EXPR
3389 #undef CHECK_VALUE
3390     return FALSE;
3391 }
3392
3393 /* Detection of a new access unit, assuming we are already in the presence
3394    of a new picture */
3395 static inline gboolean
3396 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3397 {
3398     if (!prev_pi || prev_pi->view_id == pi->view_id)
3399         return TRUE;
3400     return pi->voc < prev_pi->voc;
3401 }
3402
3403 /* Finds the first field picture corresponding to the supplied picture */
3404 static GstVaapiPictureH264 *
3405 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3406 {
3407     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3408     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3409     GstVaapiFrameStore *fs;
3410
3411     if (!slice_hdr->field_pic_flag)
3412         return NULL;
3413
3414     fs = priv->prev_frames[pi->voc];
3415     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3416         return NULL;
3417
3418     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3419         return fs->buffers[0];
3420     return NULL;
3421 }
3422
3423 static GstVaapiDecoderStatus
3424 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3425 {
3426     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3427     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3428     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3429     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3430     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3431     GstVaapiPictureH264 *picture, *first_field;
3432     GstVaapiDecoderStatus status;
3433
3434     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3435     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3436
3437     /* Only decode base stream for MVC */
3438     switch (sps->profile_idc) {
3439     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3440     case GST_H264_PROFILE_STEREO_HIGH:
3441         if (0) {
3442             GST_DEBUG("drop picture from substream");
3443             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3444         }
3445         break;
3446     }
3447
3448     status = ensure_context(decoder, sps);
3449     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3450         return status;
3451
3452     priv->decoder_state = 0;
3453
3454     first_field = find_first_field(decoder, pi);
3455     if (first_field) {
3456         /* Re-use current picture where the first field was decoded */
3457         picture = gst_vaapi_picture_h264_new_field(first_field);
3458         if (!picture) {
3459             GST_ERROR("failed to allocate field picture");
3460             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3461         }
3462     }
3463     else {
3464         /* Create new picture */
3465         picture = gst_vaapi_picture_h264_new(decoder);
3466         if (!picture) {
3467             GST_ERROR("failed to allocate picture");
3468             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3469         }
3470     }
3471     gst_vaapi_picture_replace(&priv->current_picture, picture);
3472     gst_vaapi_picture_unref(picture);
3473
3474     /* Clear inter-view references list if this is the primary coded
3475        picture of the current access unit */
3476     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3477         g_ptr_array_set_size(priv->inter_views, 0);
3478
3479     /* Update cropping rectangle */
3480     if (sps->frame_cropping_flag) {
3481         GstVaapiRectangle crop_rect;
3482         crop_rect.x = sps->crop_rect_x;
3483         crop_rect.y = sps->crop_rect_y;
3484         crop_rect.width = sps->crop_rect_width;
3485         crop_rect.height = sps->crop_rect_height;
3486         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3487     }
3488
3489     status = ensure_quant_matrix(decoder, picture);
3490     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3491         GST_ERROR("failed to reset quantizer matrix");
3492         return status;
3493     }
3494
3495     if (!init_picture(decoder, picture, pi))
3496         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3497     if (!fill_picture(decoder, picture))
3498         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3499
3500     priv->decoder_state = pi->state;
3501     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3502 }
3503
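/* Returns the bit offset of slice_data() within the slice NAL unit,
   as expected by VA-API: the parsed header size in bits plus the NAL
   header bytes, minus the bits occupied by the emulation prevention
   bytes counted by the parser. */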
3504 static inline guint
3505 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3506 {
3507     guint epb_count;
3508
3509     epb_count = slice_hdr->n_emulation_prevention_bytes;
3510     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3511 }
3512
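/* Fills the explicit weighted prediction tables of the VA slice
   parameter: one table (list 0) for P/SP slices when weighted_pred_flag
   is set, two tables (lists 0 and 1) for B slices when
   weighted_bipred_idc equals 1; chroma weights are only filled in when
   ChromaArrayType is non-zero. */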
3513 static gboolean
3514 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3515     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3516 {
3517     VASliceParameterBufferH264 * const slice_param = slice->param;
3518     GstH264PPS * const pps = get_pps(decoder);
3519     GstH264SPS * const sps = get_sps(decoder);
3520     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3521     guint num_weight_tables = 0;
3522     gint i, j;
3523
3524     if (pps->weighted_pred_flag &&
3525         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3526         num_weight_tables = 1;
3527     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3528         num_weight_tables = 2;
3529     else
3530         num_weight_tables = 0;
3531
3532     slice_param->luma_log2_weight_denom   = 0;
3533     slice_param->chroma_log2_weight_denom = 0;
3534     slice_param->luma_weight_l0_flag      = 0;
3535     slice_param->chroma_weight_l0_flag    = 0;
3536     slice_param->luma_weight_l1_flag      = 0;
3537     slice_param->chroma_weight_l1_flag    = 0;
3538
3539     if (num_weight_tables < 1)
3540         return TRUE;
3541
3542     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3543     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3544
3545     slice_param->luma_weight_l0_flag = 1;
3546     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3547         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3548         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3549     }
3550
3551     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3552     if (slice_param->chroma_weight_l0_flag) {
3553         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3554             for (j = 0; j < 2; j++) {
3555                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3556                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3557             }
3558         }
3559     }
3560
3561     if (num_weight_tables < 2)
3562         return TRUE;
3563
3564     slice_param->luma_weight_l1_flag = 1;
3565     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3566         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3567         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3568     }
3569
3570     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3571     if (slice_param->chroma_weight_l1_flag) {
3572         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3573             for (j = 0; j < 2; j++) {
3574                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3575                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3576             }
3577         }
3578     }
3579     return TRUE;
3580 }
3581
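/* Copies the reference picture lists built in init_picture_refs()
   into the VA slice parameter: RefPicList0 for P/SP/B slices,
   RefPicList1 for B slices only, with unused entries reset to the
   invalid picture. */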
3582 static gboolean
3583 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3584     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3585 {
3586     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3587     VASliceParameterBufferH264 * const slice_param = slice->param;
3588     guint i, num_ref_lists = 0;
3589
3590     slice_param->num_ref_idx_l0_active_minus1 = 0;
3591     slice_param->num_ref_idx_l1_active_minus1 = 0;
3592
3593     if (GST_H264_IS_B_SLICE(slice_hdr))
3594         num_ref_lists = 2;
3595     else if (GST_H264_IS_I_SLICE(slice_hdr))
3596         num_ref_lists = 0;
3597     else
3598         num_ref_lists = 1;
3599
3600     if (num_ref_lists < 1)
3601         return TRUE;
3602
3603     slice_param->num_ref_idx_l0_active_minus1 =
3604         slice_hdr->num_ref_idx_l0_active_minus1;
3605
3606     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3607         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3608             priv->RefPicList0[i]);
3609     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3610         vaapi_init_picture(&slice_param->RefPicList0[i]);
3611
3612     if (num_ref_lists < 2)
3613         return TRUE;
3614
3615     slice_param->num_ref_idx_l1_active_minus1 =
3616         slice_hdr->num_ref_idx_l1_active_minus1;
3617
3618     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3619         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3620             priv->RefPicList1[i]);
3621     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3622         vaapi_init_picture(&slice_param->RefPicList1[i]);
3623     return TRUE;
3624 }
3625
3626 static gboolean
3627 fill_slice(GstVaapiDecoderH264 *decoder,
3628     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3629 {
3630     VASliceParameterBufferH264 * const slice_param = slice->param;
3631     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3632
3633     /* Fill in VASliceParameterBufferH264 */
3634     slice_param->slice_data_bit_offset =
3635         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3636     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3637     slice_param->slice_type                     = slice_hdr->type % 5;
3638     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3639     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3640     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3641     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3642     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3643     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3644
3645     if (!fill_RefPicList(decoder, slice, slice_hdr))
3646         return FALSE;
3647     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3648         return FALSE;
3649     return TRUE;
3650 }
3651
3652 static GstVaapiDecoderStatus
3653 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3654 {
3655     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3656     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3657     GstVaapiPictureH264 * const picture = priv->current_picture;
3658     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3659     GstVaapiSlice *slice;
3660     GstBuffer * const buffer =
3661         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3662     GstMapInfo map_info;
3663
3664     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3665
3666     if (!is_valid_state(pi->state,
3667             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3668         GST_WARNING("failed to receive enough headers to decode slice");
3669         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3670     }
3671
3672     if (!ensure_pps(decoder, slice_hdr->pps)) {
3673         GST_ERROR("failed to activate PPS");
3674         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3675     }
3676
3677     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3678         GST_ERROR("failed to activate SPS");
3679         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3680     }
3681
3682     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3683         GST_ERROR("failed to map buffer");
3684         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3685     }
3686
3687     /* Check whether this is the first/last slice in the current access unit */
3688     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3689         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3690     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3691         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3692
3693     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3694         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3695     gst_buffer_unmap(buffer, &map_info);
3696     if (!slice) {
3697         GST_ERROR("failed to allocate slice");
3698         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3699     }
3700
3701     init_picture_refs(decoder, picture, slice_hdr);
3702     if (!fill_slice(decoder, slice, pi)) {
3703         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3704         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3705     }
3706
3707     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3708     picture->last_slice_hdr = slice_hdr;
3709     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3710     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3711 }
3712
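/* Scans the adapter for the next start code prefix (00 00 01): the
   masked 32-bit scan matches the prefix in the three most significant
   bytes and returns its offset, or a negative value if none is found. */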
3713 static inline gint
3714 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3715 {
3716     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3717                                                      0xffffff00, 0x00000100,
3718                                                      ofs, size,
3719                                                      scp);
3720 }
3721
3722 static GstVaapiDecoderStatus
3723 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3724 {
3725     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3726     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3727     GstVaapiDecoderStatus status;
3728
3729     priv->decoder_state |= pi->state;
3730     switch (pi->nalu.type) {
3731     case GST_H264_NAL_SPS:
3732         status = decode_sps(decoder, unit);
3733         break;
3734     case GST_H264_NAL_SUBSET_SPS:
3735         status = decode_subset_sps(decoder, unit);
3736         break;
3737     case GST_H264_NAL_PPS:
3738         status = decode_pps(decoder, unit);
3739         break;
3740     case GST_H264_NAL_SLICE_EXT:
3741     case GST_H264_NAL_SLICE_IDR:
3742         /* fall-through. IDR specifics are handled in init_picture() */
3743     case GST_H264_NAL_SLICE:
3744         status = decode_slice(decoder, unit);
3745         break;
3746     case GST_H264_NAL_SEQ_END:
3747     case GST_H264_NAL_STREAM_END:
3748         status = decode_sequence_end(decoder);
3749         break;
3750     case GST_H264_NAL_SEI:
3751         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3752         break;
3753     default:
3754         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3755         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3756         break;
3757     }
3758     return status;
3759 }
3760
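/* Parses avcC formatted codec-data: byte 4 carries lengthSizeMinusOne
   (yielding the NAL length prefix size used for avcC streams), byte 5
   the SPS count; each parameter set is stored with a 2-byte length
   prefix and is parsed and decoded in turn, followed by the PPS set. */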
3761 static GstVaapiDecoderStatus
3762 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3763     const guchar *buf, guint buf_size)
3764 {
3765     GstVaapiDecoderH264 * const decoder =
3766         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3767     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3768     GstVaapiDecoderStatus status;
3769     GstVaapiDecoderUnit unit;
3770     GstVaapiParserInfoH264 *pi = NULL;
3771     GstH264ParserResult result;
3772     guint i, ofs, num_sps, num_pps;
3773
3774     unit.parsed_info = NULL;
3775
3776     if (buf_size < 8)
3777         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3778
3779     if (buf[0] != 1) {
3780         GST_ERROR("failed to decode codec-data, not in avcC format");
3781         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3782     }
3783
3784     priv->nal_length_size = (buf[4] & 0x03) + 1;
3785
3786     num_sps = buf[5] & 0x1f;
3787     ofs = 6;
3788
3789     for (i = 0; i < num_sps; i++) {
3790         pi = gst_vaapi_parser_info_h264_new();
3791         if (!pi)
3792             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3793         unit.parsed_info = pi;
3794
3795         result = gst_h264_parser_identify_nalu_avc(
3796             priv->parser,
3797             buf, ofs, buf_size, 2,
3798             &pi->nalu
3799         );
3800         if (result != GST_H264_PARSER_OK) {
3801             status = get_status(result);
3802             goto cleanup;
3803         }
3804
3805         status = parse_sps(decoder, &unit);
3806         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3807             goto cleanup;
3808         ofs = pi->nalu.offset + pi->nalu.size;
3809
3810         status = decode_sps(decoder, &unit);
3811         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3812             goto cleanup;
3813         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3814     }
3815
3816     num_pps = buf[ofs];
3817     ofs++;
3818
3819     for (i = 0; i < num_pps; i++) {
3820         pi = gst_vaapi_parser_info_h264_new();
3821         if (!pi)
3822             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3823         unit.parsed_info = pi;
3824
3825         result = gst_h264_parser_identify_nalu_avc(
3826             priv->parser,
3827             buf, ofs, buf_size, 2,
3828             &pi->nalu
3829         );
3830         if (result != GST_H264_PARSER_OK) {
3831             status = get_status(result);
3832             goto cleanup;
3833         }
3834
3835         status = parse_pps(decoder, &unit);
3836         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3837             goto cleanup;
3838         ofs = pi->nalu.offset + pi->nalu.size;
3839
3840         status = decode_pps(decoder, &unit);
3841         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3842             goto cleanup;
3843         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3844     }
3845
3846     priv->is_avcC = TRUE;
3847     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3848
3849 cleanup:
3850     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3851     return status;
3852 }
3853
3854 static GstVaapiDecoderStatus
3855 ensure_decoder(GstVaapiDecoderH264 *decoder)
3856 {
3857     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3858     GstVaapiDecoderStatus status;
3859
3860     if (!priv->is_opened) {
3861         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3862         if (!priv->is_opened)
3863             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3864
3865         status = gst_vaapi_decoder_decode_codec_data(
3866             GST_VAAPI_DECODER_CAST(decoder));
3867         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3868             return status;
3869     }
3870     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3871 }
3872
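/* Delimits one NAL unit worth of data in the adapter: in avcC mode
   the NAL length prefix determines the unit size; in byte-stream mode
   the data between two start codes is used, with input_offset2
   remembering how far the previous scan progressed, and the remaining
   bytes are assumed to form a whole NAL unit at end-of-stream. The
   identified NAL unit is then parsed and the access unit / frame
   boundary flags are derived from its type. */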
3873 static GstVaapiDecoderStatus
3874 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3875     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3876 {
3877     GstVaapiDecoderH264 * const decoder =
3878         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3879     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3880     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3881     GstVaapiParserInfoH264 *pi;
3882     GstVaapiDecoderStatus status;
3883     GstH264ParserResult result;
3884     guchar *buf;
3885     guint i, size, buf_size, nalu_size, flags;
3886     guint32 start_code;
3887     gint ofs, ofs2;
3888
3889     status = ensure_decoder(decoder);
3890     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3891         return status;
3892
3893     switch (priv->stream_alignment) {
3894     case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3895         size = gst_adapter_available_fast(adapter);
3896         break;
3897     default:
3898         size = gst_adapter_available(adapter);
3899         break;
3900     }
3901
3902     if (priv->is_avcC) {
3903         if (size < priv->nal_length_size)
3904             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3905
3906         buf = (guchar *)&start_code;
3907         g_assert(priv->nal_length_size <= sizeof(start_code));
3908         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3909
3910         nalu_size = 0;
3911         for (i = 0; i < priv->nal_length_size; i++)
3912             nalu_size = (nalu_size << 8) | buf[i];
3913
3914         buf_size = priv->nal_length_size + nalu_size;
3915         if (size < buf_size)
3916             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3917     }
3918     else {
3919         if (size < 4)
3920             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3921
3922         if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
3923             buf_size = size;
3924         else {
3925             ofs = scan_for_start_code(adapter, 0, size, NULL);
3926             if (ofs < 0)
3927                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3928
3929             if (ofs > 0) {
3930                 gst_adapter_flush(adapter, ofs);
3931                 size -= ofs;
3932             }
3933
3934             ofs2 = ps->input_offset2 - ofs - 4;
3935             if (ofs2 < 4)
3936                 ofs2 = 4;
3937
3938             ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3939                 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3940             if (ofs < 0) {
3941                 // Assume the whole NAL unit is present if end-of-stream
3942                 if (!at_eos) {
3943                     ps->input_offset2 = size;
3944                     return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3945                 }
3946                 ofs = size;
3947             }
3948             buf_size = ofs;
3949         }
3950     }
3951     ps->input_offset2 = 0;
3952
3953     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3954     if (!buf)
3955         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3956
3957     unit->size = buf_size;
3958
3959     pi = gst_vaapi_parser_info_h264_new();
3960     if (!pi)
3961         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3962
3963     gst_vaapi_decoder_unit_set_parsed_info(unit,
3964         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3965
3966     if (priv->is_avcC)
3967         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3968             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3969     else
3970         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3971             buf, 0, buf_size, &pi->nalu);
3972     status = get_status(result);
3973     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3974         return status;
3975
3976     switch (pi->nalu.type) {
3977     case GST_H264_NAL_SPS:
3978         status = parse_sps(decoder, unit);
3979         break;
3980     case GST_H264_NAL_SUBSET_SPS:
3981         status = parse_subset_sps(decoder, unit);
3982         break;
3983     case GST_H264_NAL_PPS:
3984         status = parse_pps(decoder, unit);
3985         break;
3986     case GST_H264_NAL_SEI:
3987         status = parse_sei(decoder, unit);
3988         break;
3989     case GST_H264_NAL_SLICE_EXT:
3990         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3991             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3992             break;
3993         }
3994         /* fall-through */
3995     case GST_H264_NAL_SLICE_IDR:
3996     case GST_H264_NAL_SLICE:
3997         status = parse_slice(decoder, unit);
3998         break;
3999     default:
4000         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4001         break;
4002     }
4003     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4004         return status;
4005
4006     flags = 0;
4007     switch (pi->nalu.type) {
4008     case GST_H264_NAL_AU_DELIMITER:
4009         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4010         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4011         /* fall-through */
4012     case GST_H264_NAL_FILLER_DATA:
4013         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4014         break;
4015     case GST_H264_NAL_STREAM_END:
4016         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4017         /* fall-through */
4018     case GST_H264_NAL_SEQ_END:
4019         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4020         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4021         break;
4022     case GST_H264_NAL_SPS:
4023     case GST_H264_NAL_SUBSET_SPS:
4024     case GST_H264_NAL_PPS:
4025     case GST_H264_NAL_SEI:
4026         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4027         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4028         break;
4029     case GST_H264_NAL_SLICE_EXT:
4030         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4031             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4032             break;
4033         }
4034         /* fall-through */
4035     case GST_H264_NAL_SLICE_IDR:
4036     case GST_H264_NAL_SLICE:
4037         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4038         if (is_new_picture(pi, priv->prev_slice_pi)) {
4039             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4040             if (is_new_access_unit(pi, priv->prev_slice_pi))
4041                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4042         }
4043         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4044         break;
4045     case GST_H264_NAL_SPS_EXT:
4046     case GST_H264_NAL_SLICE_AUX:
4047         /* skip SPS extension and auxiliary slice for now */
4048         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4049         break;
4050     case GST_H264_NAL_PREFIX_UNIT:
4051         /* skip Prefix NAL units for now */
4052         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4053             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4054             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4055         break;
4056     default:
4057         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4058             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4059                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4060         break;
4061     }
4062     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4063         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4064     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4065
4066     pi->nalu.data = NULL;
4067     pi->state = priv->parser_state;
4068     pi->flags = flags;
4069     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4070     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4071 }
4072
4073 static GstVaapiDecoderStatus
4074 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4075     GstVaapiDecoderUnit *unit)
4076 {
4077     GstVaapiDecoderH264 * const decoder =
4078         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4079     GstVaapiDecoderStatus status;
4080
4081     status = ensure_decoder(decoder);
4082     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4083         return status;
4084     return decode_unit(decoder, unit);
4085 }
4086
4087 static GstVaapiDecoderStatus
4088 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4089     GstVaapiDecoderUnit *unit)
4090 {
4091     GstVaapiDecoderH264 * const decoder =
4092         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4093
4094     return decode_picture(decoder, unit);
4095 }
4096
4097 static GstVaapiDecoderStatus
4098 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4099 {
4100     GstVaapiDecoderH264 * const decoder =
4101         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4102
4103     return decode_current_picture(decoder);
4104 }
4105
4106 static GstVaapiDecoderStatus
4107 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4108 {
4109     GstVaapiDecoderH264 * const decoder =
4110         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4111
4112     dpb_flush(decoder, NULL);
4113     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4114 }
4115
4116 static void
4117 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4118 {
4119     GstVaapiMiniObjectClass * const object_class =
4120         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4121     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4122
4123     object_class->size          = sizeof(GstVaapiDecoderH264);
4124     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4125
4126     decoder_class->create       = gst_vaapi_decoder_h264_create;
4127     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4128     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4129     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4130     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4131     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4132     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4133
4134     decoder_class->decode_codec_data =
4135         gst_vaapi_decoder_h264_decode_codec_data;
4136 }
4137
4138 static inline const GstVaapiDecoderClass *
4139 gst_vaapi_decoder_h264_class(void)
4140 {
4141     static GstVaapiDecoderH264Class g_class;
4142     static gsize g_class_init = FALSE;
4143
4144     if (g_once_init_enter(&g_class_init)) {
4145         gst_vaapi_decoder_h264_class_init(&g_class);
4146         g_once_init_leave(&g_class_init, TRUE);
4147     }
4148     return GST_VAAPI_DECODER_CLASS(&g_class);
4149 }
4150
4151 /**
4152  * gst_vaapi_decoder_h264_set_alignment:
4153  * @decoder: a #GstVaapiDecoderH264
4154  * @alignment: the #GstVaapiStreamAlignH264
4155  *
4156  * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4157  * of each buffer that is supplied to the decoder. This could be no
4158  * specific alignment, NAL unit boundaries, or access unit boundaries.
4159  */
4160 void
4161 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4162     GstVaapiStreamAlignH264 alignment)
4163 {
4164     g_return_if_fail(decoder != NULL);
4165
4166     decoder->priv.stream_alignment = alignment;
4167 }
4168
4169 /**
4170  * gst_vaapi_decoder_h264_new:
4171  * @display: a #GstVaapiDisplay
4172  * @caps: a #GstCaps holding codec information
4173  *
4174  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4175  * hold extra information like codec-data and picture coded size.
4176  *
4177  * Return value: the newly allocated #GstVaapiDecoder object
4178  */
4179 GstVaapiDecoder *
4180 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4181 {
4182     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4183 }