1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_FLAG_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* This flag does not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit as we are only
69        interested in knowing if the current slice is the first one or
70        the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
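/* Adds the second field picture to an existing frame store and merges the field POCs */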
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
355
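/* Splits a frame picture held in the frame store into two field pictures */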
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiPictureH264        *current_picture;
449     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
450     GstVaapiParserInfoH264     *active_sps;
451     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
452     GstVaapiParserInfoH264     *active_pps;
453     GstVaapiParserInfoH264     *prev_pi;
454     GstVaapiParserInfoH264     *prev_slice_pi;
455     GstVaapiFrameStore        **prev_frames;
456     guint                       prev_frames_alloc;
457     GstVaapiFrameStore        **dpb;
458     guint                       dpb_count;
459     guint                       dpb_size;
460     guint                       dpb_size_max;
461     guint                       max_views;
462     GstVaapiProfile             profile;
463     GstVaapiEntrypoint          entrypoint;
464     GstVaapiChromaType          chroma_type;
465     GPtrArray                  *inter_views;
466     GstVaapiPictureH264        *short_ref[32];
467     guint                       short_ref_count;
468     GstVaapiPictureH264        *long_ref[32];
469     guint                       long_ref_count;
470     GstVaapiPictureH264        *RefPicList0[32];
471     guint                       RefPicList0_count;
472     GstVaapiPictureH264        *RefPicList1[32];
473     guint                       RefPicList1_count;
474     guint                       nal_length_size;
475     guint                       mb_width;
476     guint                       mb_height;
477     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
478     gint32                      poc_msb;                // PicOrderCntMsb
479     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
480     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
481     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
482     gint32                      frame_num_offset;       // FrameNumOffset
483     gint32                      frame_num;              // frame_num (from slice_header())
484     gint32                      prev_frame_num;         // prevFrameNum
485     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
486     gboolean                    prev_pic_structure;     // previous picture structure
487     guint                       is_opened               : 1;
488     guint                       is_avcC                 : 1;
489     guint                       has_context             : 1;
490     guint                       progressive_sequence    : 1;
491 };
492
493 /**
494  * GstVaapiDecoderH264:
495  *
496  * A decoder based on H.264.
497  */
498 struct _GstVaapiDecoderH264 {
499     /*< private >*/
500     GstVaapiDecoder             parent_instance;
501     GstVaapiDecoderH264Private  priv;
502 };
503
504 /**
505  * GstVaapiDecoderH264Class:
506  *
507  * A decoder class based on H.264.
508  */
509 struct _GstVaapiDecoderH264Class {
510     /*< private >*/
511     GstVaapiDecoderClass parent_class;
512 };
513
514 static gboolean
515 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
516
517 static gboolean
518 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
519     GstVaapiPictureH264 *picture);
520
521 static inline gboolean
522 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
523     GstVaapiFrameStore *fs)
524 {
525     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
526 }
527
528 /* Determines if the supplied profile is one of the MVC set */
529 static gboolean
530 is_mvc_profile(GstH264Profile profile)
531 {
532     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
533         profile == GST_H264_PROFILE_STEREO_HIGH;
534 }
535
536 /* Determines the view_id from the supplied NAL unit */
537 static inline guint
538 get_view_id(GstH264NalUnit *nalu)
539 {
540     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
541 }
542
543 /* Determines the view order index (VOIdx) from the supplied view_id */
544 static gint
545 get_view_order_index(GstH264SPS *sps, guint16 view_id)
546 {
547     GstH264SPSExtMVC *mvc;
548     gint i;
549
550     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
551         return 0;
552
553     mvc = &sps->extension.mvc;
554     for (i = 0; i <= mvc->num_views_minus1; i++) {
555         if (mvc->view[i].view_id == view_id)
556             return i;
557     }
558     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
559     return -1;
560 }
561
562 /* Determines NumViews */
563 static guint
564 get_num_views(GstH264SPS *sps)
565 {
566     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
567         sps->extension.mvc.num_views_minus1 : 0);
568 }
569
570 /* Gets the maximum decoded frame buffering (DPB size) to use */
571 static guint
572 get_max_dec_frame_buffering(GstH264SPS *sps)
573 {
574     guint num_views, max_dpb_frames;
575     guint max_dec_frame_buffering, PicSizeMbs;
576     GstVaapiLevelH264 level;
577     const GstVaapiH264LevelLimits *level_limits;
578
579     /* Table A-1 - Level limits */
580     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
581         level = GST_VAAPI_LEVEL_H264_L1b;
582     else
583         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
584     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
585     if (G_UNLIKELY(!level_limits)) {
586         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
587         max_dec_frame_buffering = 16;
588     }
589     else {
590         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
591                       (sps->pic_height_in_map_units_minus1 + 1) *
592                       (sps->frame_mbs_only_flag ? 1 : 2));
593         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
594     }
595     if (is_mvc_profile(sps->profile_idc))
596         max_dec_frame_buffering <<= 1;
597
598     /* VUI parameters */
599     if (sps->vui_parameters_present_flag) {
600         GstH264VUIParams * const vui_params = &sps->vui_parameters;
601         if (vui_params->bitstream_restriction_flag)
602             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
603         else {
604             switch (sps->profile_idc) {
605             case 44:  // CAVLC 4:4:4 Intra profile
606             case GST_H264_PROFILE_SCALABLE_HIGH:
607             case GST_H264_PROFILE_HIGH:
608             case GST_H264_PROFILE_HIGH10:
609             case GST_H264_PROFILE_HIGH_422:
610             case GST_H264_PROFILE_HIGH_444:
611                 if (sps->constraint_set3_flag)
612                     max_dec_frame_buffering = 0;
613                 break;
614             }
615         }
616     }
617
618     num_views = get_num_views(sps);
619     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
620     if (max_dec_frame_buffering > max_dpb_frames)
621         max_dec_frame_buffering = max_dpb_frames;
622     else if (max_dec_frame_buffering < sps->num_ref_frames)
623         max_dec_frame_buffering = sps->num_ref_frames;
624     return MAX(1, max_dec_frame_buffering);
625 }
626
627 static void
628 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
629 {
630     gpointer * const entries = array;
631     guint num_entries = *array_length_ptr;
632
633     g_return_if_fail(index < num_entries);
634
635     if (index != --num_entries)
636         entries[index] = entries[num_entries];
637     entries[num_entries] = NULL;
638     *array_length_ptr = num_entries;
639 }
640
641 #if 1
642 static inline void
643 array_remove_index(void *array, guint *array_length_ptr, guint index)
644 {
645     array_remove_index_fast(array, array_length_ptr, index);
646 }
647 #else
648 static void
649 array_remove_index(void *array, guint *array_length_ptr, guint index)
650 {
651     gpointer * const entries = array;
652     const guint num_entries = *array_length_ptr - 1;
653     guint i;
654
655     g_return_if_fail(index <= num_entries);
656
657     for (i = index; i < num_entries; i++)
658         entries[i] = entries[i + 1];
659     entries[num_entries] = NULL;
660     *array_length_ptr = num_entries;
661 }
662 #endif
663
664 #define ARRAY_REMOVE_INDEX(array, index) \
665     array_remove_index(array, &array##_count, index)
666
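/* Removes the frame store at the supplied index from the DPB */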
667 static void
668 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
669 {
670     GstVaapiDecoderH264Private * const priv = &decoder->priv;
671     guint i, num_frames = --priv->dpb_count;
672
673     if (USE_STRICT_DPB_ORDERING) {
674         for (i = index; i < num_frames; i++)
675             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
676     }
677     else if (index != num_frames)
678         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
679     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
680 }
681
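/* Marks the picture as output; the frame is pushed for display once no field in its frame store still needs output */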
682 static gboolean
683 dpb_output(
684     GstVaapiDecoderH264 *decoder,
685     GstVaapiFrameStore  *fs,
686     GstVaapiPictureH264 *picture
687 )
688 {
689     picture->output_needed = FALSE;
690
691     if (fs) {
692         if (--fs->output_needed > 0)
693             return TRUE;
694         picture = fs->buffers[0];
695     }
696     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
697 }
698
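/* Removes the frame store from the DPB once it is neither needed for output nor used for reference */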
699 static inline void
700 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
701 {
702     GstVaapiDecoderH264Private * const priv = &decoder->priv;
703     GstVaapiFrameStore * const fs = priv->dpb[i];
704
705     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
706         dpb_remove_index(decoder, i);
707 }
708
709 /* Finds the frame store holding the supplied picture */
710 static gint
711 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
712 {
713     GstVaapiDecoderH264Private * const priv = &decoder->priv;
714     gint i, j;
715
716     for (i = 0; i < priv->dpb_count; i++) {
717         GstVaapiFrameStore * const fs = priv->dpb[i];
718         for (j = 0; j < fs->num_buffers; j++) {
719             if (fs->buffers[j] == picture)
720                 return i;
721         }
722     }
723     return -1;
724 }
725
726 /* Finds the picture with the lowest POC that needs to be output */
727 static gint
728 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
729     GstVaapiPictureH264 **found_picture_ptr)
730 {
731     GstVaapiDecoderH264Private * const priv = &decoder->priv;
732     GstVaapiPictureH264 *found_picture = NULL;
733     guint i, j, found_index;
734
735     for (i = 0; i < priv->dpb_count; i++) {
736         GstVaapiFrameStore * const fs = priv->dpb[i];
737         if (!fs->output_needed)
738             continue;
739         if (picture && picture->base.view_id != fs->view_id)
740             continue;
741         for (j = 0; j < fs->num_buffers; j++) {
742             GstVaapiPictureH264 * const pic = fs->buffers[j];
743             if (!pic->output_needed)
744                 continue;
745             if (!found_picture || found_picture->base.poc > pic->base.poc ||
746                 (found_picture->base.poc == pic->base.poc &&
747                  found_picture->base.voc > pic->base.voc))
748                 found_picture = pic, found_index = i;
749         }
750     }
751
752     if (found_picture_ptr)
753         *found_picture_ptr = found_picture;
754     return found_picture ? found_index : -1;
755 }
756
757 /* Finds the picture with the lowest VOC that needs to be output */
758 static gint
759 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
760     GstVaapiPictureH264 **found_picture_ptr)
761 {
762     GstVaapiDecoderH264Private * const priv = &decoder->priv;
763     GstVaapiPictureH264 *found_picture = NULL;
764     guint i, j, found_index;
765
766     for (i = 0; i < priv->dpb_count; i++) {
767         GstVaapiFrameStore * const fs = priv->dpb[i];
768         if (!fs->output_needed || fs->view_id == picture->base.view_id)
769             continue;
770         for (j = 0; j < fs->num_buffers; j++) {
771             GstVaapiPictureH264 * const pic = fs->buffers[j];
772             if (!pic->output_needed || pic->base.poc != picture->base.poc)
773                 continue;
774             if (!found_picture || found_picture->base.voc > pic->base.voc)
775                 found_picture = pic, found_index = i;
776         }
777     }
778
779     if (found_picture_ptr)
780         *found_picture_ptr = found_picture;
781     return found_picture ? found_index : -1;
782 }
783
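/* Outputs the other view components of the same access unit with a VOIdx lower than the supplied voc (MVC only) */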
784 static gboolean
785 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
786     GstVaapiPictureH264 *picture, guint voc)
787 {
788     GstVaapiDecoderH264Private * const priv = &decoder->priv;
789     GstVaapiPictureH264 *found_picture;
790     gint found_index;
791     gboolean success;
792
793     if (priv->max_views == 1)
794         return TRUE;
795
796     /* Emit all other view components that were in the same access
797        unit as the picture we have just found */
798     found_picture = picture;
799     for (;;) {
800         found_index = dpb_find_lowest_voc(decoder, found_picture,
801             &found_picture);
802         if (found_index < 0 || found_picture->base.voc >= voc)
803             break;
804         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
805         dpb_evict(decoder, found_picture, found_index);
806         if (!success)
807             return FALSE;
808     }
809     return TRUE;
810 }
811
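/* Outputs (bumps) the picture with the lowest POC that still needs output, together with the other view components of its access unit */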
812 static gboolean
813 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
814 {
815     GstVaapiDecoderH264Private * const priv = &decoder->priv;
816     GstVaapiPictureH264 *found_picture;
817     gint found_index;
818     gboolean success;
819
820     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
821     if (found_index < 0)
822         return FALSE;
823
824     if (picture && picture->base.poc != found_picture->base.poc)
825         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
826
827     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
828     dpb_evict(decoder, found_picture, found_index);
829     if (priv->max_views == 1)
830         return success;
831
832     if (picture && picture->base.poc != found_picture->base.poc)
833         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
834     return success;
835 }
836
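/* Clears the DPB, either entirely or only for the view of the supplied picture */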
837 static void
838 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
839 {
840     GstVaapiDecoderH264Private * const priv = &decoder->priv;
841     guint i, n;
842
843     for (i = 0; i < priv->dpb_count; i++) {
844         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
845             continue;
846         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
847     }
848
849     for (i = 0, n = 0; i < priv->dpb_count; i++) {
850         if (priv->dpb[i])
851             priv->dpb[n++] = priv->dpb[i];
852     }
853     priv->dpb_count = n;
854
855     /* Clear previous frame buffers only if this is a "flush-all" operation,
856        or if the picture is the first one in the access unit */
857     if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
858             GST_VAAPI_PICTURE_FLAG_AU_START)) {
859         for (i = 0; i < priv->max_views; i++)
860             gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
861     }
862 }
863
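/* Drains the DPB: outputs all pending pictures, then clears it */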
864 static void
865 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
866 {
867     while (dpb_bump(decoder, picture))
868         ;
869     dpb_clear(decoder, picture);
870 }
871
872 static void
873 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
874 {
875     GstVaapiDecoderH264Private * const priv = &decoder->priv;
876     const gboolean is_last_picture = /* in the access unit */
877         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
878     guint i;
879
880     // Remove all unused inter-view only reference components of the current AU
881     i = 0;
882     while (i < priv->dpb_count) {
883         GstVaapiFrameStore * const fs = priv->dpb[i];
884         if (fs->view_id != picture->base.view_id &&
885             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
886             (is_last_picture ||
887              !is_inter_view_reference_for_next_frames(decoder, fs)))
888             dpb_remove_index(decoder, i);
889         else
890             i++;
891     }
892 }
893
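/* Stores the decoded picture into the DPB (C.4.5.1 / C.4.5.2), bumping out pictures as needed */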
894 static gboolean
895 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
896 {
897     GstVaapiDecoderH264Private * const priv = &decoder->priv;
898     GstVaapiFrameStore *fs;
899     guint i;
900
901     if (priv->max_views > 1)
902         dpb_prune_mvc(decoder, picture);
903
904     // Remove all unused pictures
905     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
906         i = 0;
907         while (i < priv->dpb_count) {
908             GstVaapiFrameStore * const fs = priv->dpb[i];
909             if (fs->view_id == picture->base.view_id &&
910                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
911                 dpb_remove_index(decoder, i);
912             else
913                 i++;
914         }
915     }
916
917     // Check if picture is the second field and the first field is still in DPB
918     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
919         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
920         const gint found_index = dpb_find_picture(decoder,
921             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
922         if (found_index >= 0)
923             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
924     }
925
926     // Create new frame store, and split fields if necessary
927     fs = gst_vaapi_frame_store_new(picture);
928     if (!fs)
929         return FALSE;
930     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
931     gst_vaapi_frame_store_unref(fs);
932
933     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
934         if (!gst_vaapi_frame_store_split_fields(fs))
935             return FALSE;
936     }
937
938     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
939     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
940         while (priv->dpb_count == priv->dpb_size) {
941             if (!dpb_bump(decoder, picture))
942                 return FALSE;
943         }
944     }
945
946     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
947     else {
948         const gboolean StoreInterViewOnlyRefFlag =
949             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
950                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
951             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
952                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
953         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
954             return TRUE;
955         while (priv->dpb_count == priv->dpb_size) {
956             if (!StoreInterViewOnlyRefFlag) {
957                 if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
958                     return dpb_output(decoder, NULL, picture);
959             }
960             if (!dpb_bump(decoder, picture))
961                 return FALSE;
962         }
963     }
964
965     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
966     if (picture->output_flag) {
967         picture->output_needed = TRUE;
968         fs->output_needed++;
969     }
970     return TRUE;
971 }
972
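/* Grows the DPB so that it can hold at least dpb_size frame stores */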
973 static gboolean
974 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
975 {
976     GstVaapiDecoderH264Private * const priv = &decoder->priv;
977
978     if (dpb_size < priv->dpb_count)
979         return FALSE;
980
981     if (dpb_size > priv->dpb_size_max) {
982         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
983         if (!priv->dpb)
984             return FALSE;
985         memset(&priv->dpb[priv->dpb_size_max], 0,
986             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
987         priv->dpb_size_max = dpb_size;
988     }
989
990     if (priv->dpb_size < dpb_size)
991         priv->dpb_size = dpb_size;
992     else if (dpb_size < priv->dpb_count)
993         return FALSE;
994
995     GST_DEBUG("DPB size %u", priv->dpb_size);
996     return TRUE;
997 }
998
999 static void
1000 unref_inter_view(GstVaapiPictureH264 *picture)
1001 {
1002     if (!picture)
1003         return;
1004     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1005     gst_vaapi_picture_unref(picture);
1006 }
1007
1008 /* Resets MVC resources */
1009 static gboolean
1010 mvc_reset(GstVaapiDecoderH264 *decoder)
1011 {
1012     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1013     guint i;
1014
1015     // Resize array of inter-view references
1016     if (!priv->inter_views) {
1017         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1018             (GDestroyNotify)unref_inter_view);
1019         if (!priv->inter_views)
1020             return FALSE;
1021     }
1022
1023     // Resize array of previous frame buffers
1024     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1025         gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
1026
1027     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1028         sizeof(*priv->prev_frames));
1029     if (!priv->prev_frames) {
1030         priv->prev_frames_alloc = 0;
1031         return FALSE;
1032     }
1033     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1034         priv->prev_frames[i] = NULL;
1035     priv->prev_frames_alloc = priv->max_views;
1036     return TRUE;
1037 }
1038
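/* Maps a GstH264ParserResult to the matching GstVaapiDecoderStatus */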
1039 static GstVaapiDecoderStatus
1040 get_status(GstH264ParserResult result)
1041 {
1042     GstVaapiDecoderStatus status;
1043
1044     switch (result) {
1045     case GST_H264_PARSER_OK:
1046         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1047         break;
1048     case GST_H264_PARSER_NO_NAL_END:
1049         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1050         break;
1051     case GST_H264_PARSER_ERROR:
1052         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1053         break;
1054     default:
1055         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1056         break;
1057     }
1058     return status;
1059 }
1060
1061 static void
1062 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1063 {
1064     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1065
1066     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1067     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1068     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1069
1070     dpb_clear(decoder, NULL);
1071
1072     if (priv->inter_views) {
1073         g_ptr_array_unref(priv->inter_views);
1074         priv->inter_views = NULL;
1075     }
1076
1077     if (priv->parser) {
1078         gst_h264_nal_parser_free(priv->parser);
1079         priv->parser = NULL;
1080     }
1081 }
1082
1083 static gboolean
1084 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1085 {
1086     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1087
1088     gst_vaapi_decoder_h264_close(decoder);
1089
1090     priv->parser = gst_h264_nal_parser_new();
1091     if (!priv->parser)
1092         return FALSE;
1093     return TRUE;
1094 }
1095
1096 static void
1097 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1098 {
1099     GstVaapiDecoderH264 * const decoder =
1100         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1101     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1102     guint i;
1103
1104     gst_vaapi_decoder_h264_close(decoder);
1105
1106     g_free(priv->dpb);
1107     priv->dpb = NULL;
1108     priv->dpb_size = 0;
1109
1110     g_free(priv->prev_frames);
1111     priv->prev_frames = NULL;
1112     priv->prev_frames_alloc = 0;
1113
1114     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1115         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1116     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1117
1118     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1119         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1120     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1121 }
1122
1123 static gboolean
1124 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1125 {
1126     GstVaapiDecoderH264 * const decoder =
1127         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1128     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1129
1130     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1131     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1132     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1133     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1134     priv->progressive_sequence  = TRUE;
1135     return TRUE;
1136 }
1137
1138 /* Activates the supplied PPS */
1139 static GstH264PPS *
1140 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1141 {
1142     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1143     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1144
1145     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1146     return pi ? &pi->data.pps : NULL;
1147 }
1148
1149 /* Returns the active PPS */
1150 static inline GstH264PPS *
1151 get_pps(GstVaapiDecoderH264 *decoder)
1152 {
1153     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1154
1155     return pi ? &pi->data.pps : NULL;
1156 }
1157
1158 /* Activates the supplied SPS */
1159 static GstH264SPS *
1160 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1161 {
1162     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1163     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1164
1165     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1166     return pi ? &pi->data.sps : NULL;
1167 }
1168
1169 /* Returns the active SPS */
1170 static inline GstH264SPS *
1171 get_sps(GstVaapiDecoderH264 *decoder)
1172 {
1173     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1174
1175     return pi ? &pi->data.sps : NULL;
1176 }
1177
1178 static void
1179 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1180     GstVaapiProfile profile)
1181 {
1182     guint n_profiles = *n_profiles_ptr;
1183
1184     profiles[n_profiles++] = profile;
1185     switch (profile) {
1186     case GST_VAAPI_PROFILE_H264_MAIN:
1187         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1188         break;
1189     default:
1190         break;
1191     }
1192     *n_profiles_ptr = n_profiles;
1193 }
1194
1195 /* Fills in compatible profiles for MVC decoding */
1196 static void
1197 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1198     guint *n_profiles_ptr, guint dpb_size)
1199 {
1200     const gchar * const vendor_string =
1201         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1202
1203     gboolean add_high_profile = FALSE;
1204     struct map {
1205         const gchar *str;
1206         guint str_len;
1207     };
1208     const struct map *m;
1209
1210     // Drivers that support slice level decoding
1211     if (vendor_string && dpb_size <= 16) {
1212         static const struct map drv_names[] = {
1213             { "Intel i965 driver", 17 },
1214             { NULL, 0 }
1215         };
1216         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1217             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1218                 add_high_profile = TRUE;
1219         }
1220     }
1221
1222     if (add_high_profile)
1223         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1224 }
1225
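/* Selects a VA profile supported by the display that can decode the stream described by the SPS */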
1226 static GstVaapiProfile
1227 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1228 {
1229     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1230     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1231     GstVaapiProfile profile, profiles[4];
1232     guint i, n_profiles = 0;
1233
1234     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1235     if (!profile)
1236         return GST_VAAPI_PROFILE_UNKNOWN;
1237
1238     fill_profiles(profiles, &n_profiles, profile);
1239     switch (profile) {
1240     case GST_VAAPI_PROFILE_H264_BASELINE:
1241         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1242             fill_profiles(profiles, &n_profiles,
1243                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1244             fill_profiles(profiles, &n_profiles,
1245                 GST_VAAPI_PROFILE_H264_MAIN);
1246         }
1247         break;
1248     case GST_VAAPI_PROFILE_H264_EXTENDED:
1249         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1250             fill_profiles(profiles, &n_profiles,
1251                 GST_VAAPI_PROFILE_H264_MAIN);
1252         }
1253         break;
1254     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1255         if (priv->max_views == 2) {
1256             fill_profiles(profiles, &n_profiles,
1257                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1258         }
1259         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1260         break;
1261     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1262         if (sps->frame_mbs_only_flag) {
1263             fill_profiles(profiles, &n_profiles,
1264                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1265         }
1266         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1267         break;
1268     default:
1269         break;
1270     }
1271
1272     /* If the preferred profile (profiles[0]) matches one that we already
1273        found, then just return it now instead of searching for it again */
1274     if (profiles[0] == priv->profile)
1275         return priv->profile;
1276
1277     for (i = 0; i < n_profiles; i++) {
1278         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1279             return profiles[i];
1280     }
1281     return GST_VAAPI_PROFILE_UNKNOWN;
1282 }
1283
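/* (Re)creates the VA context whenever the profile, chroma format, size or DPB size changes */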
1284 static GstVaapiDecoderStatus
1285 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1286 {
1287     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1288     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1289     GstVaapiContextInfo info;
1290     GstVaapiProfile profile;
1291     GstVaapiChromaType chroma_type;
1292     gboolean reset_context = FALSE;
1293     guint mb_width, mb_height, dpb_size;
1294
1295     dpb_size = get_max_dec_frame_buffering(sps);
1296     if (priv->dpb_size < dpb_size) {
1297         GST_DEBUG("DPB size increased");
1298         reset_context = TRUE;
1299     }
1300
1301     profile = get_profile(decoder, sps, dpb_size);
1302     if (!profile) {
1303         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1304         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1305     }
1306
1307     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1308         GST_DEBUG("profile changed");
1309         reset_context = TRUE;
1310         priv->profile = profile;
1311     }
1312
1313     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1314     if (!chroma_type) {
1315         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1316         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1317     }
1318
1319     if (priv->chroma_type != chroma_type) {
1320         GST_DEBUG("chroma format changed");
1321         reset_context     = TRUE;
1322         priv->chroma_type = chroma_type;
1323     }
1324
1325     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1326     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1327         !sps->frame_mbs_only_flag;
1328     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1329         GST_DEBUG("size changed");
1330         reset_context   = TRUE;
1331         priv->mb_width  = mb_width;
1332         priv->mb_height = mb_height;
1333     }
1334
1335     priv->progressive_sequence = sps->frame_mbs_only_flag;
1336     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1337
1338     gst_vaapi_decoder_set_pixel_aspect_ratio(
1339         base_decoder,
1340         sps->vui_parameters.par_n,
1341         sps->vui_parameters.par_d
1342     );
1343
1344     if (!reset_context && priv->has_context)
1345         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1346
1347     /* XXX: fix surface size when cropping is implemented */
1348     info.profile    = priv->profile;
1349     info.entrypoint = priv->entrypoint;
1350     info.chroma_type = priv->chroma_type;
1351     info.width      = sps->width;
1352     info.height     = sps->height;
1353     info.ref_frames = dpb_size;
1354
1355     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1356         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1357     priv->has_context = TRUE;
1358
1359     /* Reset DPB */
1360     if (!dpb_reset(decoder, dpb_size))
1361         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1362
1363     /* Reset MVC data */
1364     if (!mvc_reset(decoder))
1365         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1366     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1367 }
1368
1369 static void
1370 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1371     const GstH264SPS *sps)
1372 {
1373     guint i;
1374
1375     /* There are always 6 4x4 scaling lists */
1376     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1377     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1378
1379     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1380         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1381             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1382 }
1383
1384 static void
1385 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1386     const GstH264SPS *sps)
1387 {
1388     guint i, n;
1389
1390     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1391     if (!pps->transform_8x8_mode_flag)
1392         return;
1393
1394     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1395     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1396
1397     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1398     for (i = 0; i < n; i++) {
1399         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1400             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1401     }
1402 }
1403
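/* Fills the VA IQ matrix buffer from the active PPS scaling lists */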
1404 static GstVaapiDecoderStatus
1405 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1406 {
1407     GstVaapiPicture * const base_picture = &picture->base;
1408     GstH264PPS * const pps = get_pps(decoder);
1409     GstH264SPS * const sps = get_sps(decoder);
1410     VAIQMatrixBufferH264 *iq_matrix;
1411
1412     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1413     if (!base_picture->iq_matrix) {
1414         GST_ERROR("failed to allocate IQ matrix");
1415         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1416     }
1417     iq_matrix = base_picture->iq_matrix->param;
1418
1419     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingList8x8[]
1420        is not large enough to hold lists for 4:4:4 */
1421     if (sps->chroma_format_idc == 3)
1422         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1423
1424     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1425     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1426
1427     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1428 }
1429
1430 static inline gboolean
1431 is_valid_state(guint state, guint ref_state)
1432 {
1433     return (state & ref_state) == ref_state;
1434 }
1435
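/* Completes the current picture: reference picture marking, DPB storage and submission to the hardware for decoding */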
1436 static GstVaapiDecoderStatus
1437 decode_current_picture(GstVaapiDecoderH264 *decoder)
1438 {
1439     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1440     GstVaapiPictureH264 * const picture = priv->current_picture;
1441
1442     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1443         goto drop_frame;
1444     priv->decoder_state = 0;
1445
1446     if (!picture)
1447         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1448
1449     if (!exec_ref_pic_marking(decoder, picture))
1450         goto error;
1451     if (!dpb_add(decoder, picture))
1452         goto error;
1453     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1454         goto error;
1455     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1456     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1457
1458 error:
1459     /* XXX: fix for cases where first field failed to be decoded */
1460     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1461     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1462
1463 drop_frame:
1464     priv->decoder_state = 0;
1465     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1466 }
1467
1468 static GstVaapiDecoderStatus
1469 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1470 {
1471     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1472     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1473     GstH264SPS * const sps = &pi->data.sps;
1474     GstH264ParserResult result;
1475
1476     GST_DEBUG("parse SPS");
1477
1478     priv->parser_state = 0;
1479
1480     /* Variables that don't have inferred values per the H.264
1481        standard but that should get a default value anyway */
1482     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1483
1484     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1485     if (result != GST_H264_PARSER_OK)
1486         return get_status(result);
1487
1488     /* Reset defaults */
1489     priv->max_views = 1;
1490
1491     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1492     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1493 }
1494
1495 static GstVaapiDecoderStatus
1496 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1497 {
1498     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1499     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1500     GstH264SPS * const sps = &pi->data.sps;
1501     GstH264ParserResult result;
1502
1503     GST_DEBUG("parse subset SPS");
1504
1505     /* Variables that don't have inferred values per the H.264
1506        standard but that should get a default value anyway */
1507     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1508
1509     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1510         TRUE);
1511     if (result != GST_H264_PARSER_OK)
1512         return get_status(result);
1513
1514     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1515     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1516 }
1517
1518 static GstVaapiDecoderStatus
1519 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1520 {
1521     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1522     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1523     GstH264PPS * const pps = &pi->data.pps;
1524     GstH264ParserResult result;
1525
1526     GST_DEBUG("parse PPS");
1527
1528     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1529
1530     /* Variables that don't have inferred values per the H.264
1531        standard but that should get a default value anyway */
1532     pps->slice_group_map_type = 0;
1533     pps->slice_group_change_rate_minus1 = 0;
1534
1535     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1536     if (result != GST_H264_PARSER_OK)
1537         return get_status(result);
1538
1539     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1540     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1541 }
1542
1543 static GstVaapiDecoderStatus
1544 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1545 {
1546     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1547     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1548     GArray ** const sei_ptr = &pi->data.sei;
1549     GstH264ParserResult result;
1550
1551     GST_DEBUG("parse SEI");
1552
1553     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1554     if (result != GST_H264_PARSER_OK) {
1555         GST_WARNING("failed to parse SEI messages");
1556         return get_status(result);
1557     }
1558     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1559 }
1560
1561 static GstVaapiDecoderStatus
1562 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1563 {
1564     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1565     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1566     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1567     GstH264NalUnit * const nalu = &pi->nalu;
1568     GstH264SPS *sps;
1569     GstH264ParserResult result;
1570     guint num_views;
1571
1572     GST_DEBUG("parse slice");
1573
1574     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1575                            GST_H264_VIDEO_STATE_GOT_PPS);
1576
1577     /* Propagate Prefix NAL unit info, if necessary */
1578     switch (nalu->type) {
1579     case GST_H264_NAL_SLICE:
1580     case GST_H264_NAL_SLICE_IDR: {
1581         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1582         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1583             /* MVC sequences shall have a Prefix NAL unit immediately
1584                preceding this NAL unit */
1585             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1586             pi->nalu.extension = prev_pi->nalu.extension;
1587         }
1588         else {
1589             /* In the very unlikely case there is no Prefix NAL unit
1590                immediately preceding this NAL unit, try to infer some
1591                defaults (H.7.4.1.1) */
1592             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1593             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1594             nalu->idr_pic_flag = !mvc->non_idr_flag;
1595             mvc->priority_id = 0;
1596             mvc->view_id = 0;
1597             mvc->temporal_id = 0;
1598             mvc->anchor_pic_flag = 0;
1599             mvc->inter_view_flag = 1;
1600         }
1601         break;
1602     }
1603     }
1604
1605     /* Variables that don't have inferred values per the H.264
1606        standard but that should get a default value anyway */
1607     slice_hdr->cabac_init_idc = 0;
1608     slice_hdr->direct_spatial_mv_pred_flag = 0;
1609
1610     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1611         slice_hdr, TRUE, TRUE);
1612     if (result != GST_H264_PARSER_OK)
1613         return get_status(result);
1614
1615     sps = slice_hdr->pps->sequence;
1616
1617     /* Update MVC data */
1618     num_views = get_num_views(sps);
1619     if (priv->max_views < num_views) {
1620         priv->max_views = num_views;
1621         GST_DEBUG("maximum number of views changed to %u", num_views);
1622     }
1623     pi->view_id = get_view_id(&pi->nalu);
1624     pi->voc = get_view_order_index(sps, pi->view_id);
1625
1626     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1627     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1628 }
1629
1630 static GstVaapiDecoderStatus
1631 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1632 {
1633     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1634     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1635     GstH264SPS * const sps = &pi->data.sps;
1636
1637     GST_DEBUG("decode SPS");
1638
1639     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1640     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1641 }
1642
1643 static GstVaapiDecoderStatus
1644 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1645 {
1646     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1647     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1648     GstH264SPS * const sps = &pi->data.sps;
1649
1650     GST_DEBUG("decode subset SPS");
1651
1652     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1653     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1654 }
1655
1656 static GstVaapiDecoderStatus
1657 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1658 {
1659     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1660     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1661     GstH264PPS * const pps = &pi->data.pps;
1662
1663     GST_DEBUG("decode PPS");
1664
1665     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1666     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1667 }
1668
1669 static GstVaapiDecoderStatus
1670 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1671 {
1672     GstVaapiDecoderStatus status;
1673
1674     GST_DEBUG("decode sequence-end");
1675
1676     status = decode_current_picture(decoder);
1677     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1678         return status;
1679
1680     dpb_flush(decoder, NULL);
1681     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1682 }
1683
1684 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1685 static void
1686 init_picture_poc_0(
1687     GstVaapiDecoderH264 *decoder,
1688     GstVaapiPictureH264 *picture,
1689     GstH264SliceHdr     *slice_hdr
1690 )
1691 {
1692     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1693     GstH264SPS * const sps = get_sps(decoder);
1694     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1695     gint32 temp_poc;
1696
1697     GST_DEBUG("decode picture order count type 0");
1698
1699     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1700         priv->prev_poc_msb = 0;
1701         priv->prev_poc_lsb = 0;
1702     }
1703     else if (priv->prev_pic_has_mmco5) {
1704         priv->prev_poc_msb = 0;
1705         priv->prev_poc_lsb =
1706             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1707              0 : priv->field_poc[TOP_FIELD]);
1708     }
1709     else {
1710         priv->prev_poc_msb = priv->poc_msb;
1711         priv->prev_poc_lsb = priv->poc_lsb;
1712     }
1713
1714     // (8-3)
1715     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1716     if (priv->poc_lsb < priv->prev_poc_lsb &&
1717         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1718         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1719     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1720              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1721         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1722     else
1723         priv->poc_msb = priv->prev_poc_msb;
1724
1725     temp_poc = priv->poc_msb + priv->poc_lsb;
1726     switch (picture->structure) {
1727     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1728         // (8-4, 8-5)
1729         priv->field_poc[TOP_FIELD] = temp_poc;
1730         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1731             slice_hdr->delta_pic_order_cnt_bottom;
1732         break;
1733     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1734         // (8-4)
1735         priv->field_poc[TOP_FIELD] = temp_poc;
1736         break;
1737     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1738         // (8-5)
1739         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1740         break;
1741     }
1742 }
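/* Worked example for the POC type 0 computation above (illustrative
 * values only): with log2_max_pic_order_cnt_lsb_minus4 = 0,
 * MaxPicOrderCntLsb is 16. If the previous reference picture had
 * poc_msb = 0 and poc_lsb = 14 and the current slice carries
 * pic_order_cnt_lsb = 2, then prev_poc_lsb - poc_lsb = 12 >=
 * MaxPicOrderCntLsb / 2, so the lsb has wrapped and poc_msb becomes
 * 0 + 16 = 16, giving temp_poc = 18. A jump the other way, from
 * poc_lsb = 2 to 14, would instead subtract MaxPicOrderCntLsb. */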
1743
1744 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1745 static void
1746 init_picture_poc_1(
1747     GstVaapiDecoderH264 *decoder,
1748     GstVaapiPictureH264 *picture,
1749     GstH264SliceHdr     *slice_hdr
1750 )
1751 {
1752     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1753     GstH264SPS * const sps = get_sps(decoder);
1754     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1755     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1756     guint i;
1757
1758     GST_DEBUG("decode picture order count type 1");
1759
1760     if (priv->prev_pic_has_mmco5)
1761         prev_frame_num_offset = 0;
1762     else
1763         prev_frame_num_offset = priv->frame_num_offset;
1764
1765     // (8-6)
1766     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1767         priv->frame_num_offset = 0;
1768     else if (priv->prev_frame_num > priv->frame_num)
1769         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1770     else
1771         priv->frame_num_offset = prev_frame_num_offset;
1772
1773     // (8-7)
1774     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1775         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1776     else
1777         abs_frame_num = 0;
1778     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1779         abs_frame_num = abs_frame_num - 1;
1780
1781     if (abs_frame_num > 0) {
1782         gint32 expected_delta_per_poc_cycle;
1783         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1784
1785         expected_delta_per_poc_cycle = 0;
1786         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1787             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1788
1789         // (8-8)
1790         poc_cycle_cnt = (abs_frame_num - 1) /
1791             sps->num_ref_frames_in_pic_order_cnt_cycle;
1792         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1793             sps->num_ref_frames_in_pic_order_cnt_cycle;
1794
1795         // (8-9)
1796         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1797         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1798             expected_poc += sps->offset_for_ref_frame[i];
1799     }
1800     else
1801         expected_poc = 0;
1802     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1803         expected_poc += sps->offset_for_non_ref_pic;
1804
1805     // (8-10)
1806     switch (picture->structure) {
1807     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1808         priv->field_poc[TOP_FIELD] = expected_poc +
1809             slice_hdr->delta_pic_order_cnt[0];
1810         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1811             sps->offset_for_top_to_bottom_field +
1812             slice_hdr->delta_pic_order_cnt[1];
1813         break;
1814     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1815         priv->field_poc[TOP_FIELD] = expected_poc +
1816             slice_hdr->delta_pic_order_cnt[0];
1817         break;
1818     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1819         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1820             sps->offset_for_top_to_bottom_field +
1821             slice_hdr->delta_pic_order_cnt[0];
1822         break;
1823     }
1824 }
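/* Worked example for the POC type 1 computation above (illustrative
 * values only): assume num_ref_frames_in_pic_order_cnt_cycle = 2 and
 * offset_for_ref_frame[] = { 2, 3 }, so expected_delta_per_poc_cycle = 5.
 * A reference frame with abs_frame_num = 4 gives poc_cycle_cnt = 1 and
 * frame_num_in_poc_cycle = 1, hence expected_poc = 1 * 5 + 2 + 3 = 10;
 * delta_pic_order_cnt[] and, for the bottom field,
 * offset_for_top_to_bottom_field are then added on top of that. */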
1825
1826 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1827 static void
1828 init_picture_poc_2(
1829     GstVaapiDecoderH264 *decoder,
1830     GstVaapiPictureH264 *picture,
1831     GstH264SliceHdr     *slice_hdr
1832 )
1833 {
1834     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1835     GstH264SPS * const sps = get_sps(decoder);
1836     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1837     gint32 prev_frame_num_offset, temp_poc;
1838
1839     GST_DEBUG("decode picture order count type 2");
1840
1841     if (priv->prev_pic_has_mmco5)
1842         prev_frame_num_offset = 0;
1843     else
1844         prev_frame_num_offset = priv->frame_num_offset;
1845
1846     // (8-11)
1847     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1848         priv->frame_num_offset = 0;
1849     else if (priv->prev_frame_num > priv->frame_num)
1850         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1851     else
1852         priv->frame_num_offset = prev_frame_num_offset;
1853
1854     // (8-12)
1855     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1856         temp_poc = 0;
1857     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1858         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1859     else
1860         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1861
1862     // (8-13)
1863     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1864         priv->field_poc[TOP_FIELD] = temp_poc;
1865     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1866         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1867 }
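/* Worked example for the POC type 2 computation above (illustrative
 * values only): with frame_num_offset = 0 and frame_num = 3, a reference
 * picture gets temp_poc = 6, a non-reference picture gets 5 and an IDR
 * picture always gets 0; for a frame picture both fields receive the
 * same value. */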
1868
1869 /* 8.2.1 - Decoding process for picture order count */
1870 static void
1871 init_picture_poc(
1872     GstVaapiDecoderH264 *decoder,
1873     GstVaapiPictureH264 *picture,
1874     GstH264SliceHdr     *slice_hdr
1875 )
1876 {
1877     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1878     GstH264SPS * const sps = get_sps(decoder);
1879
1880     switch (sps->pic_order_cnt_type) {
1881     case 0:
1882         init_picture_poc_0(decoder, picture, slice_hdr);
1883         break;
1884     case 1:
1885         init_picture_poc_1(decoder, picture, slice_hdr);
1886         break;
1887     case 2:
1888         init_picture_poc_2(decoder, picture, slice_hdr);
1889         break;
1890     }
1891
1892     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1893         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1894     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1895         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1896     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1897 }
1898
1899 static int
1900 compare_picture_pic_num_dec(const void *a, const void *b)
1901 {
1902     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1903     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1904
1905     return picB->pic_num - picA->pic_num;
1906 }
1907
1908 static int
1909 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1910 {
1911     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1912     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1913
1914     return picA->long_term_pic_num - picB->long_term_pic_num;
1915 }
1916
1917 static int
1918 compare_picture_poc_dec(const void *a, const void *b)
1919 {
1920     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1921     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1922
1923     return picB->base.poc - picA->base.poc;
1924 }
1925
1926 static int
1927 compare_picture_poc_inc(const void *a, const void *b)
1928 {
1929     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1930     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1931
1932     return picA->base.poc - picB->base.poc;
1933 }
1934
1935 static int
1936 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1937 {
1938     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1939     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1940
1941     return picB->frame_num_wrap - picA->frame_num_wrap;
1942 }
1943
1944 static int
1945 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1946 {
1947     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1948     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1949
1950     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1951 }
1952
1953 /* 8.2.4.1 - Decoding process for picture numbers */
1954 static void
1955 init_picture_refs_pic_num(
1956     GstVaapiDecoderH264 *decoder,
1957     GstVaapiPictureH264 *picture,
1958     GstH264SliceHdr     *slice_hdr
1959 )
1960 {
1961     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1962     GstH264SPS * const sps = get_sps(decoder);
1963     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1964     guint i;
1965
1966     GST_DEBUG("decode picture numbers");
1967
1968     for (i = 0; i < priv->short_ref_count; i++) {
1969         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1970
1971         // (H.8.2)
1972         if (pic->base.view_id != picture->base.view_id)
1973             continue;
1974
1975         // (8-27)
1976         if (pic->frame_num > priv->frame_num)
1977             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1978         else
1979             pic->frame_num_wrap = pic->frame_num;
1980
1981         // (8-28, 8-30, 8-31)
1982         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1983             pic->pic_num = pic->frame_num_wrap;
1984         else {
1985             if (pic->structure == picture->structure)
1986                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1987             else
1988                 pic->pic_num = 2 * pic->frame_num_wrap;
1989         }
1990     }
1991
1992     for (i = 0; i < priv->long_ref_count; i++) {
1993         GstVaapiPictureH264 * const pic = priv->long_ref[i];
1994
1995         // (H.8.2)
1996         if (pic->base.view_id != picture->base.view_id)
1997             continue;
1998
1999         // (8-29, 8-32, 8-33)
2000         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2001             pic->long_term_pic_num = pic->long_term_frame_idx;
2002         else {
2003             if (pic->structure == picture->structure)
2004                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2005             else
2006                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2007         }
2008     }
2009 }
2010
2011 #define SORT_REF_LIST(list, n, compare_func) \
2012     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
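/* For instance, SORT_REF_LIST(ref_list, i, pic_num_dec) expands to
 * qsort(ref_list, i, sizeof(*(ref_list)), compare_picture_pic_num_dec),
 * i.e. the compare_picture_*() helpers above are plain qsort(3)
 * comparators selected by name suffix. */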
2013
2014 static void
2015 init_picture_refs_fields_1(
2016     guint                picture_structure,
2017     GstVaapiPictureH264 *RefPicList[32],
2018     guint               *RefPicList_count,
2019     GstVaapiPictureH264 *ref_list[32],
2020     guint                ref_list_count
2021 )
2022 {
2023     guint i, j, n;
2024
2025     i = 0;
2026     j = 0;
2027     n = *RefPicList_count;
2028     do {
2029         g_assert(n < 32);
2030         for (; i < ref_list_count; i++) {
2031             if (ref_list[i]->structure == picture_structure) {
2032                 RefPicList[n++] = ref_list[i++];
2033                 break;
2034             }
2035         }
2036         for (; j < ref_list_count; j++) {
2037             if (ref_list[j]->structure != picture_structure) {
2038                 RefPicList[n++] = ref_list[j++];
2039                 break;
2040             }
2041         }
2042     } while (i < ref_list_count || j < ref_list_count);
2043     *RefPicList_count = n;
2044 }
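/* Worked example for the field interleaving above (illustrative): when
 * decoding a TOP field with ref_list = { bot_a, top_a, bot_b, top_b },
 * the resulting list is { top_a, bot_a, top_b, bot_b }: fields with the
 * same parity as the current picture are picked first, alternating with
 * opposite-parity fields, as per 8.2.4.2.5. */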
2045
2046 static inline void
2047 init_picture_refs_fields(
2048     GstVaapiPictureH264 *picture,
2049     GstVaapiPictureH264 *RefPicList[32],
2050     guint               *RefPicList_count,
2051     GstVaapiPictureH264 *short_ref[32],
2052     guint                short_ref_count,
2053     GstVaapiPictureH264 *long_ref[32],
2054     guint                long_ref_count
2055 )
2056 {
2057     guint n = 0;
2058
2059     /* 8.2.4.2.5 - reference picture lists in fields */
2060     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2061         short_ref, short_ref_count);
2062     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2063         long_ref, long_ref_count);
2064     *RefPicList_count = n;
2065 }
2066
2067 /* Finds the inter-view reference picture with the supplied view id */
2068 static GstVaapiPictureH264 *
2069 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2070 {
2071     GPtrArray * const inter_views = decoder->priv.inter_views;
2072     guint i;
2073
2074     for (i = 0; i < inter_views->len; i++) {
2075         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2076         if (picture->base.view_id == view_id)
2077             return picture;
2078     }
2079
2080     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2081         view_id);
2082     return NULL;
2083 }
2084
2085 /* Checks whether the view id exists in the supplied list of view ids */
2086 static gboolean
2087 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2088 {
2089     guint i;
2090
2091     for (i = 0; i < num_view_ids; i++) {
2092         if (view_ids[i] == view_id)
2093             return TRUE;
2094     }
2095     return FALSE;
2096 }
2097
2098 static gboolean
2099 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2100     gboolean is_anchor)
2101 {
2102     if (is_anchor)
2103         return (find_view_id(view_id, view->anchor_ref_l0,
2104                     view->num_anchor_refs_l0) ||
2105                 find_view_id(view_id, view->anchor_ref_l1,
2106                     view->num_anchor_refs_l1));
2107
2108     return (find_view_id(view_id, view->non_anchor_ref_l0,
2109                 view->num_non_anchor_refs_l0) ||
2110             find_view_id(view_id, view->non_anchor_ref_l1,
2111                 view->num_non_anchor_refs_l1));
2112 }
2113
2114 /* Checks whether the inter-view reference picture with the supplied
2115    view id is used for decoding the current view component picture */
2116 static gboolean
2117 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2118     guint16 view_id, GstVaapiPictureH264 *picture)
2119 {
2120     const GstH264SPS * const sps = get_sps(decoder);
2121     gboolean is_anchor;
2122
2123     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2124         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2125         return FALSE;
2126
2127     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2128     return find_view_id_in_view(view_id,
2129         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2130 }
2131
2132 /* Checks whether the supplied inter-view reference picture is used
2133    for decoding the next view component pictures */
2134 static gboolean
2135 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2136     GstVaapiPictureH264 *picture)
2137 {
2138     const GstH264SPS * const sps = get_sps(decoder);
2139     gboolean is_anchor;
2140     guint i, num_views;
2141
2142     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2143         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2144         return FALSE;
2145
2146     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2147     num_views = sps->extension.mvc.num_views_minus1 + 1;
2148     for (i = picture->base.voc + 1; i < num_views; i++) {
2149         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2150         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2151             return TRUE;
2152     }
2153     return FALSE;
2154 }
2155
2156 /* H.8.2.1 - Initialization process for inter-view prediction references */
2157 static void
2158 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2159     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2160     const guint16 *view_ids, guint num_view_ids)
2161 {
2162     guint j, n;
2163
2164     n = *ref_list_count_ptr;
2165     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2166         GstVaapiPictureH264 * const pic =
2167             find_inter_view_reference(decoder, view_ids[j]);
2168         if (pic)
2169             ref_list[n++] = pic;
2170     }
2171     *ref_list_count_ptr = n;
2172 }
2173
2174 static inline void
2175 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2176     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2177 {
2178     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2179     const GstH264SPS * const sps = get_sps(decoder);
2180     const GstH264SPSExtMVCView *view;
2181
2182     GST_DEBUG("initialize reference picture list for inter-view prediction");
2183
2184     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2185         return;
2186     view = &sps->extension.mvc.view[picture->base.voc];
2187
2188 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2189         init_picture_refs_mvc_1(decoder,                                \
2190             priv->RefPicList##ref_list,                                 \
2191             &priv->RefPicList##ref_list##_count,                        \
2192             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2193             view->view_list##_l##ref_list,                              \
2194             view->num_##view_list##s_l##ref_list);                      \
2195     } while (0)
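    /* For instance, INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref) expands to
     *   init_picture_refs_mvc_1(decoder, priv->RefPicList0,
     *       &priv->RefPicList0_count,
     *       slice_hdr->num_ref_idx_l0_active_minus1 + 1,
     *       view->anchor_ref_l0, view->num_anchor_refs_l0);
     * i.e. it appends the available inter-view references after the
     * already initialized entries of RefPicList0, up to the active
     * reference count. */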
2196
2197     if (list == 0) {
2198         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2199             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2200         else
2201             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2202     }
2203     else {
2204         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2205             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2206         else
2207             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2208     }
2209
2210 #undef INVOKE_INIT_PICTURE_REFS_MVC
2211 }
2212
2213 static void
2214 init_picture_refs_p_slice(
2215     GstVaapiDecoderH264 *decoder,
2216     GstVaapiPictureH264 *picture,
2217     GstH264SliceHdr     *slice_hdr
2218 )
2219 {
2220     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2221     GstVaapiPictureH264 **ref_list;
2222     guint i;
2223
2224     GST_DEBUG("decode reference picture list for P and SP slices");
2225
2226     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2227         /* 8.2.4.2.1 - P and SP slices in frames */
2228         if (priv->short_ref_count > 0) {
2229             ref_list = priv->RefPicList0;
2230             for (i = 0; i < priv->short_ref_count; i++)
2231                 ref_list[i] = priv->short_ref[i];
2232             SORT_REF_LIST(ref_list, i, pic_num_dec);
2233             priv->RefPicList0_count += i;
2234         }
2235
2236         if (priv->long_ref_count > 0) {
2237             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2238             for (i = 0; i < priv->long_ref_count; i++)
2239                 ref_list[i] = priv->long_ref[i];
2240             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2241             priv->RefPicList0_count += i;
2242         }
2243     }
2244     else {
2245         /* 8.2.4.2.2 - P and SP slices in fields */
2246         GstVaapiPictureH264 *short_ref[32];
2247         guint short_ref_count = 0;
2248         GstVaapiPictureH264 *long_ref[32];
2249         guint long_ref_count = 0;
2250
2251         if (priv->short_ref_count > 0) {
2252             for (i = 0; i < priv->short_ref_count; i++)
2253                 short_ref[i] = priv->short_ref[i];
2254             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2255             short_ref_count = i;
2256         }
2257
2258         if (priv->long_ref_count > 0) {
2259             for (i = 0; i < priv->long_ref_count; i++)
2260                 long_ref[i] = priv->long_ref[i];
2261             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2262             long_ref_count = i;
2263         }
2264
2265         init_picture_refs_fields(
2266             picture,
2267             priv->RefPicList0, &priv->RefPicList0_count,
2268             short_ref,          short_ref_count,
2269             long_ref,           long_ref_count
2270         );
2271     }
2272
2273     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2274         /* RefPicList0 */
2275         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2276     }
2277 }
2278
2279 static void
2280 init_picture_refs_b_slice(
2281     GstVaapiDecoderH264 *decoder,
2282     GstVaapiPictureH264 *picture,
2283     GstH264SliceHdr     *slice_hdr
2284 )
2285 {
2286     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2287     GstVaapiPictureH264 **ref_list;
2288     guint i, n;
2289
2290     GST_DEBUG("decode reference picture list for B slices");
2291
2292     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2293         /* 8.2.4.2.3 - B slices in frames */
2294
2295         /* RefPicList0 */
2296         if (priv->short_ref_count > 0) {
2297             // 1. Short-term references
2298             ref_list = priv->RefPicList0;
2299             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2300                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2301                     ref_list[n++] = priv->short_ref[i];
2302             }
2303             SORT_REF_LIST(ref_list, n, poc_dec);
2304             priv->RefPicList0_count += n;
2305
2306             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2307             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2308                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2309                     ref_list[n++] = priv->short_ref[i];
2310             }
2311             SORT_REF_LIST(ref_list, n, poc_inc);
2312             priv->RefPicList0_count += n;
2313         }
2314
2315         if (priv->long_ref_count > 0) {
2316             // 2. Long-term references
2317             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2318             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2319                 ref_list[n++] = priv->long_ref[i];
2320             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2321             priv->RefPicList0_count += n;
2322         }
2323
2324         /* RefPicList1 */
2325         if (priv->short_ref_count > 0) {
2326             // 1. Short-term references
2327             ref_list = priv->RefPicList1;
2328             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2329                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2330                     ref_list[n++] = priv->short_ref[i];
2331             }
2332             SORT_REF_LIST(ref_list, n, poc_inc);
2333             priv->RefPicList1_count += n;
2334
2335             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2336             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2337                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2338                     ref_list[n++] = priv->short_ref[i];
2339             }
2340             SORT_REF_LIST(ref_list, n, poc_dec);
2341             priv->RefPicList1_count += n;
2342         }
2343
2344         if (priv->long_ref_count > 0) {
2345             // 2. Long-term references
2346             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2347             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2348                 ref_list[n++] = priv->long_ref[i];
2349             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2350             priv->RefPicList1_count += n;
2351         }
2352     }
2353     else {
2354         /* 8.2.4.2.4 - B slices in fields */
2355         GstVaapiPictureH264 *short_ref0[32];
2356         guint short_ref0_count = 0;
2357         GstVaapiPictureH264 *short_ref1[32];
2358         guint short_ref1_count = 0;
2359         GstVaapiPictureH264 *long_ref[32];
2360         guint long_ref_count = 0;
2361
2362         /* refFrameList0ShortTerm */
2363         if (priv->short_ref_count > 0) {
2364             ref_list = short_ref0;
2365             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2366                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2367                     ref_list[n++] = priv->short_ref[i];
2368             }
2369             SORT_REF_LIST(ref_list, n, poc_dec);
2370             short_ref0_count += n;
2371
2372             ref_list = &short_ref0[short_ref0_count];
2373             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2374                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2375                     ref_list[n++] = priv->short_ref[i];
2376             }
2377             SORT_REF_LIST(ref_list, n, poc_inc);
2378             short_ref0_count += n;
2379         }
2380
2381         /* refFrameList1ShortTerm */
2382         if (priv->short_ref_count > 0) {
2383             ref_list = short_ref1;
2384             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2385                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2386                     ref_list[n++] = priv->short_ref[i];
2387             }
2388             SORT_REF_LIST(ref_list, n, poc_inc);
2389             short_ref1_count += n;
2390
2391             ref_list = &short_ref1[short_ref1_count];
2392             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2393                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2394                     ref_list[n++] = priv->short_ref[i];
2395             }
2396             SORT_REF_LIST(ref_list, n, poc_dec);
2397             short_ref1_count += n;
2398         }
2399
2400         /* refFrameListLongTerm */
2401         if (priv->long_ref_count > 0) {
2402             for (i = 0; i < priv->long_ref_count; i++)
2403                 long_ref[i] = priv->long_ref[i];
2404             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2405             long_ref_count = i;
2406         }
2407
2408         init_picture_refs_fields(
2409             picture,
2410             priv->RefPicList0, &priv->RefPicList0_count,
2411             short_ref0,         short_ref0_count,
2412             long_ref,           long_ref_count
2413         );
2414
2415         init_picture_refs_fields(
2416             picture,
2417             priv->RefPicList1, &priv->RefPicList1_count,
2418             short_ref1,         short_ref1_count,
2419             long_ref,           long_ref_count
2420         );
2421     }
2422
2423     /* If RefPicList1 has more than one entry and is identical to
2424        RefPicList0, swap its first two entries */
2425     if (priv->RefPicList1_count > 1 &&
2426         priv->RefPicList1_count == priv->RefPicList0_count &&
2427         memcmp(priv->RefPicList0, priv->RefPicList1,
2428                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2429         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2430         priv->RefPicList1[0] = priv->RefPicList1[1];
2431         priv->RefPicList1[1] = tmp;
2432     }
2433
2434     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2435         /* RefPicList0 */
2436         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2437
2438         /* RefPicList1 */
2439         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2440     }
2441 }
2442
2443 #undef SORT_REF_LIST
2444
2445 static gint
2446 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2447 {
2448     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2449     guint i;
2450
2451     for (i = 0; i < priv->short_ref_count; i++) {
2452         if (priv->short_ref[i]->pic_num == pic_num)
2453             return i;
2454     }
2455     GST_ERROR("found no short-term reference picture with PicNum = %d",
2456               pic_num);
2457     return -1;
2458 }
2459
2460 static gint
2461 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2462 {
2463     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2464     guint i;
2465
2466     for (i = 0; i < priv->long_ref_count; i++) {
2467         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2468             return i;
2469     }
2470     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2471               long_term_pic_num);
2472     return -1;
2473 }
2474
2475 static void
2476 exec_picture_refs_modification_1(
2477     GstVaapiDecoderH264           *decoder,
2478     GstVaapiPictureH264           *picture,
2479     GstH264SliceHdr               *slice_hdr,
2480     guint                          list
2481 )
2482 {
2483     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2484     GstH264SPS * const sps = get_sps(decoder);
2485     GstH264RefPicListModification *ref_pic_list_modification;
2486     guint num_ref_pic_list_modifications;
2487     GstVaapiPictureH264 **ref_list;
2488     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2489     const guint16 *view_ids = NULL;
2490     guint i, j, n, num_refs, num_view_ids = 0;
2491     gint found_ref_idx;
2492     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2493
2494     GST_DEBUG("modification process of reference picture list %u", list);
2495
2496     if (list == 0) {
2497         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2498         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2499         ref_list                       = priv->RefPicList0;
2500         ref_list_count_ptr             = &priv->RefPicList0_count;
2501         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2502
2503         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2504             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2505             const GstH264SPSExtMVCView * const view =
2506                 &sps->extension.mvc.view[picture->base.voc];
2507             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2508                 view_ids = view->anchor_ref_l0;
2509                 num_view_ids = view->num_anchor_refs_l0;
2510             }
2511             else {
2512                 view_ids = view->non_anchor_ref_l0;
2513                 num_view_ids = view->num_non_anchor_refs_l0;
2514             }
2515         }
2516     }
2517     else {
2518         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2519         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2520         ref_list                       = priv->RefPicList1;
2521         ref_list_count_ptr             = &priv->RefPicList1_count;
2522         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2523
2524         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2525             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2526             const GstH264SPSExtMVCView * const view =
2527                 &sps->extension.mvc.view[picture->base.voc];
2528             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2529                 view_ids = view->anchor_ref_l1;
2530                 num_view_ids = view->num_anchor_refs_l1;
2531             }
2532             else {
2533                 view_ids = view->non_anchor_ref_l1;
2534                 num_view_ids = view->num_non_anchor_refs_l1;
2535             }
2536         }
2537     }
2538     ref_list_count = *ref_list_count_ptr;
2539
2540     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2541         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2542         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2543     }
2544     else {
2545         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2546         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2547     }
2548
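    /* Worked example for the short-term case below (illustrative values
     * only): for a frame with MaxPicNum = MaxFrameNum = 16 and
     * CurrPicNum = 1, modification_of_pic_nums_idc == 0 with
     * abs_diff_pic_num_minus1 = 1 gives picNumNoWrap = 1 - 2 = -1,
     * wrapped to 15; since 15 > CurrPicNum, the final picNum is
     * 15 - 16 = -1, i.e. the reference frame whose FrameNumWrap is -1
     * (frame_num 15 before the wrap). */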
2549     picNumPred = CurrPicNum;
2550     picViewIdxPred = -1;
2551
2552     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2553         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2554         if (l->modification_of_pic_nums_idc == 3)
2555             break;
2556
2557         /* 8.2.4.3.1 - Short-term reference pictures */
2558         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2559             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2560             gint32 picNum, picNumNoWrap;
2561
2562             // (8-34)
2563             if (l->modification_of_pic_nums_idc == 0) {
2564                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2565                 if (picNumNoWrap < 0)
2566                     picNumNoWrap += MaxPicNum;
2567             }
2568
2569             // (8-35)
2570             else {
2571                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2572                 if (picNumNoWrap >= MaxPicNum)
2573                     picNumNoWrap -= MaxPicNum;
2574             }
2575             picNumPred = picNumNoWrap;
2576
2577             // (8-36)
2578             picNum = picNumNoWrap;
2579             if (picNum > CurrPicNum)
2580                 picNum -= MaxPicNum;
2581
2582             // (8-37)
2583             for (j = num_refs; j > ref_list_idx; j--)
2584                 ref_list[j] = ref_list[j - 1];
2585             found_ref_idx = find_short_term_reference(decoder, picNum);
2586             ref_list[ref_list_idx++] =
2587                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2588             n = ref_list_idx;
2589             for (j = ref_list_idx; j <= num_refs; j++) {
2590                 gint32 PicNumF;
2591                 if (!ref_list[j])
2592                     continue;
2593                 PicNumF =
2594                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2595                     ref_list[j]->pic_num : MaxPicNum;
2596                 if (PicNumF != picNum ||
2597                     ref_list[j]->base.view_id != picture->base.view_id)
2598                     ref_list[n++] = ref_list[j];
2599             }
2600         }
2601
2602         /* 8.2.4.3.2 - Long-term reference pictures */
2603         else if (l->modification_of_pic_nums_idc == 2) {
2604
2605             for (j = num_refs; j > ref_list_idx; j--)
2606                 ref_list[j] = ref_list[j - 1];
2607             found_ref_idx =
2608                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2609             ref_list[ref_list_idx++] =
2610                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2611             n = ref_list_idx;
2612             for (j = ref_list_idx; j <= num_refs; j++) {
2613                 gint32 LongTermPicNumF;
2614                 if (!ref_list[j])
2615                     continue;
2616                 LongTermPicNumF =
2617                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2618                     ref_list[j]->long_term_pic_num : INT_MAX;
2619                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2620                     ref_list[j]->base.view_id != picture->base.view_id)
2621                     ref_list[n++] = ref_list[j];
2622             }
2623         }
2624
2625         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2626         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2627                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2628                  (l->modification_of_pic_nums_idc == 4 ||
2629                   l->modification_of_pic_nums_idc == 5)) {
2630             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2631             gint32 picViewIdx, targetViewId;
2632
2633             // (H-6)
2634             if (l->modification_of_pic_nums_idc == 4) {
2635                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2636                 if (picViewIdx < 0)
2637                     picViewIdx += num_view_ids;
2638             }
2639
2640             // (H-7)
2641             else {
2642                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2643                 if (picViewIdx >= num_view_ids)
2644                     picViewIdx -= num_view_ids;
2645             }
2646             picViewIdxPred = picViewIdx;
2647
2648             // (H-8, H-9)
2649             targetViewId = view_ids[picViewIdx];
2650
2651             // (H-10)
2652             for (j = num_refs; j > ref_list_idx; j--)
2653                 ref_list[j] = ref_list[j - 1];
2654             ref_list[ref_list_idx++] =
2655                 find_inter_view_reference(decoder, targetViewId);
2656             n = ref_list_idx;
2657             for (j = ref_list_idx; j <= num_refs; j++) {
2658                 if (!ref_list[j])
2659                     continue;
2660                 if (ref_list[j]->base.view_id != targetViewId ||
2661                     ref_list[j]->base.poc != picture->base.poc)
2662                     ref_list[n++] = ref_list[j];
2663             }
2664         }
2665     }
2666
2667 #if DEBUG
2668     for (i = 0; i < num_refs; i++)
2669         if (!ref_list[i])
2670             GST_ERROR("list %u entry %u is empty", list, i);
2671 #endif
2672     *ref_list_count_ptr = num_refs;
2673 }
2674
2675 /* 8.2.4.3 - Modification process for reference picture lists */
2676 static void
2677 exec_picture_refs_modification(
2678     GstVaapiDecoderH264 *decoder,
2679     GstVaapiPictureH264 *picture,
2680     GstH264SliceHdr     *slice_hdr
2681 )
2682 {
2683     GST_DEBUG("execute ref_pic_list_modification()");
2684
2685     /* RefPicList0 */
2686     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2687         slice_hdr->ref_pic_list_modification_flag_l0)
2688         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2689
2690     /* RefPicList1 */
2691     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2692         slice_hdr->ref_pic_list_modification_flag_l1)
2693         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2694 }
2695
2696 static void
2697 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2698     GstVaapiPictureH264 *picture)
2699 {
2700     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2701     guint i, j, short_ref_count, long_ref_count;
2702
2703     short_ref_count = 0;
2704     long_ref_count  = 0;
2705     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2706         for (i = 0; i < priv->dpb_count; i++) {
2707             GstVaapiFrameStore * const fs = priv->dpb[i];
2708             GstVaapiPictureH264 *pic;
2709             if (!gst_vaapi_frame_store_has_frame(fs))
2710                 continue;
2711             pic = fs->buffers[0];
2712             if (pic->base.view_id != picture->base.view_id)
2713                 continue;
2714             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2715                 priv->short_ref[short_ref_count++] = pic;
2716             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2717                 priv->long_ref[long_ref_count++] = pic;
2718             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2719             pic->other_field = fs->buffers[1];
2720         }
2721     }
2722     else {
2723         for (i = 0; i < priv->dpb_count; i++) {
2724             GstVaapiFrameStore * const fs = priv->dpb[i];
2725             for (j = 0; j < fs->num_buffers; j++) {
2726                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2727                 if (pic->base.view_id != picture->base.view_id)
2728                     continue;
2729                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2730                     priv->short_ref[short_ref_count++] = pic;
2731                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2732                     priv->long_ref[long_ref_count++] = pic;
2733                 pic->structure = pic->base.structure;
2734                 pic->other_field = fs->buffers[j ^ 1];
2735             }
2736         }
2737     }
2738
2739     for (i = short_ref_count; i < priv->short_ref_count; i++)
2740         priv->short_ref[i] = NULL;
2741     priv->short_ref_count = short_ref_count;
2742
2743     for (i = long_ref_count; i < priv->long_ref_count; i++)
2744         priv->long_ref[i] = NULL;
2745     priv->long_ref_count = long_ref_count;
2746 }
2747
2748 static void
2749 init_picture_refs(
2750     GstVaapiDecoderH264 *decoder,
2751     GstVaapiPictureH264 *picture,
2752     GstH264SliceHdr     *slice_hdr
2753 )
2754 {
2755     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2756     guint i, num_refs;
2757
2758     init_picture_ref_lists(decoder, picture);
2759     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2760
2761     priv->RefPicList0_count = 0;
2762     priv->RefPicList1_count = 0;
2763
2764     switch (slice_hdr->type % 5) {
2765     case GST_H264_P_SLICE:
2766     case GST_H264_SP_SLICE:
2767         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2768         break;
2769     case GST_H264_B_SLICE:
2770         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2771         break;
2772     default:
2773         break;
2774     }
2775
2776     exec_picture_refs_modification(decoder, picture, slice_hdr);
2777
2778     switch (slice_hdr->type % 5) {
2779     case GST_H264_B_SLICE:
2780         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2781         for (i = priv->RefPicList1_count; i < num_refs; i++)
2782             priv->RefPicList1[i] = NULL;
2783         priv->RefPicList1_count = num_refs;
2784
2785         // fall-through
2786     case GST_H264_P_SLICE:
2787     case GST_H264_SP_SLICE:
2788         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2789         for (i = priv->RefPicList0_count; i < num_refs; i++)
2790             priv->RefPicList0[i] = NULL;
2791         priv->RefPicList0_count = num_refs;
2792         break;
2793     default:
2794         break;
2795     }
2796 }
2797
2798 static gboolean
2799 init_picture(
2800     GstVaapiDecoderH264 *decoder,
2801     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2802 {
2803     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2804     GstVaapiPicture * const base_picture = &picture->base;
2805     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2806
2807     priv->prev_frame_num        = priv->frame_num;
2808     priv->frame_num             = slice_hdr->frame_num;
2809     picture->frame_num          = priv->frame_num;
2810     picture->frame_num_wrap     = priv->frame_num;
2811     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2812     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2813     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2814     base_picture->view_id       = pi->view_id;
2815     base_picture->voc           = pi->voc;
2816
2817     /* Initialize extensions */
2818     switch (pi->nalu.extension_type) {
2819     case GST_H264_NAL_EXTENSION_MVC: {
2820         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2821
2822         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2823         if (mvc->inter_view_flag)
2824             GST_VAAPI_PICTURE_FLAG_SET(picture,
2825                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2826         if (mvc->anchor_pic_flag)
2827             GST_VAAPI_PICTURE_FLAG_SET(picture,
2828                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2829         break;
2830     }
2831     }
2832
2833     /* Reset decoder state for IDR pictures */
2834     if (pi->nalu.idr_pic_flag) {
2835         GST_DEBUG("<IDR>");
2836         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2837         dpb_flush(decoder, picture);
2838     }
2839
2840     /* Initialize picture structure */
2841     if (!slice_hdr->field_pic_flag)
2842         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2843     else {
2844         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2845         if (!slice_hdr->bottom_field_flag)
2846             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2847         else
2848             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2849     }
2850     picture->structure = base_picture->structure;
2851
2852     /* Initialize reference flags */
2853     if (pi->nalu.ref_idc) {
2854         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2855             &slice_hdr->dec_ref_pic_marking;
2856
2857         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2858             dec_ref_pic_marking->long_term_reference_flag)
2859             GST_VAAPI_PICTURE_FLAG_SET(picture,
2860                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2861         else
2862             GST_VAAPI_PICTURE_FLAG_SET(picture,
2863                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2864     }
2865
2866     init_picture_poc(decoder, picture, slice_hdr);
2867     return TRUE;
2868 }
2869
2870 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2871 static gboolean
2872 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2873 {
2874     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2875     GstH264SPS * const sps = get_sps(decoder);
2876     GstVaapiPictureH264 *ref_picture;
2877     guint i, m, max_num_ref_frames;
2878
2879     GST_DEBUG("reference picture marking process (sliding window)");
2880
2881     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2882         return TRUE;
2883
2884     max_num_ref_frames = sps->num_ref_frames;
2885     if (max_num_ref_frames == 0)
2886         max_num_ref_frames = 1;
2887     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2888         max_num_ref_frames <<= 1;
2889
2890     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2891         return TRUE;
2892     if (priv->short_ref_count < 1)
2893         return FALSE;
2894
2895     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2896         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2897         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2898             m = i;
2899     }
2900
2901     ref_picture = priv->short_ref[m];
2902     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2903     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2904
2905     /* Both fields need to be marked as "unused for reference", so
2906        remove the other field from the short_ref[] list as well */
2907     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2908         for (i = 0; i < priv->short_ref_count; i++) {
2909             if (priv->short_ref[i] == ref_picture->other_field) {
2910                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2911                 break;
2912             }
2913         }
2914     }
2915     return TRUE;
2916 }
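/* Worked example for the sliding window above (illustrative values only):
 * with sps->num_ref_frames = 4 and 3 short-term plus 1 long-term
 * reference frames already stored, marking a new reference frame evicts
 * the short-term picture with the smallest FrameNumWrap. For field
 * decoding the limit is doubled and both fields of the evicted frame are
 * removed from short_ref[]. */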
2917
2918 static inline gint32
2919 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2920 {
2921     gint32 pic_num;
2922
2923     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2924         pic_num = picture->frame_num_wrap;
2925     else
2926         pic_num = 2 * picture->frame_num_wrap + 1;
2927     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2928     return pic_num;
2929 }
2930
2931 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2932 static void
2933 exec_ref_pic_marking_adaptive_mmco_1(
2934     GstVaapiDecoderH264  *decoder,
2935     GstVaapiPictureH264  *picture,
2936     GstH264RefPicMarking *ref_pic_marking
2937 )
2938 {
2939     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2940     gint32 i, picNumX;
2941
2942     picNumX = get_picNumX(picture, ref_pic_marking);
2943     i = find_short_term_reference(decoder, picNumX);
2944     if (i < 0)
2945         return;
2946
2947     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2948         GST_VAAPI_PICTURE_IS_FRAME(picture));
2949     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2950 }
2951
2952 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2953 static void
2954 exec_ref_pic_marking_adaptive_mmco_2(
2955     GstVaapiDecoderH264  *decoder,
2956     GstVaapiPictureH264  *picture,
2957     GstH264RefPicMarking *ref_pic_marking
2958 )
2959 {
2960     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2961     gint32 i;
2962
2963     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2964     if (i < 0)
2965         return;
2966
2967     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2968         GST_VAAPI_PICTURE_IS_FRAME(picture));
2969     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2970 }
2971
2972 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2973 static void
2974 exec_ref_pic_marking_adaptive_mmco_3(
2975     GstVaapiDecoderH264  *decoder,
2976     GstVaapiPictureH264  *picture,
2977     GstH264RefPicMarking *ref_pic_marking
2978 )
2979 {
2980     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2981     GstVaapiPictureH264 *ref_picture, *other_field;
2982     gint32 i, picNumX;
2983
2984     for (i = 0; i < priv->long_ref_count; i++) {
2985         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2986             break;
2987     }
2988     if (i != priv->long_ref_count) {
2989         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
2990         ARRAY_REMOVE_INDEX(priv->long_ref, i);
2991     }
2992
2993     picNumX = get_picNumX(picture, ref_pic_marking);
2994     i = find_short_term_reference(decoder, picNumX);
2995     if (i < 0)
2996         return;
2997
2998     ref_picture = priv->short_ref[i];
2999     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3000     priv->long_ref[priv->long_ref_count++] = ref_picture;
3001
3002     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3003     gst_vaapi_picture_h264_set_reference(ref_picture,
3004         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3005         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3006
3007     /* Assign LongTermFrameIdx to the other field if it was also
3008        marked as "used for long-term reference" */
3009     other_field = ref_picture->other_field;
3010     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3011         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3012 }
3013
3014 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3015  * as "unused for reference" */
3016 static void
3017 exec_ref_pic_marking_adaptive_mmco_4(
3018     GstVaapiDecoderH264  *decoder,
3019     GstVaapiPictureH264  *picture,
3020     GstH264RefPicMarking *ref_pic_marking
3021 )
3022 {
3023     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3024     gint32 i, long_term_frame_idx;
3025
3026     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3027
3028     for (i = 0; i < priv->long_ref_count; i++) {
3029         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3030             continue;
3031         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3032         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3033         i--;
3034     }
3035 }
3036
3037 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3038 static void
3039 exec_ref_pic_marking_adaptive_mmco_5(
3040     GstVaapiDecoderH264  *decoder,
3041     GstVaapiPictureH264  *picture,
3042     GstH264RefPicMarking *ref_pic_marking
3043 )
3044 {
3045     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3046
3047     dpb_flush(decoder, picture);
3048
3049     priv->prev_pic_has_mmco5 = TRUE;
3050
3051     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3052     priv->frame_num = 0;
3053     priv->frame_num_offset = 0;
3054     picture->frame_num = 0;
3055
3056     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3057     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3058         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3059     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3060         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3061     picture->base.poc = 0;
3062 }
3063
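/* Illustrative sketch only (not compiled): the POC rebasing performed for
   MMCO 5 above, assuming a hypothetical frame with TopFieldOrderCnt = 8
   and BottomFieldOrderCnt = 9, i.e. PicOrderCnt = Min(8, 9) = 8. */
#if 0
static void
example_mmco5_poc_rebase(void)
{
    gint32 field_poc[2] = { 8, 9 };     /* hypothetical { top, bottom } */
    gint32 poc = 8;                     /* PicOrderCnt of the picture */

    field_poc[TOP_FIELD]    -= poc;     /* -> 0 */
    field_poc[BOTTOM_FIELD] -= poc;     /* -> 1, field offset preserved */
    poc = 0;                            /* current picture now has POC 0 */

    (void)field_poc;
    (void)poc;
}
#endif
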
3064 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3065 static void
3066 exec_ref_pic_marking_adaptive_mmco_6(
3067     GstVaapiDecoderH264  *decoder,
3068     GstVaapiPictureH264  *picture,
3069     GstH264RefPicMarking *ref_pic_marking
3070 )
3071 {
3072     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3073     GstVaapiPictureH264 *other_field;
3074     guint i;
3075
3076     for (i = 0; i < priv->long_ref_count; i++) {
3077         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3078             break;
3079     }
3080     if (i != priv->long_ref_count) {
3081         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3082         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3083     }
3084
3085     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3086     gst_vaapi_picture_h264_set_reference(picture,
3087         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3088         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3089
3090     /* Assign LongTermFrameIdx to the other field if it was also
3091        marked as "used for long-term reference" */
3092     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3093     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3094         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3095 }
3096
3097 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3098 static gboolean
3099 exec_ref_pic_marking_adaptive(
3100     GstVaapiDecoderH264     *decoder,
3101     GstVaapiPictureH264     *picture,
3102     GstH264DecRefPicMarking *dec_ref_pic_marking
3103 )
3104 {
3105     guint i;
3106
3107     GST_DEBUG("reference picture marking process (adaptive memory control)");
3108
3109     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3110         GstVaapiDecoderH264  *decoder,
3111         GstVaapiPictureH264  *picture,
3112         GstH264RefPicMarking *ref_pic_marking
3113     );
3114
3115     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3116         NULL,
3117         exec_ref_pic_marking_adaptive_mmco_1,
3118         exec_ref_pic_marking_adaptive_mmco_2,
3119         exec_ref_pic_marking_adaptive_mmco_3,
3120         exec_ref_pic_marking_adaptive_mmco_4,
3121         exec_ref_pic_marking_adaptive_mmco_5,
3122         exec_ref_pic_marking_adaptive_mmco_6,
3123     };
3124
3125     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3126         GstH264RefPicMarking * const ref_pic_marking =
3127             &dec_ref_pic_marking->ref_pic_marking[i];
3128
3129         const guint mmco = ref_pic_marking->memory_management_control_operation;
3130         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3131             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3132         else {
3133             GST_ERROR("unhandled MMCO %u", mmco);
3134             return FALSE;
3135         }
3136     }
3137     return TRUE;
3138 }
3139
3140 /* 8.2.5 - Execute reference picture marking process */
3141 static gboolean
3142 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3143 {
3144     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3145
3146     priv->prev_pic_has_mmco5 = FALSE;
3147     priv->prev_pic_structure = picture->structure;
3148
3149     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3150         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3151
3152     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3153         return TRUE;
3154
3155     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3156         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3157             &picture->last_slice_hdr->dec_ref_pic_marking;
3158         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3159             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3160                 return FALSE;
3161         }
3162         else {
3163             if (!exec_ref_pic_marking_sliding_window(decoder))
3164                 return FALSE;
3165         }
3166     }
3167     return TRUE;
3168 }
3169
3170 static void
3171 vaapi_init_picture(VAPictureH264 *pic)
3172 {
3173     pic->picture_id           = VA_INVALID_ID;
3174     pic->frame_idx            = 0;
3175     pic->flags                = VA_PICTURE_H264_INVALID;
3176     pic->TopFieldOrderCnt     = 0;
3177     pic->BottomFieldOrderCnt  = 0;
3178 }
3179
3180 static void
3181 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3182     guint picture_structure)
3183 {
3184     if (!picture_structure)
3185         picture_structure = picture->structure;
3186
3187     pic->picture_id = picture->base.surface_id;
3188     pic->flags = 0;
3189
3190     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3191         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3192         pic->frame_idx = picture->long_term_frame_idx;
3193     }
3194     else {
3195         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3196             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3197         pic->frame_idx = picture->frame_num;
3198     }
3199
3200     switch (picture_structure) {
3201     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3202         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3203         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3204         break;
3205     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3206         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3207         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3208         pic->BottomFieldOrderCnt = 0;
3209         break;
3210     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3211         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3212         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3213         pic->TopFieldOrderCnt = 0;
3214         break;
3215     }
3216 }
3217
3218 static void
3219 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3220     GstVaapiPictureH264 *picture)
3221 {
3222     vaapi_fill_picture(pic, picture, 0);
3223
3224     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3225     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3226         /* The inter-view reference components and inter-view only
3227            reference components that are included in the reference
3228            picture lists are considered as not being marked as "used for
3229            short-term reference" or "used for long-term reference" */
3230         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3231                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3232     }
3233 }
3234
3235 static gboolean
3236 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3237 {
3238     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3239     GstVaapiPicture * const base_picture = &picture->base;
3240     GstH264PPS * const pps = get_pps(decoder);
3241     GstH264SPS * const sps = get_sps(decoder);
3242     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3243     guint i, n;
3244
3245     /* Fill in VAPictureParameterBufferH264 */
3246     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3247
3248     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3249         GstVaapiFrameStore * const fs = priv->dpb[i];
3250         if ((gst_vaapi_frame_store_has_reference(fs) &&
3251              fs->view_id == picture->base.view_id) ||
3252             (gst_vaapi_frame_store_has_inter_view(fs) &&
3253              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3254             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3255                 fs->buffers[0], fs->structure);
3256         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3257             break;
3258     }
3259     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3260         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3261
3262 #define COPY_FIELD(s, f) \
3263     pic_param->f = (s)->f
3264
3265 #define COPY_BFM(a, s, f) \
3266     pic_param->a.bits.f = (s)->f
3267
3268     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3269     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3270     pic_param->frame_num                    = priv->frame_num;
3271
3272     COPY_FIELD(sps, bit_depth_luma_minus8);
3273     COPY_FIELD(sps, bit_depth_chroma_minus8);
3274     COPY_FIELD(sps, num_ref_frames);
3275     COPY_FIELD(pps, num_slice_groups_minus1);
3276     COPY_FIELD(pps, slice_group_map_type);
3277     COPY_FIELD(pps, slice_group_change_rate_minus1);
3278     COPY_FIELD(pps, pic_init_qp_minus26);
3279     COPY_FIELD(pps, pic_init_qs_minus26);
3280     COPY_FIELD(pps, chroma_qp_index_offset);
3281     COPY_FIELD(pps, second_chroma_qp_index_offset);
3282
3283     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3284     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3285     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3286
3287     COPY_BFM(seq_fields, sps, chroma_format_idc);
3288     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3289     COPY_BFM(seq_fields, sps, frame_mbs_only_flag); 
3290     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag); 
3291     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag); 
3292     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3293     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3294     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3295     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3296
3297     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3298     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3299     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3300
3301     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3302     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3303     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3304     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3305     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3306     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3307     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3308     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3309     return TRUE;
3310 }
3311
3312 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3313 static gboolean
3314 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3315 {
3316     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3317     GstH264PPS * const pps = slice_hdr->pps;
3318     GstH264SPS * const sps = pps->sequence;
3319     GstH264SliceHdr *prev_slice_hdr;
3320
3321     if (!prev_pi)
3322         return TRUE;
3323     prev_slice_hdr = &prev_pi->data.slice_hdr;
3324
3325 #define CHECK_EXPR(expr, field_name) do {              \
3326         if (!(expr)) {                                 \
3327             GST_DEBUG(field_name " differs in value"); \
3328             return TRUE;                               \
3329         }                                              \
3330     } while (0)
3331
3332 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3333     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3334
3335     /* view_id differs in value and VOIdx of current slice_hdr is less
3336        than the VOIdx of the prev_slice_hdr */
3337     CHECK_VALUE(pi, prev_pi, view_id);
3338
3339     /* frame_num differs in value, regardless of values inferred to be 0 */
3340     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3341
3342     /* pic_parameter_set_id differs in value */
3343     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3344
3345     /* field_pic_flag differs in value */
3346     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3347
3348     /* bottom_field_flag is present in both and differs in value */
3349     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3350         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3351
3352     /* nal_ref_idc differs in value, with one of the nal_ref_idc values equal to 0 */
3353     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3354                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3355
3356     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3357        value or delta_pic_order_cnt_bottom differs in value */
3358     if (sps->pic_order_cnt_type == 0) {
3359         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3360         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3361             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3362     }
3363
3364     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3365        differs in value or delta_pic_order_cnt[1] differs in value */
3366     else if (sps->pic_order_cnt_type == 1) {
3367         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3368         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3369     }
3370
3371     /* IdrPicFlag differs in value */
3372     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3373
3374     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3375     if (pi->nalu.idr_pic_flag)
3376         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3377
3378 #undef CHECK_EXPR
3379 #undef CHECK_VALUE
3380     return FALSE;
3381 }
3382
3383 /* Detection of a new access unit, assuming we are already in the presence
3384    of a new picture */
3385 static inline gboolean
3386 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3387 {
3388     if (!prev_pi || prev_pi->view_id == pi->view_id)
3389         return TRUE;
3390     return pi->voc < prev_pi->voc;
3391 }
3392
3393 /* Finds the first field picture corresponding to the supplied picture */
3394 static GstVaapiPictureH264 *
3395 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3396 {
3397     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3398     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3399     GstVaapiFrameStore *fs;
3400
3401     if (!slice_hdr->field_pic_flag)
3402         return NULL;
3403
3404     fs = priv->prev_frames[pi->voc];
3405     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3406         return NULL;
3407
3408     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3409         return fs->buffers[0];
3410     return NULL;
3411 }
3412
3413 static GstVaapiDecoderStatus
3414 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3415 {
3416     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3417     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3418     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3419     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3420     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3421     GstVaapiPictureH264 *picture, *first_field;
3422     GstVaapiDecoderStatus status;
3423
3424     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3425     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3426
3427     /* Only decode base stream for MVC */
3428     switch (sps->profile_idc) {
3429     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3430     case GST_H264_PROFILE_STEREO_HIGH:
3431         if (0) {
3432             GST_DEBUG("drop picture from substream");
3433             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3434         }
3435         break;
3436     }
3437
3438     status = ensure_context(decoder, sps);
3439     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3440         return status;
3441
3442     priv->decoder_state = 0;
3443
3444     first_field = find_first_field(decoder, pi);
3445     if (first_field) {
3446         /* Re-use current picture where the first field was decoded */
3447         picture = gst_vaapi_picture_h264_new_field(first_field);
3448         if (!picture) {
3449             GST_ERROR("failed to allocate field picture");
3450             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3451         }
3452     }
3453     else {
3454         /* Create new picture */
3455         picture = gst_vaapi_picture_h264_new(decoder);
3456         if (!picture) {
3457             GST_ERROR("failed to allocate picture");
3458             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3459         }
3460     }
3461     gst_vaapi_picture_replace(&priv->current_picture, picture);
3462     gst_vaapi_picture_unref(picture);
3463
3464     /* Clear inter-view references list if this is the primary coded
3465        picture of the current access unit */
3466     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3467         g_ptr_array_set_size(priv->inter_views, 0);
3468
3469     /* Update cropping rectangle */
3470     if (sps->frame_cropping_flag) {
3471         GstVaapiRectangle crop_rect;
3472         crop_rect.x = sps->crop_rect_x;
3473         crop_rect.y = sps->crop_rect_y;
3474         crop_rect.width = sps->crop_rect_width;
3475         crop_rect.height = sps->crop_rect_height;
3476         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3477     }
3478
3479     status = ensure_quant_matrix(decoder, picture);
3480     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3481         GST_ERROR("failed to reset quantizer matrix");
3482         return status;
3483     }
3484
3485     if (!init_picture(decoder, picture, pi))
3486         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3487     if (!fill_picture(decoder, picture))
3488         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3489
3490     priv->decoder_state = pi->state;
3491     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3492 }
3493
3494 static inline guint
3495 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3496 {
3497     guint epb_count;
3498
3499     epb_count = slice_hdr->n_emulation_prevention_bytes;
3500     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3501 }
3502
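/* Illustrative sketch only (not compiled): the bit offset computed above,
   with hypothetical values -- a 1-byte NAL header (MVC slice extensions
   carry a larger header), a parsed slice header of 42 bits and one
   emulation prevention byte inside it. */
#if 0
static guint
example_slice_data_bit_offset(void)
{
    const guint nal_header_bytes = 1;   /* hypothetical */
    const guint header_size = 42;       /* hypothetical, in bits */
    const guint epb_count = 1;          /* hypothetical */

    return 8 * nal_header_bytes + header_size - epb_count * 8;  /* -> 42 */
}
#endif
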
3503 static gboolean
3504 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3505     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3506 {
3507     VASliceParameterBufferH264 * const slice_param = slice->param;
3508     GstH264PPS * const pps = get_pps(decoder);
3509     GstH264SPS * const sps = get_sps(decoder);
3510     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3511     guint num_weight_tables = 0;
3512     gint i, j;
3513
3514     if (pps->weighted_pred_flag &&
3515         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3516         num_weight_tables = 1;
3517     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3518         num_weight_tables = 2;
3519     else
3520         num_weight_tables = 0;
3521
3522     slice_param->luma_log2_weight_denom   = 0;
3523     slice_param->chroma_log2_weight_denom = 0;
3524     slice_param->luma_weight_l0_flag      = 0;
3525     slice_param->chroma_weight_l0_flag    = 0;
3526     slice_param->luma_weight_l1_flag      = 0;
3527     slice_param->chroma_weight_l1_flag    = 0;
3528
3529     if (num_weight_tables < 1)
3530         return TRUE;
3531
3532     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3533     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3534
3535     slice_param->luma_weight_l0_flag = 1;
3536     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3537         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3538         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3539     }
3540
3541     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3542     if (slice_param->chroma_weight_l0_flag) {
3543         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3544             for (j = 0; j < 2; j++) {
3545                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3546                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3547             }
3548         }
3549     }
3550
3551     if (num_weight_tables < 2)
3552         return TRUE;
3553
3554     slice_param->luma_weight_l1_flag = 1;
3555     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3556         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3557         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3558     }
3559
3560     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3561     if (slice_param->chroma_weight_l1_flag) {
3562         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3563             for (j = 0; j < 2; j++) {
3564                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3565                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3566             }
3567         }
3568     }
3569     return TRUE;
3570 }
3571
3572 static gboolean
3573 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3574     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3575 {
3576     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3577     VASliceParameterBufferH264 * const slice_param = slice->param;
3578     guint i, num_ref_lists = 0;
3579
3580     slice_param->num_ref_idx_l0_active_minus1 = 0;
3581     slice_param->num_ref_idx_l1_active_minus1 = 0;
3582
3583     if (GST_H264_IS_B_SLICE(slice_hdr))
3584         num_ref_lists = 2;
3585     else if (GST_H264_IS_I_SLICE(slice_hdr))
3586         num_ref_lists = 0;
3587     else
3588         num_ref_lists = 1;
3589
3590     if (num_ref_lists < 1)
3591         return TRUE;
3592
3593     slice_param->num_ref_idx_l0_active_minus1 =
3594         slice_hdr->num_ref_idx_l0_active_minus1;
3595
3596     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3597         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3598             priv->RefPicList0[i]);
3599     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3600         vaapi_init_picture(&slice_param->RefPicList0[i]);
3601
3602     if (num_ref_lists < 2)
3603         return TRUE;
3604
3605     slice_param->num_ref_idx_l1_active_minus1 =
3606         slice_hdr->num_ref_idx_l1_active_minus1;
3607
3608     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3609         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3610             priv->RefPicList1[i]);
3611     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3612         vaapi_init_picture(&slice_param->RefPicList1[i]);
3613     return TRUE;
3614 }
3615
3616 static gboolean
3617 fill_slice(GstVaapiDecoderH264 *decoder,
3618     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3619 {
3620     VASliceParameterBufferH264 * const slice_param = slice->param;
3621     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3622
3623     /* Fill in VASliceParameterBufferH264 */
3624     slice_param->slice_data_bit_offset =
3625         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3626     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3627     slice_param->slice_type                     = slice_hdr->type % 5;
3628     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3629     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3630     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3631     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3632     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3633     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3634
3635     if (!fill_RefPicList(decoder, slice, slice_hdr))
3636         return FALSE;
3637     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3638         return FALSE;
3639     return TRUE;
3640 }
3641
3642 static GstVaapiDecoderStatus
3643 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3644 {
3645     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3646     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3647     GstVaapiPictureH264 * const picture = priv->current_picture;
3648     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3649     GstVaapiSlice *slice;
3650     GstBuffer * const buffer =
3651         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3652     GstMapInfo map_info;
3653
3654     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3655
3656     if (!is_valid_state(pi->state,
3657             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3658         GST_WARNING("failed to receive enough headers to decode slice");
3659         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3660     }
3661
3662     if (!ensure_pps(decoder, slice_hdr->pps)) {
3663         GST_ERROR("failed to activate PPS");
3664         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3665     }
3666
3667     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3668         GST_ERROR("failed to activate SPS");
3669         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3670     }
3671
3672     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3673         GST_ERROR("failed to map buffer");
3674         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3675     }
3676
3677     /* Check whether this is the first/last slice in the current access unit */
3678     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3679         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3680     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3681         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3682
3683     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3684         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3685     gst_buffer_unmap(buffer, &map_info);
3686     if (!slice) {
3687         GST_ERROR("failed to allocate slice");
3688         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3689     }
3690
3691     init_picture_refs(decoder, picture, slice_hdr);
3692     if (!fill_slice(decoder, slice, pi)) {
3693         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3694         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3695     }
3696
3697     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3698     picture->last_slice_hdr = slice_hdr;
3699     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3700     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3701 }
3702
3703 static inline gint
3704 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3705 {
3706     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3707                                                      0xffffff00, 0x00000100,
3708                                                      ofs, size,
3709                                                      scp);
3710 }
3711
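/* Illustrative sketch only (not compiled): what the masked scan above
   matches. Any 4-byte window whose first three bytes are the Annex B
   start code 00 00 01 satisfies the mask/pattern pair, whatever the
   fourth byte (typically the NAL unit header) is. */
#if 0
static gboolean
example_is_start_code(guint32 value)
{
    /* e.g. value == 0x00000167 for the hypothetical bytes 00 00 01 67 */
    return (value & 0xffffff00) == 0x00000100;
}
#endif
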
3712 static GstVaapiDecoderStatus
3713 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3714 {
3715     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3716     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3717     GstVaapiDecoderStatus status;
3718
3719     priv->decoder_state |= pi->state;
3720     switch (pi->nalu.type) {
3721     case GST_H264_NAL_SPS:
3722         status = decode_sps(decoder, unit);
3723         break;
3724     case GST_H264_NAL_SUBSET_SPS:
3725         status = decode_subset_sps(decoder, unit);
3726         break;
3727     case GST_H264_NAL_PPS:
3728         status = decode_pps(decoder, unit);
3729         break;
3730     case GST_H264_NAL_SLICE_EXT:
3731     case GST_H264_NAL_SLICE_IDR:
3732         /* fall-through. IDR specifics are handled in init_picture() */
3733     case GST_H264_NAL_SLICE:
3734         status = decode_slice(decoder, unit);
3735         break;
3736     case GST_H264_NAL_SEQ_END:
3737     case GST_H264_NAL_STREAM_END:
3738         status = decode_sequence_end(decoder);
3739         break;
3740     case GST_H264_NAL_SEI:
3741         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3742         break;
3743     default:
3744         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3745         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3746         break;
3747     }
3748     return status;
3749 }
3750
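/* The codec-data parsed below is an AVCDecoderConfigurationRecord ("avcC",
   ISO/IEC 14496-15). A minimal sketch of the layout this parser relies on:

     buf[0]         configurationVersion, must be 1
     buf[1..3]      profile / compatibility / level indications (unused here)
     buf[4] & 0x03  lengthSizeMinusOne -> nal_length_size = value + 1
     buf[5] & 0x1f  numOfSequenceParameterSets
     then, per SPS: 16-bit big-endian length followed by the NAL unit
     next byte      numOfPictureParameterSets
     then, per PPS: 16-bit big-endian length followed by the NAL unit
*/
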
3751 static GstVaapiDecoderStatus
3752 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3753     const guchar *buf, guint buf_size)
3754 {
3755     GstVaapiDecoderH264 * const decoder =
3756         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3757     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3758     GstVaapiDecoderStatus status;
3759     GstVaapiDecoderUnit unit;
3760     GstVaapiParserInfoH264 *pi = NULL;
3761     GstH264ParserResult result;
3762     guint i, ofs, num_sps, num_pps;
3763
3764     unit.parsed_info = NULL;
3765
3766     if (buf_size < 8)
3767         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3768
3769     if (buf[0] != 1) {
3770         GST_ERROR("failed to decode codec-data, not in avcC format");
3771         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3772     }
3773
3774     priv->nal_length_size = (buf[4] & 0x03) + 1;
3775
3776     num_sps = buf[5] & 0x1f;
3777     ofs = 6;
3778
3779     for (i = 0; i < num_sps; i++) {
3780         pi = gst_vaapi_parser_info_h264_new();
3781         if (!pi)
3782             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3783         unit.parsed_info = pi;
3784
3785         result = gst_h264_parser_identify_nalu_avc(
3786             priv->parser,
3787             buf, ofs, buf_size, 2,
3788             &pi->nalu
3789         );
3790         if (result != GST_H264_PARSER_OK) {
3791             status = get_status(result);
3792             goto cleanup;
3793         }
3794
3795         status = parse_sps(decoder, &unit);
3796         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3797             goto cleanup;
3798         ofs = pi->nalu.offset + pi->nalu.size;
3799
3800         status = decode_sps(decoder, &unit);
3801         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3802             goto cleanup;
3803         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3804     }
3805
3806     num_pps = buf[ofs];
3807     ofs++;
3808
3809     for (i = 0; i < num_pps; i++) {
3810         pi = gst_vaapi_parser_info_h264_new();
3811         if (!pi)
3812             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3813         unit.parsed_info = pi;
3814
3815         result = gst_h264_parser_identify_nalu_avc(
3816             priv->parser,
3817             buf, ofs, buf_size, 2,
3818             &pi->nalu
3819         );
3820         if (result != GST_H264_PARSER_OK) {
3821             status = get_status(result);
3822             goto cleanup;
3823         }
3824
3825         status = parse_pps(decoder, &unit);
3826         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3827             goto cleanup;
3828         ofs = pi->nalu.offset + pi->nalu.size;
3829
3830         status = decode_pps(decoder, &unit);
3831         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3832             goto cleanup;
3833         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3834     }
3835
3836     priv->is_avcC = TRUE;
3837     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3838
3839 cleanup:
3840     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3841     return status;
3842 }
3843
3844 static GstVaapiDecoderStatus
3845 ensure_decoder(GstVaapiDecoderH264 *decoder)
3846 {
3847     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3848     GstVaapiDecoderStatus status;
3849
3850     if (!priv->is_opened) {
3851         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3852         if (!priv->is_opened)
3853             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3854
3855         status = gst_vaapi_decoder_decode_codec_data(
3856             GST_VAAPI_DECODER_CAST(decoder));
3857         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3858             return status;
3859     }
3860     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3861 }
3862
3863 static GstVaapiDecoderStatus
3864 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3865     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3866 {
3867     GstVaapiDecoderH264 * const decoder =
3868         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3869     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3870     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3871     GstVaapiParserInfoH264 *pi;
3872     GstVaapiDecoderStatus status;
3873     GstH264ParserResult result;
3874     guchar *buf;
3875     guint i, size, buf_size, nalu_size, flags;
3876     guint32 start_code;
3877     gint ofs, ofs2;
3878
3879     status = ensure_decoder(decoder);
3880     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3881         return status;
3882
3883     size = gst_adapter_available(adapter);
3884
3885     if (priv->is_avcC) {
3886         if (size < priv->nal_length_size)
3887             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3888
3889         buf = (guchar *)&start_code;
3890         g_assert(priv->nal_length_size <= sizeof(start_code));
3891         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3892
3893         nalu_size = 0;
3894         for (i = 0; i < priv->nal_length_size; i++)
3895             nalu_size = (nalu_size << 8) | buf[i];
3896
3897         buf_size = priv->nal_length_size + nalu_size;
3898         if (size < buf_size)
3899             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3900     }
3901     else {
3902         if (size < 4)
3903             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3904
3905         ofs = scan_for_start_code(adapter, 0, size, NULL);
3906         if (ofs < 0)
3907             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3908
3909         if (ofs > 0) {
3910             gst_adapter_flush(adapter, ofs);
3911             size -= ofs;
3912         }
3913
3914         ofs2 = ps->input_offset2 - ofs - 4;
3915         if (ofs2 < 4)
3916             ofs2 = 4;
3917
3918         ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3919             scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3920         if (ofs < 0) {
3921             /* Assume the whole NAL unit is present at end-of-stream */
3922             if (!at_eos) {
3923                 ps->input_offset2 = size;
3924                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3925             }
3926             ofs = size;
3927         }
3928         buf_size = ofs;
3929     }
3930     ps->input_offset2 = 0;
3931
3932     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3933     if (!buf)
3934         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3935
3936     unit->size = buf_size;
3937
3938     pi = gst_vaapi_parser_info_h264_new();
3939     if (!pi)
3940         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3941
3942     gst_vaapi_decoder_unit_set_parsed_info(unit,
3943         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3944
3945     if (priv->is_avcC)
3946         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3947             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3948     else
3949         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3950             buf, 0, buf_size, &pi->nalu);
3951     status = get_status(result);
3952     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3953         return status;
3954
3955     switch (pi->nalu.type) {
3956     case GST_H264_NAL_SPS:
3957         status = parse_sps(decoder, unit);
3958         break;
3959     case GST_H264_NAL_SUBSET_SPS:
3960         status = parse_subset_sps(decoder, unit);
3961         break;
3962     case GST_H264_NAL_PPS:
3963         status = parse_pps(decoder, unit);
3964         break;
3965     case GST_H264_NAL_SEI:
3966         status = parse_sei(decoder, unit);
3967         break;
3968     case GST_H264_NAL_SLICE_EXT:
3969         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
3970             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3971             break;
3972         }
3973         /* fall-through */
3974     case GST_H264_NAL_SLICE_IDR:
3975     case GST_H264_NAL_SLICE:
3976         status = parse_slice(decoder, unit);
3977         break;
3978     default:
3979         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3980         break;
3981     }
3982     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3983         return status;
3984
3985     flags = 0;
3986     switch (pi->nalu.type) {
3987     case GST_H264_NAL_AU_DELIMITER:
3988         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
3989         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
3990         /* fall-through */
3991     case GST_H264_NAL_FILLER_DATA:
3992         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
3993         break;
3994     case GST_H264_NAL_STREAM_END:
3995         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
3996         /* fall-through */
3997     case GST_H264_NAL_SEQ_END:
3998         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
3999         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4000         break;
4001     case GST_H264_NAL_SPS:
4002     case GST_H264_NAL_SUBSET_SPS:
4003     case GST_H264_NAL_PPS:
4004     case GST_H264_NAL_SEI:
4005         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4006         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4007         break;
4008     case GST_H264_NAL_SLICE_EXT:
4009         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4010             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4011             break;
4012         }
4013         /* fall-through */
4014     case GST_H264_NAL_SLICE_IDR:
4015     case GST_H264_NAL_SLICE:
4016         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4017         if (is_new_picture(pi, priv->prev_slice_pi)) {
4018             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4019             if (is_new_access_unit(pi, priv->prev_slice_pi))
4020                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4021         }
4022         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4023         break;
4024     case GST_H264_NAL_SPS_EXT:
4025     case GST_H264_NAL_SLICE_AUX:
4026         /* skip SPS extension and auxiliary slice for now */
4027         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4028         break;
4029     case GST_H264_NAL_PREFIX_UNIT:
4030         /* skip Prefix NAL units for now */
4031         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4032             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4033             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4034         break;
4035     default:
4036         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4037             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4038                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4039         break;
4040     }
4041     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4042         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4043     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4044
4045     pi->nalu.data = NULL;
4046     pi->state = priv->parser_state;
4047     pi->flags = flags;
4048     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4049     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4050 }
4051
4052 static GstVaapiDecoderStatus
4053 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4054     GstVaapiDecoderUnit *unit)
4055 {
4056     GstVaapiDecoderH264 * const decoder =
4057         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4058     GstVaapiDecoderStatus status;
4059
4060     status = ensure_decoder(decoder);
4061     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4062         return status;
4063     return decode_unit(decoder, unit);
4064 }
4065
4066 static GstVaapiDecoderStatus
4067 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4068     GstVaapiDecoderUnit *unit)
4069 {
4070     GstVaapiDecoderH264 * const decoder =
4071         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4072
4073     return decode_picture(decoder, unit);
4074 }
4075
4076 static GstVaapiDecoderStatus
4077 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4078 {
4079     GstVaapiDecoderH264 * const decoder =
4080         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4081
4082     return decode_current_picture(decoder);
4083 }
4084
4085 static GstVaapiDecoderStatus
4086 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4087 {
4088     GstVaapiDecoderH264 * const decoder =
4089         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4090
4091     dpb_flush(decoder, NULL);
4092     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4093 }
4094
4095 static void
4096 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4097 {
4098     GstVaapiMiniObjectClass * const object_class =
4099         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4100     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4101
4102     object_class->size          = sizeof(GstVaapiDecoderH264);
4103     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4104
4105     decoder_class->create       = gst_vaapi_decoder_h264_create;
4106     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4107     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4108     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4109     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4110     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4111     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4112
4113     decoder_class->decode_codec_data =
4114         gst_vaapi_decoder_h264_decode_codec_data;
4115 }
4116
4117 static inline const GstVaapiDecoderClass *
4118 gst_vaapi_decoder_h264_class(void)
4119 {
4120     static GstVaapiDecoderH264Class g_class;
4121     static gsize g_class_init = FALSE;
4122
4123     if (g_once_init_enter(&g_class_init)) {
4124         gst_vaapi_decoder_h264_class_init(&g_class);
4125         g_once_init_leave(&g_class_init, TRUE);
4126     }
4127     return GST_VAAPI_DECODER_CLASS(&g_class);
4128 }
4129
4130 /**
4131  * gst_vaapi_decoder_h264_new:
4132  * @display: a #GstVaapiDisplay
4133  * @caps: a #GstCaps holding codec information
4134  *
4135  * Creates a new #GstVaapiDecoder for MPEG-2 decoding.  The @caps can
4136  * hold extra information like codec-data and pictured coded size.
4137  *
4138  * Return value: the newly allocated #GstVaapiDecoder object
4139  */
4140 GstVaapiDecoder *
4141 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4142 {
4143     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4144 }