ignore bitparser error
[platform/upstream/gstreamer-vaapi.git] / gst-libs / gst / vaapi / gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
65  */
enum {
    /* This flag does not strictly follow the definitions (7.4.1.2.3)
       for detecting the start of an access unit as we are only
       interested in knowing if the current slice is the first one or
       the last one in the current access unit */
    GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
    GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
        GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),

    /* Mask covering both access unit boundary flags above */
    GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
        GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
        GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
};
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
struct _GstVaapiParserInfoH264 {
    GstVaapiMiniObject  parent_instance;
    GstH264NalUnit      nalu;       // NAL unit this parser info was created for
    union {                         // parsed payload, selected by nalu.type
        GstH264SPS      sps;
        GstH264PPS      pps;
        GArray         *sei;
        GstH264SliceHdr slice_hdr;
    }                   data;
    guint               state;      // parser state bits — presumably GstH264VideoState; confirm at fill site
    guint               flags;      // Same as decoder unit flags (persistent)
    guint               view_id;    // View ID of slice
    guint               voc;        // View order index (VOIdx) of slice
};
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_PPS:
108         gst_h264_pps_clear(&pi->data.pps);
109         break;
110     case GST_H264_NAL_SEI:
111         if (pi->data.sei) {
112             g_array_unref(pi->data.sei);
113             pi->data.sei = NULL;
114         }
115         break;
116     }
117 }
118
/* Returns the singleton mini-object class used to allocate
 * GstVaapiParserInfoH264 objects: only the instance size and the
 * finalizer need to be provided. */
static inline const GstVaapiMiniObjectClass *
gst_vaapi_parser_info_h264_class(void)
{
    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
        .size = sizeof(GstVaapiParserInfoH264),
        .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
    };
    return &GstVaapiParserInfoH264Class;
}
128
129 static inline GstVaapiParserInfoH264 *
130 gst_vaapi_parser_info_h264_new(void)
131 {
132     return (GstVaapiParserInfoH264 *)
133         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
134 }
135
136 #define gst_vaapi_parser_info_h264_ref(pi) \
137     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_unref(pi) \
140     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
141
142 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
143     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
144         (GstVaapiMiniObject *)(new_pi))
145
146 /* ------------------------------------------------------------------------- */
147 /* --- H.264 Pictures                                                    --- */
148 /* ------------------------------------------------------------------------- */
149
150 /*
151  * Extended picture flags:
152  *
153  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
154  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
155  *   may be used for inter-view prediction
156  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
157  *   i.e. a picture that is decoded with only inter-view prediction,
158  *   and not inter prediction
159  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
160  *   access unit (AU)
161  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
162  *   access unit (AU)
163  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
164  *     "used for short-term reference"
165  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
166  *     "used for long-term reference"
167  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
168  *     reference picture (short-term reference or long-term reference)
169  */
enum {
    GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
    GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
    GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
    GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
    GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
    GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),

    /* Short-term is the base REFERENCE flag alone; long-term adds
       REFERENCE2, so both kinds are distinguishable under the common
       GST_VAAPI_PICTURE_FLAGS_REFERENCE mask */
    GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE),
    GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
    GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
        GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
};
186
187 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
188     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
189
190 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
191     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
192       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
193      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
194
195 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
196     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
197       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
198      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
199
200 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
202
203 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
204     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
205
206 #define GST_VAAPI_PICTURE_H264(picture) \
207     ((GstVaapiPictureH264 *)(picture))
208
struct _GstVaapiPictureH264 {
    GstVaapiPicture             base;
    GstH264SliceHdr            *last_slice_hdr;     // header of the last slice seen for this picture
    guint                       structure;          // frame / top field / bottom field
    gint32                      field_poc[2];       // indexed by TOP_FIELD / BOTTOM_FIELD; G_MAXINT32 = unset
    gint32                      frame_num;              // Original frame_num from slice_header()
    gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
    gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
    gint32                      pic_num;                // Temporary for ref pic marking: PicNum
    gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
    GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
    guint                       output_flag             : 1;  // picture is intended for display
    guint                       output_needed           : 1;  // still awaiting actual output from the DPB
};
223
224 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
225
/* GstVaapiPictureH264 destructor: no H.264-specific resources to
 * release, simply chains up to the base picture destroy function. */
void
gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
{
    gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
}
231
232 gboolean
233 gst_vaapi_picture_h264_create(
234     GstVaapiPictureH264                      *picture,
235     const GstVaapiCodecObjectConstructorArgs *args
236 )
237 {
238     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
239         return FALSE;
240
241     picture->field_poc[0]       = G_MAXINT32;
242     picture->field_poc[1]       = G_MAXINT32;
243     picture->output_needed      = FALSE;
244     return TRUE;
245 }
246
/* Allocates a new H.264 picture codec object bound to @decoder, with
 * room for a VAPictureParameterBufferH264 parameter buffer and no
 * extra object data. Returns NULL on allocation failure. */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
{
    return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
        &GstVaapiPictureH264Class,
        GST_VAAPI_CODEC_BASE(decoder),
        NULL, sizeof(VAPictureParameterBufferH264),
        NULL, 0,
        0);
}
257
258 static inline void
259 gst_vaapi_picture_h264_set_reference(
260     GstVaapiPictureH264 *picture,
261     guint                reference_flags,
262     gboolean             other_field
263 )
264 {
265     if (!picture)
266         return;
267     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
268     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
269
270     if (!other_field || !(picture = picture->other_field))
271         return;
272     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
273     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
274 }
275
/* Creates the complementary field picture derived from @picture via
 * the base-layer helper (see gst_vaapi_picture_new_field()).
 * Returns NULL on failure or if @picture is NULL. */
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
{
    g_return_val_if_fail(picture, NULL);

    return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
}
283
284 /* ------------------------------------------------------------------------- */
285 /* --- Frame Buffers (DPB)                                               --- */
286 /* ------------------------------------------------------------------------- */
287
struct _GstVaapiFrameStore {
    /*< private >*/
    GstVaapiMiniObject          parent_instance;

    guint                       view_id;        // MVC view id of the stored picture(s)
    guint                       structure;      // promoted to FRAME once both fields are in
    GstVaapiPictureH264        *buffers[2];     // whole frame, or first/second field
    guint                       num_buffers;    // number of valid entries in buffers[] (1 or 2)
    guint                       output_needed;  // count of pictures still awaiting output
};
298
299 static void
300 gst_vaapi_frame_store_finalize(gpointer object)
301 {
302     GstVaapiFrameStore * const fs = object;
303     guint i;
304
305     for (i = 0; i < fs->num_buffers; i++)
306         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
307 }
308
/* Creates a new frame store initially holding the single picture
 * @picture (a whole frame or a first field), taking a new reference
 * to it. Returns NULL on allocation failure. */
static GstVaapiFrameStore *
gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
{
    GstVaapiFrameStore *fs;

    /* Minimal mini-object class: instance size plus finalizer */
    static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
        sizeof(GstVaapiFrameStore),
        gst_vaapi_frame_store_finalize
    };

    fs = (GstVaapiFrameStore *)
        gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
    if (!fs)
        return NULL;

    fs->view_id         = picture->base.view_id;
    fs->structure       = picture->structure;
    fs->buffers[0]      = gst_vaapi_picture_ref(picture);
    fs->buffers[1]      = NULL;
    fs->num_buffers     = 1;
    fs->output_needed   = 0;

    /* Track how many pictures in this store still await output */
    if (picture->output_flag) {
        picture->output_needed = TRUE;
        fs->output_needed++;
    }
    return fs;
}
337
/* Adds the second field @picture to frame store @fs, which must
 * already hold exactly the complementary first field. Cross-
 * propagates the field POC values between the two pictures and
 * promotes the frame store structure to FRAME. Returns FALSE on
 * precondition failure. */
static gboolean
gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
{
    guint field;

    /* @fs must hold a lone first field; @picture must be a second field */
    g_return_val_if_fail(fs->num_buffers == 1, FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
    g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);

    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
    if (picture->output_flag) {
        picture->output_needed = TRUE;
        fs->output_needed++;
    }

    fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;

    /* The first field learns this parity's POC from @picture, and
       @picture inherits the opposite parity's POC from the first
       field; each slot must still be unset (G_MAXINT32) beforehand */
    field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
        TOP_FIELD : BOTTOM_FIELD;
    g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
    fs->buffers[0]->field_poc[field] = picture->field_poc[field];
    g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
    picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
    return TRUE;
}
363
/* Splits a frame store holding a single frame picture into a
 * top-field / bottom-field pair. The newly created second field
 * shares the first field's frame_num, POCs and output flag.
 * Returns FALSE on allocation failure. */
static gboolean
gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
{
    GstVaapiPictureH264 * const first_field = fs->buffers[0];
    GstVaapiPictureH264 *second_field;

    g_return_val_if_fail(fs->num_buffers == 1, FALSE);

    first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
    GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);

    second_field = gst_vaapi_picture_h264_new_field(first_field);
    if (!second_field)
        return FALSE;
    /* The store takes its own reference via replace(); dropping the
       creation reference here keeps second_field alive through the
       store, so the assignments below remain valid */
    gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
    gst_vaapi_picture_unref(second_field);

    second_field->frame_num    = first_field->frame_num;
    second_field->field_poc[0] = first_field->field_poc[0];
    second_field->field_poc[1] = first_field->field_poc[1];
    second_field->output_flag  = first_field->output_flag;
    if (second_field->output_flag) {
        second_field->output_needed = TRUE;
        fs->output_needed++;
    }
    return TRUE;
}
391
392 static inline gboolean
393 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
394 {
395     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
396 }
397
398 static inline gboolean
399 gst_vaapi_frame_store_is_complete(GstVaapiFrameStore *fs)
400 {
401     return gst_vaapi_frame_store_has_frame(fs) ||
402         GST_VAAPI_PICTURE_IS_ONEFIELD(fs->buffers[0]);
403 }
404
405 static inline gboolean
406 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
407 {
408     guint i;
409
410     for (i = 0; i < fs->num_buffers; i++) {
411         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
412             return TRUE;
413     }
414     return FALSE;
415 }
416
417 static gboolean
418 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
419 {
420     guint i;
421
422     for (i = 0; i < fs->num_buffers; i++) {
423         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
424             return TRUE;
425     }
426     return FALSE;
427 }
428
429 #define gst_vaapi_frame_store_ref(fs) \
430     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
431
432 #define gst_vaapi_frame_store_unref(fs) \
433     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
434
435 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
436     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
437         (GstVaapiMiniObject *)(new_fs))
438
439 /* ------------------------------------------------------------------------- */
440 /* --- H.264 Decoder                                                     --- */
441 /* ------------------------------------------------------------------------- */
442
443 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
444     ((GstVaapiDecoderH264 *)(decoder))
445
/* Bitmask tracking which stream headers have been seen so far; used
 * for both parser_state and decoder_state in the private struct. */
typedef enum {
    GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,   // an SPS was parsed
    GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,   // a PPS was parsed
    GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,   // a slice was seen

    /* All headers required before a picture can be decoded */
    GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
        GST_H264_VIDEO_STATE_GOT_SPS |
        GST_H264_VIDEO_STATE_GOT_PPS),
    /* Headers plus at least one slice: a decodable picture */
    GST_H264_VIDEO_STATE_VALID_PICTURE = (
        GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
        GST_H264_VIDEO_STATE_GOT_SLICE)
} GstH264VideoState;
458
struct _GstVaapiDecoderH264Private {
    GstH264NalParser           *parser;             // codecparsers NAL parser instance
    guint                       parser_state;       // GstH264VideoState bits (parsing side)
    guint                       decoder_state;      // GstH264VideoState bits (decoding side)
    GstVaapiStreamAlignH264     stream_alignment;
    GstVaapiPictureH264        *current_picture;
    /* Parsed SPS/PPS tables indexed by id, plus the active ones */
    GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
    GstVaapiParserInfoH264     *active_sps;
    GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
    GstVaapiParserInfoH264     *active_pps;
    GstVaapiParserInfoH264     *prev_pi;            // previous parser info unit
    GstVaapiParserInfoH264     *prev_slice_pi;      // previous slice parser info
    GstVaapiFrameStore        **prev_frames;
    guint                       prev_frames_alloc;
    /* Decoded picture buffer (DPB) */
    GstVaapiFrameStore        **dpb;
    guint                       dpb_count;
    guint                       dpb_size;
    guint                       dpb_size_max;
    guint                       max_views;          // number of MVC views (1 for non-MVC)
    GstVaapiProfile             profile;
    GstVaapiEntrypoint          entrypoint;
    GstVaapiChromaType          chroma_type;
    GPtrArray                  *inter_views;        // pictures usable for inter-view prediction
    /* Reference picture bookkeeping and per-slice RefPicList0/1 */
    GstVaapiPictureH264        *short_ref[32];
    guint                       short_ref_count;
    GstVaapiPictureH264        *long_ref[32];
    guint                       long_ref_count;
    GstVaapiPictureH264        *RefPicList0[32];
    guint                       RefPicList0_count;
    GstVaapiPictureH264        *RefPicList1[32];
    guint                       RefPicList1_count;
    guint                       nal_length_size;    // avcC NAL length field size, in bytes
    guint                       mb_width;
    guint                       mb_height;
    /* Picture order count (POC) derivation state */
    gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
    gint32                      poc_msb;                // PicOrderCntMsb
    gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
    gint32                      prev_poc_msb;           // prevPicOrderCntMsb
    gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
    gint32                      frame_num_offset;       // FrameNumOffset
    gint32                      frame_num;              // frame_num (from slice_header())
    gint32                      prev_frame_num;         // prevFrameNum
    gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
    gboolean                    prev_pic_structure;     // previous picture structure
    guint                       is_opened               : 1;
    guint                       is_avcC                 : 1;    // stream is avcC-packetized, not byte-stream
    guint                       has_context             : 1;
    guint                       progressive_sequence    : 1;
};
508
509 /**
510  * GstVaapiDecoderH264:
511  *
512  * A decoder based on H264.
513  */
struct _GstVaapiDecoderH264 {
    /*< private >*/
    GstVaapiDecoder             parent_instance;    // base decoder object
    GstVaapiDecoderH264Private  priv;               // embedded private state
};
519
520 /**
521  * GstVaapiDecoderH264Class:
522  *
523  * A decoder class based on H264.
524  */
struct _GstVaapiDecoderH264Class {
    /*< private >*/
    GstVaapiDecoderClass parent_class;  // no H.264-specific vfuncs added
};
529
530 static gboolean
531 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
532
533 static gboolean
534 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
535     GstVaapiPictureH264 *picture);
536
/* Frame-store flavor of is_inter_view_reference_for_next_pictures()
 * (forward-declared above): checks the store's first picture. */
static inline gboolean
is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
    GstVaapiFrameStore *fs)
{
    return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
}
543
544 /* Determines if the supplied profile is one of the MVC set */
545 static gboolean
546 is_mvc_profile(GstH264Profile profile)
547 {
548     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
549         profile == GST_H264_PROFILE_STEREO_HIGH;
550 }
551
/* Determines the view_id from the supplied NAL unit; non-MVC NAL
 * units belong to the base view (view_id 0). */
static inline guint
get_view_id(GstH264NalUnit *nalu)
{
    return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
}
558
559 /* Determines the view order index (VOIdx) from the supplied view_id */
560 static gint
561 get_view_order_index(GstH264SPS *sps, guint16 view_id)
562 {
563     GstH264SPSExtMVC *mvc;
564     gint i;
565
566     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
567         return 0;
568
569     mvc = &sps->extension.mvc;
570     for (i = 0; i <= mvc->num_views_minus1; i++) {
571         if (mvc->view[i].view_id == view_id)
572             return i;
573     }
574     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
575     return -1;
576 }
577
578 /* Determines NumViews */
579 static guint
580 get_num_views(GstH264SPS *sps)
581 {
582     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
583         sps->extension.mvc.num_views_minus1 : 0);
584 }
585
/* Get number of reference frames to use: derives
 * max_dec_frame_buffering from the level limits (Table A-1), the VUI
 * parameters when present, and the number of MVC views, then clamps
 * it to [MAX(1, num_ref_frames), 16 * views-factor]. */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
{
    guint num_views, max_dpb_frames;
    guint max_dec_frame_buffering, PicSizeMbs;
    GstVaapiLevelH264 level;
    const GstVaapiH264LevelLimits *level_limits;

    /* Table A-1 - Level limits */
    if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
        level = GST_VAAPI_LEVEL_H264_L1b;  // level 1b is signalled as 1.1 + constraint_set3
    else
        level = gst_vaapi_utils_h264_get_level(sps->level_idc);
    level_limits = gst_vaapi_utils_h264_get_level_limits(level);
    if (G_UNLIKELY(!level_limits)) {
        GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
        max_dec_frame_buffering = 16;  // fall back to the maximum DPB size
    }
    else {
        /* MaxDpbFrames = MaxDpbMbs / PicSizeInMbs (A.3.1) */
        PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
                      (sps->pic_height_in_map_units_minus1 + 1) *
                      (sps->frame_mbs_only_flag ? 1 : 2));
        max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
    }
    if (is_mvc_profile(sps->profile_idc))
        max_dec_frame_buffering <<= 1;  // MVC doubles the DPB budget

    /* VUI parameters */
    if (sps->vui_parameters_present_flag) {
        GstH264VUIParams * const vui_params = &sps->vui_parameters;
        if (vui_params->bitstream_restriction_flag)
            max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
        else {
            /* Default max_dec_frame_buffering is 0 for these profiles
               when constraint_set3_flag is set (all-intra streams) */
            switch (sps->profile_idc) {
            case 44:  // CAVLC 4:4:4 Intra profile
            case GST_H264_PROFILE_SCALABLE_HIGH:
            case GST_H264_PROFILE_HIGH:
            case GST_H264_PROFILE_HIGH10:
            case GST_H264_PROFILE_HIGH_422:
            case GST_H264_PROFILE_HIGH_444:
                if (sps->constraint_set3_flag)
                    max_dec_frame_buffering = 0;
                break;
            }
        }
    }

    /* Clamp to the global DPB limit and to at least num_ref_frames */
    num_views = get_num_views(sps);
    max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
    if (max_dec_frame_buffering > max_dpb_frames)
        max_dec_frame_buffering = max_dpb_frames;
    else if (max_dec_frame_buffering < sps->num_ref_frames)
        max_dec_frame_buffering = sps->num_ref_frames;
    return MAX(1, max_dec_frame_buffering);
}
642
643 static void
644 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
645 {
646     gpointer * const entries = array;
647     guint num_entries = *array_length_ptr;
648
649     g_return_if_fail(index < num_entries);
650
651     if (index != --num_entries)
652         entries[index] = entries[num_entries];
653     entries[num_entries] = NULL;
654     *array_length_ptr = num_entries;
655 }
656
#if 1
/* Default build: O(1) removal, element order not preserved */
static inline void
array_remove_index(void *array, guint *array_length_ptr, guint index)
{
    array_remove_index_fast(array, array_length_ptr, index);
}
#else
/* Alternative (disabled): O(n) removal preserving element order */
static void
array_remove_index(void *array, guint *array_length_ptr, guint index)
{
    gpointer * const entries = array;
    const guint num_entries = *array_length_ptr - 1;
    guint i;

    g_return_if_fail(index <= num_entries);

    for (i = index; i < num_entries; i++)
        entries[i] = entries[i + 1];
    entries[num_entries] = NULL;
    *array_length_ptr = num_entries;
}
#endif
679
680 #define ARRAY_REMOVE_INDEX(array, index) \
681     array_remove_index(array, &array##_count, index)
682
/* Removes the frame store at position @index from the DPB, dropping
 * its reference. In strict ordering mode the remaining entries are
 * shifted down; otherwise the last entry is moved into the freed
 * slot (O(1), order not preserved). */
static void
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    guint i, num_frames = --priv->dpb_count;  // DPB size after removal

    if (USE_STRICT_DPB_ORDERING) {
        for (i = index; i < num_frames; i++)
            gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
    }
    else if (index != num_frames)
        gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
    gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
}
697
/* Outputs the frame store @fs: clears the output_needed markers on
 * all its pictures and pushes the picture downstream. An incomplete
 * frame store (a lone first field waiting for its pair) is skipped
 * and the call reports success. */
static gboolean
dpb_output(GstVaapiDecoderH264 *decoder, GstVaapiFrameStore *fs)
{
    GstVaapiPictureH264 *picture;

    g_return_val_if_fail(fs != NULL, FALSE);

    if (!gst_vaapi_frame_store_is_complete(fs))
        return TRUE;

    picture = fs->buffers[0];
    g_return_val_if_fail(picture != NULL, FALSE);
    picture->output_needed = FALSE;

    if (fs->num_buffers > 1) {
        picture = fs->buffers[1];
        g_return_val_if_fail(picture != NULL, FALSE);
        picture->output_needed = FALSE;
    }

    fs->output_needed = 0;
    /* NOTE(review): for a field pair, output goes through the second
       field's picture — presumably both fields map to the same
       underlying frame; confirm in gst_vaapi_picture_output() */
    return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
}
721
/* Removes the frame store at DPB position @i when it is no longer
 * needed: neither awaiting output nor holding a reference picture.
 * @picture is currently unused; kept for symmetry with the other
 * dpb_*() helpers. */
static inline void
dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore * const fs = priv->dpb[i];

    if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
        dpb_remove_index(decoder, i);
}
731
732 /* Finds the frame store holding the supplied picture */
733 static gint
734 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
735 {
736     GstVaapiDecoderH264Private * const priv = &decoder->priv;
737     gint i, j;
738
739     for (i = 0; i < priv->dpb_count; i++) {
740         GstVaapiFrameStore * const fs = priv->dpb[i];
741         for (j = 0; j < fs->num_buffers; j++) {
742             if (fs->buffers[j] == picture)
743                 return i;
744         }
745     }
746     return -1;
747 }
748
/* Finds the picture with the lowest POC that needs to be output.
 * When @picture is non-NULL, the search is restricted to frame
 * stores of the same view; POC ties are broken by the lowest VOC.
 * Returns the DPB index of the containing frame store, or -1 when
 * nothing is pending; the picture is returned via
 * @found_picture_ptr. */
static gint
dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;  // found_index only read when found_picture != NULL

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed)
            continue;
        if (picture && picture->base.view_id != fs->view_id)
            continue;
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed)
                continue;
            if (!found_picture || found_picture->base.poc > pic->base.poc ||
                (found_picture->base.poc == pic->base.poc &&
                 found_picture->base.voc > pic->base.voc))
                found_picture = pic, found_index = i;
        }
    }

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
}
779
/* Finds the picture with the lowest VOC that needs to be output,
 * among pictures from OTHER views that share @picture's POC (i.e.
 * view components of the same access unit). Returns the DPB index
 * of the containing frame store, or -1 when none is pending; the
 * picture is returned via @found_picture_ptr. */
static gint
dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
    GstVaapiPictureH264 **found_picture_ptr)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture = NULL;
    guint i, j, found_index;  // found_index only read when found_picture != NULL

    for (i = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if (!fs->output_needed || fs->view_id == picture->base.view_id)
            continue;
        for (j = 0; j < fs->num_buffers; j++) {
            GstVaapiPictureH264 * const pic = fs->buffers[j];
            if (!pic->output_needed || pic->base.poc != picture->base.poc)
                continue;
            if (!found_picture || found_picture->base.voc > pic->base.voc)
                found_picture = pic, found_index = i;
        }
    }

    if (found_picture_ptr)
        *found_picture_ptr = found_picture;
    return found_picture ? found_index : -1;
}
806
/* Outputs, in VOC order, the pending view components from other
 * views that belong to the same access unit as @picture (same POC),
 * stopping before view order index @voc. No-op for single-view
 * streams. Returns FALSE as soon as one output fails. */
static gboolean
dpb_output_other_views(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture, guint voc)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;
    gint found_index;
    gboolean success;

    if (priv->max_views == 1)
        return TRUE;

    /* Emit all other view components that were in the same access
       unit than the picture we have just found */
    found_picture = picture;
    for (;;) {
        found_index = dpb_find_lowest_voc(decoder, found_picture,
            &found_picture);
        if (found_index < 0 || found_picture->base.voc >= voc)
            break;
        success = dpb_output(decoder, priv->dpb[found_index]);
        dpb_evict(decoder, found_picture, found_index);
        if (!success)
            return FALSE;
    }
    return TRUE;
}
834
/* "Bumps" the DPB: outputs the picture with the lowest POC and evicts
 * it, making room for a new entry. For MVC streams, the other view
 * components of the same access unit are output around it, in VOC
 * order.
 *
 * Returns TRUE if a picture was output successfully, FALSE if the DPB
 * had nothing to output or the output operation failed. */
static gboolean
dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;
    gint found_index;
    gboolean success;

    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
    if (found_index < 0)
        return FALSE;

    /* First output the view components of the same access unit that
       precede the found picture (lower VOC) */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);

    success = dpb_output(decoder, priv->dpb[found_index]);
    dpb_evict(decoder, found_picture, found_index);
    if (priv->max_views == 1)
        return success;

    /* ... then the remaining view components of that access unit */
    if (picture && picture->base.poc != found_picture->base.poc)
        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
    return success;
}
859
860 static void
861 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
862 {
863     GstVaapiDecoderH264Private * const priv = &decoder->priv;
864     guint i, n;
865
866     for (i = 0; i < priv->dpb_count; i++) {
867         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
868             continue;
869         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
870     }
871
872     /* Compact the resulting DPB, i.e. remove holes */
873     for (i = 0, n = 0; i < priv->dpb_count; i++) {
874         if (priv->dpb[i]) {
875             if (i != n) {
876                 priv->dpb[n] = priv->dpb[i];
877                 priv->dpb[i] = NULL;
878             }
879             n++;
880         }
881     }
882     priv->dpb_count = n;
883
884     /* Clear previous frame buffers only if this is a "flush-all" operation,
885        or if the picture is the first one in the access unit */
886     if (priv->prev_frames && (!picture ||
887             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
888                 GST_VAAPI_PICTURE_FLAG_AU_START))) {
889         for (i = 0; i < priv->max_views; i++)
890             gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
891     }
892 }
893
894 static void
895 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
896 {
897     GstVaapiDecoderH264Private * const priv = &decoder->priv;
898     guint i;
899
900     /* Detect broken frames and mark them as having a single field if
901        needed */
902     for (i = 0; i < priv->dpb_count; i++) {
903         GstVaapiFrameStore * const fs = priv->dpb[i];
904         if (!fs->output_needed || gst_vaapi_frame_store_is_complete(fs))
905             continue;
906         GST_VAAPI_PICTURE_FLAG_SET(fs->buffers[0],
907             GST_VAAPI_PICTURE_FLAG_ONEFIELD);
908     }
909
910     /* Output any frame remaining in DPB */
911     while (dpb_bump(decoder, picture))
912         ;
913     dpb_clear(decoder, picture);
914 }
915
916 static void
917 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
918 {
919     GstVaapiDecoderH264Private * const priv = &decoder->priv;
920     const gboolean is_last_picture = /* in the access unit */
921         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
922     guint i;
923
924     // Remove all unused inter-view only reference components of the current AU
925     i = 0;
926     while (i < priv->dpb_count) {
927         GstVaapiFrameStore * const fs = priv->dpb[i];
928         if (fs->view_id != picture->base.view_id &&
929             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
930             (is_last_picture ||
931              !is_inter_view_reference_for_next_frames(decoder, fs)))
932             dpb_remove_index(decoder, i);
933         else
934             i++;
935     }
936 }
937
/* Adds @picture to the DPB, implementing the storage rules of
 * H.264 spec, C.4.5. Second fields are merged into the frame store of
 * their first field; otherwise a new frame store is created, bumping
 * existing entries out of the DPB if it is full.
 *
 * Returns TRUE on success, FALSE on allocation or output failure. */
static gboolean
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiFrameStore *fs;
    guint i;

    if (priv->max_views > 1)
        dpb_prune_mvc(decoder, picture);

    // Remove all unused pictures
    if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
        i = 0;
        while (i < priv->dpb_count) {
            GstVaapiFrameStore * const fs = priv->dpb[i];
            /* Only prune entries of the same view that are neither
               awaiting output nor used for reference */
            if (fs->view_id == picture->base.view_id &&
                !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
                dpb_remove_index(decoder, i);
            else
                i++;
        }
    }

    // Check if picture is the second field and the first field is still in DPB
    if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
        !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
        const gint found_index = dpb_find_picture(decoder,
            GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
        if (found_index >= 0)
            return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);

        // ... also check the previous picture that was immediately output
        fs = priv->prev_frames[picture->base.voc];
        if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
            if (!gst_vaapi_frame_store_add(fs, picture))
                return FALSE;
            /* The first field was already output, so output the now
               completed frame immediately as well */
            return dpb_output(decoder, fs);
        }
    }

    // Create new frame store, and split fields if necessary
    fs = gst_vaapi_frame_store_new(picture);
    if (!fs)
        return FALSE;
    /* The prev_frames[] cache takes its own reference */
    gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
    gst_vaapi_frame_store_unref(fs);

    if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
        if (!gst_vaapi_frame_store_split_fields(fs))
            return FALSE;
    }

    // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
    if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
        /* Make room by bumping pictures out until a slot is free */
        while (priv->dpb_count == priv->dpb_size) {
            if (!dpb_bump(decoder, picture))
                return FALSE;
        }
    }

    // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
    else {
        const gboolean StoreInterViewOnlyRefFlag =
            !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_AU_END) &&
            GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
        /* Pictures neither output nor used as inter-view reference
           need not be stored at all */
        if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
            return TRUE;
        while (priv->dpb_count == priv->dpb_size) {
            GstVaapiPictureH264 *found_picture;
            if (!StoreInterViewOnlyRefFlag) {
                /* If this picture would be output before anything in
                   the DPB, output it immediately instead of storing */
                if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
                    found_picture->base.poc > picture->base.poc)
                    return dpb_output(decoder, fs);
            }
            if (!dpb_bump(decoder, picture))
                return FALSE;
        }
    }
    gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
    return TRUE;
}
1021
1022 static gboolean
1023 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
1024 {
1025     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1026
1027     if (dpb_size > priv->dpb_size_max) {
1028         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
1029         if (!priv->dpb)
1030             return FALSE;
1031         memset(&priv->dpb[priv->dpb_size_max], 0,
1032             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
1033         priv->dpb_size_max = dpb_size;
1034     }
1035     priv->dpb_size = dpb_size;
1036
1037     GST_DEBUG("DPB size %u", priv->dpb_size);
1038     return TRUE;
1039 }
1040
1041 static void
1042 unref_inter_view(GstVaapiPictureH264 *picture)
1043 {
1044     if (!picture)
1045         return;
1046     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1047     gst_vaapi_picture_unref(picture);
1048 }
1049
1050 /* Resets MVC resources */
1051 static gboolean
1052 mvc_reset(GstVaapiDecoderH264 *decoder)
1053 {
1054     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1055     guint i;
1056
1057     // Resize array of inter-view references
1058     if (!priv->inter_views) {
1059         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1060             (GDestroyNotify)unref_inter_view);
1061         if (!priv->inter_views)
1062             return FALSE;
1063     }
1064
1065     // Resize array of previous frame buffers
1066     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1067         gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1068
1069     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1070         sizeof(*priv->prev_frames));
1071     if (!priv->prev_frames) {
1072         priv->prev_frames_alloc = 0;
1073         return FALSE;
1074     }
1075     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1076         priv->prev_frames[i] = NULL;
1077     priv->prev_frames_alloc = priv->max_views;
1078     return TRUE;
1079 }
1080
1081 static GstVaapiDecoderStatus
1082 get_status(GstH264ParserResult result)
1083 {
1084     GstVaapiDecoderStatus status;
1085
1086     switch (result) {
1087     case GST_H264_PARSER_OK:
1088         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1089         break;
1090     case GST_H264_PARSER_NO_NAL_END:
1091         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1092         break;
1093     case GST_H264_PARSER_ERROR:
1094         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1095         break;
1096     default:
1097         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1098         break;
1099     }
1100     return status;
1101 }
1102
1103 static void
1104 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1105 {
1106     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1107
1108     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1109     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1110     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1111
1112     dpb_clear(decoder, NULL);
1113
1114     if (priv->inter_views) {
1115         g_ptr_array_unref(priv->inter_views);
1116         priv->inter_views = NULL;
1117     }
1118
1119     if (priv->parser) {
1120         gst_h264_nal_parser_free(priv->parser);
1121         priv->parser = NULL;
1122     }
1123 }
1124
1125 static gboolean
1126 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1127 {
1128     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1129
1130     gst_vaapi_decoder_h264_close(decoder);
1131
1132     priv->parser = gst_h264_nal_parser_new();
1133     if (!priv->parser)
1134         return FALSE;
1135     return TRUE;
1136 }
1137
1138 static void
1139 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1140 {
1141     GstVaapiDecoderH264 * const decoder =
1142         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1143     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1144     guint i;
1145
1146     gst_vaapi_decoder_h264_close(decoder);
1147
1148     g_free(priv->dpb);
1149     priv->dpb = NULL;
1150     priv->dpb_size = 0;
1151
1152     g_free(priv->prev_frames);
1153     priv->prev_frames = NULL;
1154     priv->prev_frames_alloc = 0;
1155
1156     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1157         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1158     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1159
1160     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1161         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1162     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1163 }
1164
1165 static gboolean
1166 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1167 {
1168     GstVaapiDecoderH264 * const decoder =
1169         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1170     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1171
1172     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1173     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1174     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1175     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1176     priv->progressive_sequence  = TRUE;
1177     return TRUE;
1178 }
1179
1180 /* Activates the supplied PPS */
1181 static GstH264PPS *
1182 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1183 {
1184     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1185     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1186
1187     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1188     return pi ? &pi->data.pps : NULL;
1189 }
1190
1191 /* Returns the active PPS */
1192 static inline GstH264PPS *
1193 get_pps(GstVaapiDecoderH264 *decoder)
1194 {
1195     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1196
1197     return pi ? &pi->data.pps : NULL;
1198 }
1199
1200 /* Activate the supplied SPS */
1201 static GstH264SPS *
1202 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1203 {
1204     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1205     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1206
1207     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1208     return pi ? &pi->data.sps : NULL;
1209 }
1210
1211 /* Returns the active SPS */
1212 static inline GstH264SPS *
1213 get_sps(GstVaapiDecoderH264 *decoder)
1214 {
1215     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1216
1217     return pi ? &pi->data.sps : NULL;
1218 }
1219
1220 static void
1221 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1222     GstVaapiProfile profile)
1223 {
1224     guint n_profiles = *n_profiles_ptr;
1225
1226     profiles[n_profiles++] = profile;
1227     switch (profile) {
1228     case GST_VAAPI_PROFILE_H264_MAIN:
1229         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1230         break;
1231     default:
1232         break;
1233     }
1234     *n_profiles_ptr = n_profiles;
1235 }
1236
1237 /* Fills in compatible profiles for MVC decoding */
1238 static void
1239 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1240     guint *n_profiles_ptr, guint dpb_size)
1241 {
1242     const gchar * const vendor_string =
1243         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1244
1245     gboolean add_high_profile = FALSE;
1246     struct map {
1247         const gchar *str;
1248         guint str_len;
1249     };
1250     const struct map *m;
1251
1252     // Drivers that support slice level decoding
1253     if (vendor_string && dpb_size <= 16) {
1254         static const struct map drv_names[] = {
1255             { "Intel i965 driver", 17 },
1256             { NULL, 0 }
1257         };
1258         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1259             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1260                 add_high_profile = TRUE;
1261         }
1262     }
1263
1264     if (add_high_profile)
1265         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1266 }
1267
/* Determines the VA profile to use for decoding: builds an ordered
 * list of candidate profiles compatible with the SPS profile_idc and
 * constraint flags (A.2.x fallback rules), then returns the first one
 * the display can actually decode, or GST_VAAPI_PROFILE_UNKNOWN. */
static GstVaapiProfile
get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
    GstVaapiProfile profile, profiles[4];
    guint i, n_profiles = 0;

    profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
    if (!profile)
        return GST_VAAPI_PROFILE_UNKNOWN;

    /* profiles[0] is always the stream's own profile; fallbacks follow */
    fill_profiles(profiles, &n_profiles, profile);
    switch (profile) {
    case GST_VAAPI_PROFILE_H264_BASELINE:
        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
            fill_profiles(profiles, &n_profiles,
                GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
            fill_profiles(profiles, &n_profiles,
                GST_VAAPI_PROFILE_H264_MAIN);
        }
        break;
    case GST_VAAPI_PROFILE_H264_EXTENDED:
        if (sps->constraint_set1_flag) { // A.2.2 (main profile)
            fill_profiles(profiles, &n_profiles,
                GST_VAAPI_PROFILE_H264_MAIN);
        }
        break;
    case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
        /* A two-view Multiview High stream is decodable as Stereo High */
        if (priv->max_views == 2) {
            fill_profiles(profiles, &n_profiles,
                GST_VAAPI_PROFILE_H264_STEREO_HIGH);
        }
        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
        break;
    case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
        if (sps->frame_mbs_only_flag) {
            fill_profiles(profiles, &n_profiles,
                GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
        }
        fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
        break;
    default:
        break;
    }

    /* If the preferred profile (profiles[0]) matches one that we already
       found, then just return it now instead of searching for it again */
    if (profiles[0] == priv->profile)
        return priv->profile;

    for (i = 0; i < n_profiles; i++) {
        if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
            return profiles[i];
    }
    return GST_VAAPI_PROFILE_UNKNOWN;
}
1325
/* Ensures the VA decoding context matches the supplied SPS: detects
 * changes in DPB size, profile, chroma format or picture size, and
 * (re)creates the context, DPB and MVC state when any of them changed.
 *
 * Returns GST_VAAPI_DECODER_STATUS_SUCCESS, or an error status if the
 * stream is unsupported or an allocation failed. */
static GstVaapiDecoderStatus
ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
{
    GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiContextInfo info;
    GstVaapiProfile profile;
    GstVaapiChromaType chroma_type;
    gboolean reset_context = FALSE;
    guint mb_width, mb_height, dpb_size, num_views;

    /* max_views only ever grows within a stream */
    num_views = get_num_views(sps);
    if (priv->max_views < num_views) {
        priv->max_views = num_views;
        GST_DEBUG("maximum number of views changed to %u", num_views);
    }

    dpb_size = get_max_dec_frame_buffering(sps);
    if (priv->dpb_size < dpb_size) {
        GST_DEBUG("DPB size increased");
        reset_context = TRUE;
    }

    profile = get_profile(decoder, sps, dpb_size);
    if (!profile) {
        GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
    }

    /* For MVC (max_views > 1) a per-view profile change alone does not
       force a context reset */
    if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
        GST_DEBUG("profile changed");
        reset_context = TRUE;
        priv->profile = profile;
    }

    chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
    if (!chroma_type) {
        GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
        return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
    }

    if (priv->chroma_type != chroma_type) {
        GST_DEBUG("chroma format changed");
        reset_context     = TRUE;
        priv->chroma_type = chroma_type;
    }

    /* Map units count double in field-coded (non frame-mbs-only) streams */
    mb_width  = sps->pic_width_in_mbs_minus1 + 1;
    mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
        !sps->frame_mbs_only_flag;
    if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
        GST_DEBUG("size changed");
        reset_context   = TRUE;
        priv->mb_width  = mb_width;
        priv->mb_height = mb_height;
    }

    priv->progressive_sequence = sps->frame_mbs_only_flag;
    gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);

    gst_vaapi_decoder_set_pixel_aspect_ratio(
        base_decoder,
        sps->vui_parameters.par_n,
        sps->vui_parameters.par_d
    );

    if (!reset_context && priv->has_context)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    /* XXX: fix surface size when cropping is implemented */
    info.profile    = priv->profile;
    info.entrypoint = priv->entrypoint;
    info.chroma_type = priv->chroma_type;
    info.width      = sps->width;
    info.height     = sps->height;
    info.ref_frames = dpb_size;

    if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    priv->has_context = TRUE;

    /* Reset DPB */
    if (!dpb_reset(decoder, dpb_size))
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;

    /* Reset MVC data */
    if (!mvc_reset(decoder))
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
1416
1417 static void
1418 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1419     const GstH264SPS *sps)
1420 {
1421     guint i;
1422
1423     /* There are always 6 4x4 scaling lists */
1424     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1425     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1426
1427     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1428         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1429             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1430 }
1431
1432 static void
1433 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1434     const GstH264SPS *sps)
1435 {
1436     guint i, n;
1437
1438     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1439     if (!pps->transform_8x8_mode_flag)
1440         return;
1441
1442     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1443     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1444
1445     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1446     for (i = 0; i < n; i++) {
1447         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1448             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1449     }
1450 }
1451
1452 static GstVaapiDecoderStatus
1453 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1454 {
1455     GstVaapiPicture * const base_picture = &picture->base;
1456     GstH264PPS * const pps = get_pps(decoder);
1457     GstH264SPS * const sps = get_sps(decoder);
1458     VAIQMatrixBufferH264 *iq_matrix;
1459
1460     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1461     if (!base_picture->iq_matrix) {
1462         GST_ERROR("failed to allocate IQ matrix");
1463         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1464     }
1465     iq_matrix = base_picture->iq_matrix->param;
1466
1467     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1468        is not large enough to hold lists for 4:4:4 */
1469     if (sps->chroma_format_idc == 3)
1470         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1471
1472     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1473     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1474
1475     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1476 }
1477
1478 static inline gboolean
1479 is_valid_state(guint state, guint ref_state)
1480 {
1481     return (state & ref_state) == ref_state;
1482 }
1483
/* Finishes decoding the current picture: submits it to the hardware,
 * applies reference picture marking and stores it into the DPB.
 *
 * Returns SUCCESS, DROP_FRAME when the accumulated state never formed
 * a valid picture, or ERROR_UNKNOWN on decode/DPB failure. */
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderH264 *decoder)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 * const picture = priv->current_picture;

    /* A picture is only decodable once SPS, PPS and at least one
       slice were seen (GST_H264_VIDEO_STATE_VALID_PICTURE) */
    if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
        goto drop_frame;
    priv->decoder_state = 0;

    if (!picture)
        return GST_VAAPI_DECODER_STATUS_SUCCESS;

    if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
        goto error;
    if (!exec_ref_pic_marking(decoder, picture))
        goto error;
    if (!dpb_add(decoder, picture))
        goto error;
    /* The DPB now owns the picture; drop our working reference */
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;

error:
    /* XXX: fix for cases where first field failed to be decoded */
    gst_vaapi_picture_replace(&priv->current_picture, NULL);
    return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

drop_frame:
    priv->decoder_state = 0;
    return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}
1515
1516 static GstVaapiDecoderStatus
1517 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1518 {
1519     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1520     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1521     GstH264SPS * const sps = &pi->data.sps;
1522     GstH264ParserResult result;
1523
1524     GST_DEBUG("parse SPS");
1525
1526     priv->parser_state = 0;
1527
1528     /* Variables that don't have inferred values per the H.264
1529        standard but that should get a default value anyway */
1530     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1531
1532     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1533     if (result != GST_H264_PARSER_OK)
1534         return get_status(result);
1535
1536     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1537     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1538 }
1539
1540 static GstVaapiDecoderStatus
1541 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1542 {
1543     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1544     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1545     GstH264SPS * const sps = &pi->data.sps;
1546     GstH264ParserResult result;
1547
1548     GST_DEBUG("parse subset SPS");
1549
1550     /* Variables that don't have inferred values per the H.264
1551        standard but that should get a default value anyway */
1552     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1553
1554     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1555         TRUE);
1556     if (result != GST_H264_PARSER_OK)
1557         return get_status(result);
1558
1559     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1560     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1561 }
1562
1563 static GstVaapiDecoderStatus
1564 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1565 {
1566     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1567     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1568     GstH264PPS * const pps = &pi->data.pps;
1569     GstH264ParserResult result;
1570
1571     GST_DEBUG("parse PPS");
1572
1573     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1574
1575     /* Variables that don't have inferred values per the H.264
1576        standard but that should get a default value anyway */
1577     pps->slice_group_map_type = 0;
1578     pps->slice_group_change_rate_minus1 = 0;
1579
1580     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1581     if (result != GST_H264_PARSER_OK)
1582         return get_status(result);
1583
1584     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1585     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1586 }
1587
1588 static GstVaapiDecoderStatus
1589 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1590 {
1591     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1592     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1593     GArray ** const sei_ptr = &pi->data.sei;
1594     GstH264ParserResult result;
1595
1596     GST_DEBUG("parse SEI");
1597
1598     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1599     if (result != GST_H264_PARSER_OK) {
1600         GST_WARNING("failed to parse SEI messages");
1601         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1602     }
1603     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1604 }
1605
/* Parses a slice header NAL unit into the unit's parser info, taking
 * care of MVC specifics: propagates NAL extension info from an
 * immediately preceding Prefix NAL unit, or infers base-view defaults
 * per H.7.4.1.1 when no prefix was present. Also computes the view id
 * and view order index (VOC) for the slice. */
static GstVaapiDecoderStatus
parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
    GstH264NalUnit * const nalu = &pi->nalu;
    GstH264SPS *sps;
    GstH264ParserResult result;

    GST_DEBUG("parse slice");

    /* Keep only the SPS/PPS bits; slice state starts over */
    priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
                           GST_H264_VIDEO_STATE_GOT_PPS);

    /* Propagate Prefix NAL unit info, if necessary */
    switch (nalu->type) {
    case GST_H264_NAL_SLICE:
    case GST_H264_NAL_SLICE_IDR: {
        GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
        if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
            /* MVC sequences shall have a Prefix NAL unit immediately
               preceding this NAL unit */
            pi->nalu.extension_type = prev_pi->nalu.extension_type;
            pi->nalu.extension = prev_pi->nalu.extension;
        }
        else {
            /* In the very unlikely case there is no Prefix NAL unit
               immediately preceding this NAL unit, try to infer some
               defaults (H.7.4.1.1) */
            GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
            mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
            nalu->idr_pic_flag = !mvc->non_idr_flag;
            mvc->priority_id = 0;
            mvc->view_id = 0;
            mvc->temporal_id = 0;
            mvc->anchor_pic_flag = 0;
            mvc->inter_view_flag = 1;
        }
        break;
    }
    }

    /* Fields without an inferred value per the H.264 standard, but
       which we still want initialized to a sane default */
    slice_hdr->cabac_init_idc = 0;
    slice_hdr->direct_spatial_mv_pred_flag = 0;

    result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
        slice_hdr, TRUE, TRUE);
    if (result != GST_H264_PARSER_OK)
        return get_status(result);

    sps = slice_hdr->pps->sequence;

    /* Update MVC data */
    pi->view_id = get_view_id(&pi->nalu);
    pi->voc = get_view_order_index(sps, pi->view_id);

    priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
1668
1669 static GstVaapiDecoderStatus
1670 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1671 {
1672     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1673     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1674     GstH264SPS * const sps = &pi->data.sps;
1675
1676     GST_DEBUG("decode SPS");
1677
1678     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1679     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1680 }
1681
1682 static GstVaapiDecoderStatus
1683 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1684 {
1685     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1686     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1687     GstH264SPS * const sps = &pi->data.sps;
1688
1689     GST_DEBUG("decode subset SPS");
1690
1691     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1692     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1693 }
1694
1695 static GstVaapiDecoderStatus
1696 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1697 {
1698     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1699     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1700     GstH264PPS * const pps = &pi->data.pps;
1701
1702     GST_DEBUG("decode PPS");
1703
1704     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1705     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1706 }
1707
1708 static GstVaapiDecoderStatus
1709 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1710 {
1711     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1712     GstVaapiDecoderStatus status;
1713
1714     GST_DEBUG("decode sequence-end");
1715
1716     status = decode_current_picture(decoder);
1717     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1718         return status;
1719
1720     dpb_flush(decoder, NULL);
1721
1722     /* Reset defaults, should there be a new sequence available next */
1723     priv->max_views = 1;
1724     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1725 }
1726
1727 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1728 static void
1729 init_picture_poc_0(
1730     GstVaapiDecoderH264 *decoder,
1731     GstVaapiPictureH264 *picture,
1732     GstH264SliceHdr     *slice_hdr
1733 )
1734 {
1735     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1736     GstH264SPS * const sps = get_sps(decoder);
1737     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1738     gint32 temp_poc;
1739
1740     GST_DEBUG("decode picture order count type 0");
1741
1742     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1743         priv->prev_poc_msb = 0;
1744         priv->prev_poc_lsb = 0;
1745     }
1746     else if (priv->prev_pic_has_mmco5) {
1747         priv->prev_poc_msb = 0;
1748         priv->prev_poc_lsb =
1749             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1750              0 : priv->field_poc[TOP_FIELD]);
1751     }
1752     else {
1753         priv->prev_poc_msb = priv->poc_msb;
1754         priv->prev_poc_lsb = priv->poc_lsb;
1755     }
1756
1757     // (8-3)
1758     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1759     if (priv->poc_lsb < priv->prev_poc_lsb &&
1760         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1761         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1762     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1763              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1764         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1765     else
1766         priv->poc_msb = priv->prev_poc_msb;
1767
1768     temp_poc = priv->poc_msb + priv->poc_lsb;
1769     switch (picture->structure) {
1770     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1771         // (8-4, 8-5)
1772         priv->field_poc[TOP_FIELD] = temp_poc;
1773         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1774             slice_hdr->delta_pic_order_cnt_bottom;
1775         break;
1776     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1777         // (8-4)
1778         priv->field_poc[TOP_FIELD] = temp_poc;
1779         break;
1780     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1781         // (8-5)
1782         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1783         break;
1784     }
1785 }
1786
1787 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1788 static void
1789 init_picture_poc_1(
1790     GstVaapiDecoderH264 *decoder,
1791     GstVaapiPictureH264 *picture,
1792     GstH264SliceHdr     *slice_hdr
1793 )
1794 {
1795     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1796     GstH264SPS * const sps = get_sps(decoder);
1797     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1798     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1799     guint i;
1800
1801     GST_DEBUG("decode picture order count type 1");
1802
1803     if (priv->prev_pic_has_mmco5)
1804         prev_frame_num_offset = 0;
1805     else
1806         prev_frame_num_offset = priv->frame_num_offset;
1807
1808     // (8-6)
1809     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1810         priv->frame_num_offset = 0;
1811     else if (priv->prev_frame_num > priv->frame_num)
1812         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1813     else
1814         priv->frame_num_offset = prev_frame_num_offset;
1815
1816     // (8-7)
1817     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1818         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1819     else
1820         abs_frame_num = 0;
1821     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1822         abs_frame_num = abs_frame_num - 1;
1823
1824     if (abs_frame_num > 0) {
1825         gint32 expected_delta_per_poc_cycle;
1826         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1827
1828         expected_delta_per_poc_cycle = 0;
1829         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1830             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1831
1832         // (8-8)
1833         poc_cycle_cnt = (abs_frame_num - 1) /
1834             sps->num_ref_frames_in_pic_order_cnt_cycle;
1835         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1836             sps->num_ref_frames_in_pic_order_cnt_cycle;
1837
1838         // (8-9)
1839         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1840         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1841             expected_poc += sps->offset_for_ref_frame[i];
1842     }
1843     else
1844         expected_poc = 0;
1845     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1846         expected_poc += sps->offset_for_non_ref_pic;
1847
1848     // (8-10)
1849     switch (picture->structure) {
1850     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1851         priv->field_poc[TOP_FIELD] = expected_poc +
1852             slice_hdr->delta_pic_order_cnt[0];
1853         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1854             sps->offset_for_top_to_bottom_field +
1855             slice_hdr->delta_pic_order_cnt[1];
1856         break;
1857     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1858         priv->field_poc[TOP_FIELD] = expected_poc +
1859             slice_hdr->delta_pic_order_cnt[0];
1860         break;
1861     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1862         priv->field_poc[BOTTOM_FIELD] = expected_poc + 
1863             sps->offset_for_top_to_bottom_field +
1864             slice_hdr->delta_pic_order_cnt[0];
1865         break;
1866     }
1867 }
1868
1869 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1870 static void
1871 init_picture_poc_2(
1872     GstVaapiDecoderH264 *decoder,
1873     GstVaapiPictureH264 *picture,
1874     GstH264SliceHdr     *slice_hdr
1875 )
1876 {
1877     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1878     GstH264SPS * const sps = get_sps(decoder);
1879     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1880     gint32 prev_frame_num_offset, temp_poc;
1881
1882     GST_DEBUG("decode picture order count type 2");
1883
1884     if (priv->prev_pic_has_mmco5)
1885         prev_frame_num_offset = 0;
1886     else
1887         prev_frame_num_offset = priv->frame_num_offset;
1888
1889     // (8-11)
1890     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1891         priv->frame_num_offset = 0;
1892     else if (priv->prev_frame_num > priv->frame_num)
1893         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1894     else
1895         priv->frame_num_offset = prev_frame_num_offset;
1896
1897     // (8-12)
1898     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1899         temp_poc = 0;
1900     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1901         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1902     else
1903         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1904
1905     // (8-13)
1906     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1907         priv->field_poc[TOP_FIELD] = temp_poc;
1908     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1909         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1910 }
1911
1912 /* 8.2.1 - Decoding process for picture order count */
1913 static void
1914 init_picture_poc(
1915     GstVaapiDecoderH264 *decoder,
1916     GstVaapiPictureH264 *picture,
1917     GstH264SliceHdr     *slice_hdr
1918 )
1919 {
1920     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1921     GstH264SPS * const sps = get_sps(decoder);
1922
1923     switch (sps->pic_order_cnt_type) {
1924     case 0:
1925         init_picture_poc_0(decoder, picture, slice_hdr);
1926         break;
1927     case 1:
1928         init_picture_poc_1(decoder, picture, slice_hdr);
1929         break;
1930     case 2:
1931         init_picture_poc_2(decoder, picture, slice_hdr);
1932         break;
1933     }
1934
1935     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1936         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1937     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1938         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1939     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1940 }
1941
1942 static int
1943 compare_picture_pic_num_dec(const void *a, const void *b)
1944 {
1945     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1946     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1947
1948     return picB->pic_num - picA->pic_num;
1949 }
1950
1951 static int
1952 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1953 {
1954     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1955     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1956
1957     return picA->long_term_pic_num - picB->long_term_pic_num;
1958 }
1959
1960 static int
1961 compare_picture_poc_dec(const void *a, const void *b)
1962 {
1963     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1964     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1965
1966     return picB->base.poc - picA->base.poc;
1967 }
1968
1969 static int
1970 compare_picture_poc_inc(const void *a, const void *b)
1971 {
1972     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1973     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1974
1975     return picA->base.poc - picB->base.poc;
1976 }
1977
1978 static int
1979 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1980 {
1981     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1982     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1983
1984     return picB->frame_num_wrap - picA->frame_num_wrap;
1985 }
1986
1987 static int
1988 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1989 {
1990     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1991     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1992
1993     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1994 }
1995
1996 /* 8.2.4.1 - Decoding process for picture numbers */
1997 static void
1998 init_picture_refs_pic_num(
1999     GstVaapiDecoderH264 *decoder,
2000     GstVaapiPictureH264 *picture,
2001     GstH264SliceHdr     *slice_hdr
2002 )
2003 {
2004     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2005     GstH264SPS * const sps = get_sps(decoder);
2006     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
2007     guint i;
2008
2009     GST_DEBUG("decode picture numbers");
2010
2011     for (i = 0; i < priv->short_ref_count; i++) {
2012         GstVaapiPictureH264 * const pic = priv->short_ref[i];
2013
2014         // (H.8.2)
2015         if (pic->base.view_id != picture->base.view_id)
2016             continue;
2017
2018         // (8-27)
2019         if (pic->frame_num > priv->frame_num)
2020             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
2021         else
2022             pic->frame_num_wrap = pic->frame_num;
2023
2024         // (8-28, 8-30, 8-31)
2025         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2026             pic->pic_num = pic->frame_num_wrap;
2027         else {
2028             if (pic->structure == picture->structure)
2029                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
2030             else
2031                 pic->pic_num = 2 * pic->frame_num_wrap;
2032         }
2033     }
2034
2035     for (i = 0; i < priv->long_ref_count; i++) {
2036         GstVaapiPictureH264 * const pic = priv->long_ref[i];
2037
2038         // (H.8.2)
2039         if (pic->base.view_id != picture->base.view_id)
2040             continue;
2041
2042         // (8-29, 8-32, 8-33)
2043         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2044             pic->long_term_pic_num = pic->long_term_frame_idx;
2045         else {
2046             if (pic->structure == picture->structure)
2047                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2048             else
2049                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2050         }
2051     }
2052 }
2053
/* Sorts an array of picture pointers in place, using the
   compare_picture_<compare_func>() qsort comparator */
#define SORT_REF_LIST(list, n, compare_func) \
    qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
2056
/* 8.2.4.2.5 helper: appends fields from ref_list to RefPicList,
   alternating parity. Two cursors scan ref_list independently: i picks
   the next field with the same parity as the current picture, j picks
   the next field with the opposite parity; same-parity fields take
   precedence at each step */
static void
init_picture_refs_fields_1(
    guint                picture_structure,
    GstVaapiPictureH264 *RefPicList[32],
    guint               *RefPicList_count,
    GstVaapiPictureH264 *ref_list[32],
    guint                ref_list_count
)
{
    guint i, j, n;

    i = 0;
    j = 0;
    n = *RefPicList_count;
    do {
        /* NOTE(review): asserts headroom before each append; assumes
           the combined short+long ref counts never exceed 32 entries
           here - TODO confirm upstream DPB bounds guarantee this */
        g_assert(n < 32);
        for (; i < ref_list_count; i++) {
            if (ref_list[i]->structure == picture_structure) {
                RefPicList[n++] = ref_list[i++];
                break;
            }
        }
        for (; j < ref_list_count; j++) {
            if (ref_list[j]->structure != picture_structure) {
                RefPicList[n++] = ref_list[j++];
                break;
            }
        }
    } while (i < ref_list_count || j < ref_list_count);
    *RefPicList_count = n;
}
2088
2089 static inline void
2090 init_picture_refs_fields(
2091     GstVaapiPictureH264 *picture,
2092     GstVaapiPictureH264 *RefPicList[32],
2093     guint               *RefPicList_count,
2094     GstVaapiPictureH264 *short_ref[32],
2095     guint                short_ref_count,
2096     GstVaapiPictureH264 *long_ref[32],
2097     guint                long_ref_count
2098 )
2099 {
2100     guint n = 0;
2101
2102     /* 8.2.4.2.5 - reference picture lists in fields */
2103     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2104         short_ref, short_ref_count);
2105     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2106         long_ref, long_ref_count);
2107     *RefPicList_count = n;
2108 }
2109
2110 /* Finds the inter-view reference picture with the supplied view id */
2111 static GstVaapiPictureH264 *
2112 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2113 {
2114     GPtrArray * const inter_views = decoder->priv.inter_views;
2115     guint i;
2116
2117     for (i = 0; i < inter_views->len; i++) {
2118         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2119         if (picture->base.view_id == view_id)
2120             return picture;
2121     }
2122
2123     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2124         view_id);
2125     return NULL;
2126 }
2127
2128 /* Checks whether the view id exists in the supplied list of view ids */
2129 static gboolean
2130 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2131 {
2132     guint i;
2133
2134     for (i = 0; i < num_view_ids; i++) {
2135         if (view_ids[i] == view_id)
2136             return TRUE;
2137     }
2138     return FALSE;
2139 }
2140
2141 static gboolean
2142 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2143     gboolean is_anchor)
2144 {
2145     if (is_anchor)
2146         return (find_view_id(view_id, view->anchor_ref_l0,
2147                     view->num_anchor_refs_l0) ||
2148                 find_view_id(view_id, view->anchor_ref_l1,
2149                     view->num_anchor_refs_l1));
2150
2151     return (find_view_id(view_id, view->non_anchor_ref_l0,
2152                 view->num_non_anchor_refs_l0) ||
2153             find_view_id(view_id, view->non_anchor_ref_l1,
2154                 view->num_non_anchor_refs_l1));
2155 }
2156
2157 /* Checks whether the inter-view reference picture with the supplied
2158    view id is used for decoding the current view component picture */
2159 static gboolean
2160 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2161     guint16 view_id, GstVaapiPictureH264 *picture)
2162 {
2163     const GstH264SPS * const sps = get_sps(decoder);
2164     gboolean is_anchor;
2165
2166     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2167         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2168         return FALSE;
2169
2170     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2171     return find_view_id_in_view(view_id,
2172         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2173 }
2174
2175 /* Checks whether the supplied inter-view reference picture is used
2176    for decoding the next view component pictures */
2177 static gboolean
2178 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2179     GstVaapiPictureH264 *picture)
2180 {
2181     const GstH264SPS * const sps = get_sps(decoder);
2182     gboolean is_anchor;
2183     guint i, num_views;
2184
2185     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2186         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2187         return FALSE;
2188
2189     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2190     num_views = sps->extension.mvc.num_views_minus1 + 1;
2191     for (i = picture->base.voc + 1; i < num_views; i++) {
2192         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2193         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2194             return TRUE;
2195     }
2196     return FALSE;
2197 }
2198
2199 /* H.8.2.1 - Initialization process for inter-view prediction references */
2200 static void
2201 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2202     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2203     const guint16 *view_ids, guint num_view_ids)
2204 {
2205     guint j, n;
2206
2207     n = *ref_list_count_ptr;
2208     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2209         GstVaapiPictureH264 * const pic =
2210             find_inter_view_reference(decoder, view_ids[j]);
2211         if (pic)
2212             ref_list[n++] = pic;
2213     }
2214     *ref_list_count_ptr = n;
2215 }
2216
/* H.8.2.1 - appends inter-view prediction references to the end of
   RefPicList0 or RefPicList1 (selected by `list`), using the anchor or
   non-anchor dependency lists of the picture's view as signalled in
   the MVC SPS extension */
static inline void
init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    const GstH264SPS * const sps = get_sps(decoder);
    const GstH264SPSExtMVCView *view;

    GST_DEBUG("initialize reference picture list for inter-view prediction");

    if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
        return;
    view = &sps->extension.mvc.view[picture->base.voc];

/* Token-pastes the list index (0/1) and dependency kind
   (anchor_ref/non_anchor_ref) into the matching priv->RefPicListX,
   slice header count and SPS view fields */
#define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
        init_picture_refs_mvc_1(decoder,                                \
            priv->RefPicList##ref_list,                                 \
            &priv->RefPicList##ref_list##_count,                        \
            slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
            view->view_list##_l##ref_list,                              \
            view->num_##view_list##s_l##ref_list);                      \
    } while (0)

    if (list == 0) {
        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
            INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
        else
            INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
    }
    else {
        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
            INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
        else
            INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
    }

#undef INVOKE_INIT_PICTURE_REFS_MVC
}
2255
/* 8.2.4.2.1 / 8.2.4.2.2 - initial reference picture list (RefPicList0)
   for P and SP slices: short-term references ordered by decreasing
   PicNum, followed by long-term references ordered by increasing
   LongTermPicNum; field pictures interleave parities per 8.2.4.2.5.
   Inter-view references (MVC) are appended last */
static void
init_picture_refs_p_slice(
    GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture,
    GstH264SliceHdr     *slice_hdr
)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 **ref_list;
    guint i;

    GST_DEBUG("decode reference picture list for P and SP slices");

    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
        /* 8.2.4.2.1 - P and SP slices in frames */
        if (priv->short_ref_count > 0) {
            /* Short-term references first, by decreasing PicNum */
            ref_list = priv->RefPicList0;
            for (i = 0; i < priv->short_ref_count; i++)
                ref_list[i] = priv->short_ref[i];
            SORT_REF_LIST(ref_list, i, pic_num_dec);
            priv->RefPicList0_count += i;
        }

        if (priv->long_ref_count > 0) {
            /* Then long-term references, by increasing LongTermPicNum */
            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
            for (i = 0; i < priv->long_ref_count; i++)
                ref_list[i] = priv->long_ref[i];
            SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
            priv->RefPicList0_count += i;
        }
    }
    else {
        /* 8.2.4.2.2 - P and SP slices in fields: sort the frame-level
           lists first, then interleave fields by parity */
        GstVaapiPictureH264 *short_ref[32];
        guint short_ref_count = 0;
        GstVaapiPictureH264 *long_ref[32];
        guint long_ref_count = 0;

        if (priv->short_ref_count > 0) {
            for (i = 0; i < priv->short_ref_count; i++)
                short_ref[i] = priv->short_ref[i];
            SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
            short_ref_count = i;
        }

        if (priv->long_ref_count > 0) {
            for (i = 0; i < priv->long_ref_count; i++)
                long_ref[i] = priv->long_ref[i];
            SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
            long_ref_count = i;
        }

        init_picture_refs_fields(
            picture,
            priv->RefPicList0, &priv->RefPicList0_count,
            short_ref,          short_ref_count,
            long_ref,           long_ref_count
        );
    }

    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
        /* RefPicList0 - append inter-view references (H.8.2.1) */
        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
    }
}
2321
/* 8.2.4.2.3 / 8.2.4.2.4 - initial reference picture lists for B
   slices. RefPicList0 orders short-term references past-first
   (decreasing POC before the current picture, then increasing POC
   after it); RefPicList1 is the mirror image (future-first).
   Long-term references follow in increasing LongTermPicNum order.
   Field pictures build intermediate frame lists then interleave
   parities per 8.2.4.2.5. Inter-view references (MVC) come last */
static void
init_picture_refs_b_slice(
    GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture,
    GstH264SliceHdr     *slice_hdr
)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 **ref_list;
    guint i, n;

    GST_DEBUG("decode reference picture list for B slices");

    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
        /* 8.2.4.2.3 - B slices in frames */

        /* RefPicList0 */
        if (priv->short_ref_count > 0) {
            // 1. Short-term references: POC < current, decreasing POC
            ref_list = priv->RefPicList0;
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc < picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_dec);
            priv->RefPicList0_count += n;

            // ... then POC >= current, increasing POC
            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc >= picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_inc);
            priv->RefPicList0_count += n;
        }

        if (priv->long_ref_count > 0) {
            // 2. Long-term references, increasing LongTermPicNum
            ref_list = &priv->RefPicList0[priv->RefPicList0_count];
            for (n = 0, i = 0; i < priv->long_ref_count; i++)
                ref_list[n++] = priv->long_ref[i];
            SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
            priv->RefPicList0_count += n;
        }

        /* RefPicList1 - mirror ordering of RefPicList0 */
        if (priv->short_ref_count > 0) {
            // 1. Short-term references: POC > current, increasing POC
            ref_list = priv->RefPicList1;
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc > picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_inc);
            priv->RefPicList1_count += n;

            // ... then POC <= current, decreasing POC
            ref_list = &priv->RefPicList1[priv->RefPicList1_count];
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc <= picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_dec);
            priv->RefPicList1_count += n;
        }

        if (priv->long_ref_count > 0) {
            // 2. Long-term references, increasing LongTermPicNum
            ref_list = &priv->RefPicList1[priv->RefPicList1_count];
            for (n = 0, i = 0; i < priv->long_ref_count; i++)
                ref_list[n++] = priv->long_ref[i];
            SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
            priv->RefPicList1_count += n;
        }
    }
    else {
        /* 8.2.4.2.4 - B slices in fields: build frame-level ordered
           lists, then interleave fields by parity (8.2.4.2.5) */
        GstVaapiPictureH264 *short_ref0[32];
        guint short_ref0_count = 0;
        GstVaapiPictureH264 *short_ref1[32];
        guint short_ref1_count = 0;
        GstVaapiPictureH264 *long_ref[32];
        guint long_ref_count = 0;

        /* refFrameList0ShortTerm - past (POC <= current) first */
        if (priv->short_ref_count > 0) {
            ref_list = short_ref0;
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc <= picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_dec);
            short_ref0_count += n;

            ref_list = &short_ref0[short_ref0_count];
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc > picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_inc);
            short_ref0_count += n;
        }

        /* refFrameList1ShortTerm - future (POC > current) first */
        if (priv->short_ref_count > 0) {
            ref_list = short_ref1;
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc > picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_inc);
            short_ref1_count += n;

            ref_list = &short_ref1[short_ref1_count];
            for (n = 0, i = 0; i < priv->short_ref_count; i++) {
                if (priv->short_ref[i]->base.poc <= picture->base.poc)
                    ref_list[n++] = priv->short_ref[i];
            }
            SORT_REF_LIST(ref_list, n, poc_dec);
            short_ref1_count += n;
        }

        /* refFrameListLongTerm - shared by both lists */
        if (priv->long_ref_count > 0) {
            for (i = 0; i < priv->long_ref_count; i++)
                long_ref[i] = priv->long_ref[i];
            SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
            long_ref_count = i;
        }

        init_picture_refs_fields(
            picture,
            priv->RefPicList0, &priv->RefPicList0_count,
            short_ref0,         short_ref0_count,
            long_ref,           long_ref_count
        );

        init_picture_refs_fields(
            picture,
            priv->RefPicList1, &priv->RefPicList1_count,
            short_ref1,         short_ref1_count,
            long_ref,           long_ref_count
        );
   }

    /* Check whether RefPicList1 is identical to RefPicList0, then
       swap if necessary (8.2.4.2.3 final step) */
    if (priv->RefPicList1_count > 1 &&
        priv->RefPicList1_count == priv->RefPicList0_count &&
        memcmp(priv->RefPicList0, priv->RefPicList1,
               priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
        GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
        priv->RefPicList1[0] = priv->RefPicList1[1];
        priv->RefPicList1[1] = tmp;
    }

    if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
        /* RefPicList0 - append inter-view references (H.8.2.1) */
        init_picture_refs_mvc(decoder, picture, slice_hdr, 0);

        /* RefPicList1 - append inter-view references (H.8.2.1) */
        init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
    }
}
2485
2486 #undef SORT_REF_LIST
2487
2488 static gint
2489 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2490 {
2491     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2492     guint i;
2493
2494     for (i = 0; i < priv->short_ref_count; i++) {
2495         if (priv->short_ref[i]->pic_num == pic_num)
2496             return i;
2497     }
2498     GST_ERROR("found no short-term reference picture with PicNum = %d",
2499               pic_num);
2500     return -1;
2501 }
2502
2503 static gint
2504 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2505 {
2506     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2507     guint i;
2508
2509     for (i = 0; i < priv->long_ref_count; i++) {
2510         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2511             return i;
2512     }
2513     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2514               long_term_pic_num);
2515     return -1;
2516 }
2517
/* 8.2.4.3 (and H.8.2.2.3 for MVC) - Modification process for one
 * reference picture list.
 *
 * Applies the ref_pic_list_modification() syntax from @slice_hdr to
 * RefPicList0 (@list == 0) or RefPicList1 (@list == 1). Each
 * modification_of_pic_nums_idc entry moves one reference picture
 * (short-term, long-term or, for MVC, inter-view) to the front of the
 * remaining list, shifting the rest down per equations (8-37)/(H-10).
 * The final list length is forced to num_ref_idx_lX_active_minus1 + 1;
 * entries that could not be resolved stay NULL and are reported below. */
static void
exec_picture_refs_modification_1(
    GstVaapiDecoderH264           *decoder,
    GstVaapiPictureH264           *picture,
    GstH264SliceHdr               *slice_hdr,
    guint                          list
)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstH264SPS * const sps = get_sps(decoder);
    GstH264RefPicListModification *ref_pic_list_modification;
    guint num_ref_pic_list_modifications;
    GstVaapiPictureH264 **ref_list;
    guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
    const guint16 *view_ids = NULL;
    guint i, j, n, num_refs, num_view_ids = 0;
    gint found_ref_idx;
    gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;

    GST_DEBUG("modification process of reference picture list %u", list);

    /* Select the per-list inputs: modification syntax elements, the
       target list/counter, and (for MVC) the inter-view reference
       view_id table from the SPS MVC extension */
    if (list == 0) {
        ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
        num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
        ref_list                       = priv->RefPicList0;
        ref_list_count_ptr             = &priv->RefPicList0_count;
        num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;

        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
            const GstH264SPSExtMVCView * const view =
                &sps->extension.mvc.view[picture->base.voc];
            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
                view_ids = view->anchor_ref_l0;
                num_view_ids = view->num_anchor_refs_l0;
            }
            else {
                view_ids = view->non_anchor_ref_l0;
                num_view_ids = view->num_non_anchor_refs_l0;
            }
        }
    }
    else {
        ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
        num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
        ref_list                       = priv->RefPicList1;
        ref_list_count_ptr             = &priv->RefPicList1_count;
        num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;

        if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
            sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
            const GstH264SPSExtMVCView * const view =
                &sps->extension.mvc.view[picture->base.voc];
            if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
                view_ids = view->anchor_ref_l1;
                num_view_ids = view->num_anchor_refs_l1;
            }
            else {
                view_ids = view->non_anchor_ref_l1;
                num_view_ids = view->num_non_anchor_refs_l1;
            }
        }
    }
    /* NOTE(review): ref_list_count is assigned here but not read again
       in this function */
    ref_list_count = *ref_list_count_ptr;

    /* 8.2.4.3: MaxPicNum / CurrPicNum depend on whether the current
       picture is a field or a frame */
    if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
        MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
        CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
    }
    else {
        MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
        CurrPicNum = slice_hdr->frame_num;                      // frame_num
    }

    picNumPred = CurrPicNum;
    picViewIdxPred = -1;

    /* Process each modification command; idc == 3 terminates the list */
    for (i = 0; i < num_ref_pic_list_modifications; i++) {
        GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
        if (l->modification_of_pic_nums_idc == 3)
            break;

        /* 8.2.4.3.1 - Short-term reference pictures */
        if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
            gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
            gint32 picNum, picNumNoWrap;

            // (8-34)
            if (l->modification_of_pic_nums_idc == 0) {
                picNumNoWrap = picNumPred - abs_diff_pic_num;
                if (picNumNoWrap < 0)
                    picNumNoWrap += MaxPicNum;
            }

            // (8-35)
            else {
                picNumNoWrap = picNumPred + abs_diff_pic_num;
                if (picNumNoWrap >= MaxPicNum)
                    picNumNoWrap -= MaxPicNum;
            }
            picNumPred = picNumNoWrap;

            // (8-36)
            picNum = picNumNoWrap;
            if (picNum > CurrPicNum)
                picNum -= MaxPicNum;

            // (8-37): shift the list down, insert the matched picture,
            // then compact out any duplicate occurrence of it
            for (j = num_refs; j > ref_list_idx; j--)
                ref_list[j] = ref_list[j - 1];
            found_ref_idx = find_short_term_reference(decoder, picNum);
            ref_list[ref_list_idx++] =
                found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
            n = ref_list_idx;
            for (j = ref_list_idx; j <= num_refs; j++) {
                gint32 PicNumF;
                if (!ref_list[j])
                    continue;
                PicNumF =
                    GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
                    ref_list[j]->pic_num : MaxPicNum;
                if (PicNumF != picNum ||
                    ref_list[j]->base.view_id != picture->base.view_id)
                    ref_list[n++] = ref_list[j];
            }
        }

        /* 8.2.4.3.2 - Long-term reference pictures */
        else if (l->modification_of_pic_nums_idc == 2) {

            for (j = num_refs; j > ref_list_idx; j--)
                ref_list[j] = ref_list[j - 1];
            found_ref_idx =
                find_long_term_reference(decoder, l->value.long_term_pic_num);
            ref_list[ref_list_idx++] =
                found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
            n = ref_list_idx;
            for (j = ref_list_idx; j <= num_refs; j++) {
                gint32 LongTermPicNumF;
                if (!ref_list[j])
                    continue;
                LongTermPicNumF =
                    GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
                    ref_list[j]->long_term_pic_num : INT_MAX;
                if (LongTermPicNumF != l->value.long_term_pic_num ||
                    ref_list[j]->base.view_id != picture->base.view_id)
                    ref_list[n++] = ref_list[j];
            }
        }

        /* H.8.2.2.3 - Inter-view prediction reference pictures */
        else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
                  sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
                 (l->modification_of_pic_nums_idc == 4 ||
                  l->modification_of_pic_nums_idc == 5)) {
            gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
            gint32 picViewIdx, targetViewId;

            // (H-6)
            if (l->modification_of_pic_nums_idc == 4) {
                picViewIdx = picViewIdxPred - abs_diff_view_idx;
                if (picViewIdx < 0)
                    picViewIdx += num_view_ids;
            }

            // (H-7)
            else {
                picViewIdx = picViewIdxPred + abs_diff_view_idx;
                if (picViewIdx >= num_view_ids)
                    picViewIdx -= num_view_ids;
            }
            picViewIdxPred = picViewIdx;

            // (H-8, H-9)
            targetViewId = view_ids[picViewIdx];

            // (H-10)
            for (j = num_refs; j > ref_list_idx; j--)
                ref_list[j] = ref_list[j - 1];
            ref_list[ref_list_idx++] =
                find_inter_view_reference(decoder, targetViewId);
            n = ref_list_idx;
            for (j = ref_list_idx; j <= num_refs; j++) {
                if (!ref_list[j])
                    continue;
                if (ref_list[j]->base.view_id != targetViewId ||
                    ref_list[j]->base.poc != picture->base.poc)
                    ref_list[n++] = ref_list[j];
            }
        }
    }

#if DEBUG
    /* Any remaining NULL slot means a referenced picture was missing */
    for (i = 0; i < num_refs; i++)
        if (!ref_list[i])
            GST_ERROR("list %u entry %u is empty", list, i);
#endif
    *ref_list_count_ptr = num_refs;
}
2717
2718 /* 8.2.4.3 - Modification process for reference picture lists */
2719 static void
2720 exec_picture_refs_modification(
2721     GstVaapiDecoderH264 *decoder,
2722     GstVaapiPictureH264 *picture,
2723     GstH264SliceHdr     *slice_hdr
2724 )
2725 {
2726     GST_DEBUG("execute ref_pic_list_modification()");
2727
2728     /* RefPicList0 */
2729     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2730         slice_hdr->ref_pic_list_modification_flag_l0)
2731         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2732
2733     /* RefPicList1 */
2734     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2735         slice_hdr->ref_pic_list_modification_flag_l1)
2736         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2737 }
2738
2739 static void
2740 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2741     GstVaapiPictureH264 *picture)
2742 {
2743     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2744     guint i, j, short_ref_count, long_ref_count;
2745
2746     short_ref_count = 0;
2747     long_ref_count  = 0;
2748     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2749         for (i = 0; i < priv->dpb_count; i++) {
2750             GstVaapiFrameStore * const fs = priv->dpb[i];
2751             GstVaapiPictureH264 *pic;
2752             if (!gst_vaapi_frame_store_has_frame(fs))
2753                 continue;
2754             pic = fs->buffers[0];
2755             if (pic->base.view_id != picture->base.view_id)
2756                 continue;
2757             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2758                 priv->short_ref[short_ref_count++] = pic;
2759             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2760                 priv->long_ref[long_ref_count++] = pic;
2761             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2762             pic->other_field = fs->buffers[1];
2763         }
2764     }
2765     else {
2766         for (i = 0; i < priv->dpb_count; i++) {
2767             GstVaapiFrameStore * const fs = priv->dpb[i];
2768             for (j = 0; j < fs->num_buffers; j++) {
2769                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2770                 if (pic->base.view_id != picture->base.view_id)
2771                     continue;
2772                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2773                     priv->short_ref[short_ref_count++] = pic;
2774                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2775                     priv->long_ref[long_ref_count++] = pic;
2776                 pic->structure = pic->base.structure;
2777                 pic->other_field = fs->buffers[j ^ 1];
2778             }
2779         }
2780     }
2781
2782     for (i = short_ref_count; i < priv->short_ref_count; i++)
2783         priv->short_ref[i] = NULL;
2784     priv->short_ref_count = short_ref_count;
2785
2786     for (i = long_ref_count; i < priv->long_ref_count; i++)
2787         priv->long_ref[i] = NULL;
2788     priv->long_ref_count = long_ref_count;
2789 }
2790
2791 static void
2792 init_picture_refs(
2793     GstVaapiDecoderH264 *decoder,
2794     GstVaapiPictureH264 *picture,
2795     GstH264SliceHdr     *slice_hdr
2796 )
2797 {
2798     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2799     guint i, num_refs;
2800
2801     init_picture_ref_lists(decoder, picture);
2802     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2803
2804     priv->RefPicList0_count = 0;
2805     priv->RefPicList1_count = 0;
2806
2807     switch (slice_hdr->type % 5) {
2808     case GST_H264_P_SLICE:
2809     case GST_H264_SP_SLICE:
2810         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2811         break;
2812     case GST_H264_B_SLICE:
2813         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2814         break;
2815     default:
2816         break;
2817     }
2818
2819     exec_picture_refs_modification(decoder, picture, slice_hdr);
2820
2821     switch (slice_hdr->type % 5) {
2822     case GST_H264_B_SLICE:
2823         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2824         for (i = priv->RefPicList1_count; i < num_refs; i++)
2825             priv->RefPicList1[i] = NULL;
2826         priv->RefPicList1_count = num_refs;
2827
2828         // fall-through
2829     case GST_H264_P_SLICE:
2830     case GST_H264_SP_SLICE:
2831         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2832         for (i = priv->RefPicList0_count; i < num_refs; i++)
2833             priv->RefPicList0[i] = NULL;
2834         priv->RefPicList0_count = num_refs;
2835         break;
2836     default:
2837         break;
2838     }
2839 }
2840
/* Initializes the state of a freshly-created picture from the parsed
 * slice header and NAL unit info in @pi: frame_num tracking, timestamps,
 * MVC flags, picture structure (frame/field) and reference marking
 * flags, then derives the POC values. Always returns TRUE. */
static gboolean
init_picture(
    GstVaapiDecoderH264 *decoder,
    GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPicture * const base_picture = &picture->base;
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;

    /* Save the previous frame_num before overwriting it: the POC and
       gap-detection code relies on prev_frame_num */
    priv->prev_frame_num        = priv->frame_num;
    priv->frame_num             = slice_hdr->frame_num;
    picture->frame_num          = priv->frame_num;
    picture->frame_num_wrap     = priv->frame_num;
    picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
    base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
    base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
    base_picture->view_id       = pi->view_id;
    base_picture->voc           = pi->voc;

    /* Initialize extensions */
    switch (pi->nalu.extension_type) {
    case GST_H264_NAL_EXTENSION_MVC: {
        GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;

        /* Propagate MVC header flags to picture flags so the rest of
           the decoder can test them without the NAL unit around */
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
        if (mvc->inter_view_flag)
            GST_VAAPI_PICTURE_FLAG_SET(picture,
                GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
        if (mvc->anchor_pic_flag)
            GST_VAAPI_PICTURE_FLAG_SET(picture,
                GST_VAAPI_PICTURE_FLAG_ANCHOR);
        break;
    }
    }

    /* Reset decoder state for IDR pictures */
    if (pi->nalu.idr_pic_flag) {
        GST_DEBUG("<IDR>");
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
        dpb_flush(decoder, picture);
    }

    /* Initialize picture structure */
    if (!slice_hdr->field_pic_flag)
        base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
    else {
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
        if (!slice_hdr->bottom_field_flag)
            base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
        else
            base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
    }
    picture->structure = base_picture->structure;

    /* Initialize reference flags: nal_ref_idc != 0 means the picture is
       used for reference; IDR pictures may request long-term marking
       via long_term_reference_flag (7.4.3) */
    if (pi->nalu.ref_idc) {
        GstH264DecRefPicMarking * const dec_ref_pic_marking =
            &slice_hdr->dec_ref_pic_marking;

        if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
            dec_ref_pic_marking->long_term_reference_flag)
            GST_VAAPI_PICTURE_FLAG_SET(picture,
                GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
        else
            GST_VAAPI_PICTURE_FLAG_SET(picture,
                GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
    }

    init_picture_poc(decoder, picture, slice_hdr);
    return TRUE;
}
2912
2913 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2914 static gboolean
2915 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2916 {
2917     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2918     GstH264SPS * const sps = get_sps(decoder);
2919     GstVaapiPictureH264 *ref_picture;
2920     guint i, m, max_num_ref_frames;
2921
2922     GST_DEBUG("reference picture marking process (sliding window)");
2923
2924     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2925         return TRUE;
2926
2927     max_num_ref_frames = sps->num_ref_frames;
2928     if (max_num_ref_frames == 0)
2929         max_num_ref_frames = 1;
2930     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2931         max_num_ref_frames <<= 1;
2932
2933     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2934         return TRUE;
2935     if (priv->short_ref_count < 1)
2936         return FALSE;
2937
2938     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2939         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2940         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2941             m = i;
2942     }
2943
2944     ref_picture = priv->short_ref[m];
2945     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2946     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2947
2948     /* Both fields need to be marked as "unused for reference", so
2949        remove the other field from the short_ref[] list as well */
2950     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2951         for (i = 0; i < priv->short_ref_count; i++) {
2952             if (priv->short_ref[i] == ref_picture->other_field) {
2953                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2954                 break;
2955             }
2956         }
2957     }
2958     return TRUE;
2959 }
2960
2961 static inline gint32
2962 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2963 {
2964     gint32 pic_num;
2965
2966     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2967         pic_num = picture->frame_num_wrap;
2968     else
2969         pic_num = 2 * picture->frame_num_wrap + 1;
2970     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2971     return pic_num;
2972 }
2973
2974 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2975 static void
2976 exec_ref_pic_marking_adaptive_mmco_1(
2977     GstVaapiDecoderH264  *decoder,
2978     GstVaapiPictureH264  *picture,
2979     GstH264RefPicMarking *ref_pic_marking
2980 )
2981 {
2982     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2983     gint32 i, picNumX;
2984
2985     picNumX = get_picNumX(picture, ref_pic_marking);
2986     i = find_short_term_reference(decoder, picNumX);
2987     if (i < 0)
2988         return;
2989
2990     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2991         GST_VAAPI_PICTURE_IS_FRAME(picture));
2992     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2993 }
2994
2995 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2996 static void
2997 exec_ref_pic_marking_adaptive_mmco_2(
2998     GstVaapiDecoderH264  *decoder,
2999     GstVaapiPictureH264  *picture,
3000     GstH264RefPicMarking *ref_pic_marking
3001 )
3002 {
3003     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3004     gint32 i;
3005
3006     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
3007     if (i < 0)
3008         return;
3009
3010     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
3011         GST_VAAPI_PICTURE_IS_FRAME(picture));
3012     ARRAY_REMOVE_INDEX(priv->long_ref, i);
3013 }
3014
/* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
static void
exec_ref_pic_marking_adaptive_mmco_3(
    GstVaapiDecoderH264  *decoder,
    GstVaapiPictureH264  *picture,
    GstH264RefPicMarking *ref_pic_marking
)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *ref_picture, *other_field;
    gint32 i, picNumX;

    /* If another long-term reference already holds this
       LongTermFrameIdx, unmark and remove it first */
    for (i = 0; i < priv->long_ref_count; i++) {
        if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
            break;
    }
    if (i != priv->long_ref_count) {
        gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
        ARRAY_REMOVE_INDEX(priv->long_ref, i);
    }

    /* Locate the short-term picture targeted by picNumX; bail out
       silently if it is missing (error already logged by the finder) */
    picNumX = get_picNumX(picture, ref_pic_marking);
    i = find_short_term_reference(decoder, picNumX);
    if (i < 0)
        return;

    /* Move it from the short-term list to the long-term list */
    ref_picture = priv->short_ref[i];
    ARRAY_REMOVE_INDEX(priv->short_ref, i);
    priv->long_ref[priv->long_ref_count++] = ref_picture;

    ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
    gst_vaapi_picture_h264_set_reference(ref_picture,
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
        GST_VAAPI_PICTURE_IS_COMPLETE(picture));

    /* Assign LongTermFrameIdx to the other field if it was also
       marked as "used for long-term reference" */
    other_field = ref_picture->other_field;
    if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
        other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
}
3056
3057 /* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
3058  * as "unused for reference" */
3059 static void
3060 exec_ref_pic_marking_adaptive_mmco_4(
3061     GstVaapiDecoderH264  *decoder,
3062     GstVaapiPictureH264  *picture,
3063     GstH264RefPicMarking *ref_pic_marking
3064 )
3065 {
3066     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3067     gint32 i, long_term_frame_idx;
3068
3069     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3070
3071     for (i = 0; i < priv->long_ref_count; i++) {
3072         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3073             continue;
3074         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3075         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3076         i--;
3077     }
3078 }
3079
/* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
static void
exec_ref_pic_marking_adaptive_mmco_5(
    GstVaapiDecoderH264  *decoder,
    GstVaapiPictureH264  *picture,
    GstH264RefPicMarking *ref_pic_marking
)
{
    /* Note: @ref_pic_marking is unused here; MMCO 5 carries no payload */
    GstVaapiDecoderH264Private * const priv = &decoder->priv;

    /* Flush the whole DPB: every stored picture loses reference status */
    dpb_flush(decoder, picture);

    /* Remember MMCO 5 happened: POC derivation of the next picture
       depends on it (8.2.1) */
    priv->prev_pic_has_mmco5 = TRUE;

    /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
    priv->frame_num = 0;
    priv->frame_num_offset = 0;
    picture->frame_num = 0;

    /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
        picture->field_poc[TOP_FIELD] -= picture->base.poc;
    if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
        picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
    picture->base.poc = 0;
}
3106
3107 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3108 static void
3109 exec_ref_pic_marking_adaptive_mmco_6(
3110     GstVaapiDecoderH264  *decoder,
3111     GstVaapiPictureH264  *picture,
3112     GstH264RefPicMarking *ref_pic_marking
3113 )
3114 {
3115     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3116     GstVaapiPictureH264 *other_field;
3117     guint i;
3118
3119     for (i = 0; i < priv->long_ref_count; i++) {
3120         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3121             break;
3122     }
3123     if (i != priv->long_ref_count) {
3124         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3125         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3126     }
3127
3128     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3129     gst_vaapi_picture_h264_set_reference(picture,
3130         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3131         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3132
3133     /* Assign LongTermFrameIdx to the other field if it was also
3134        marked as "used for long-term reference */
3135     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3136     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3137         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3138 }
3139
3140 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3141 static gboolean
3142 exec_ref_pic_marking_adaptive(
3143     GstVaapiDecoderH264     *decoder,
3144     GstVaapiPictureH264     *picture,
3145     GstH264DecRefPicMarking *dec_ref_pic_marking
3146 )
3147 {
3148     guint i;
3149
3150     GST_DEBUG("reference picture marking process (adaptive memory control)");
3151
3152     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3153         GstVaapiDecoderH264  *decoder,
3154         GstVaapiPictureH264  *picture,
3155         GstH264RefPicMarking *ref_pic_marking
3156     );
3157
3158     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3159         NULL,
3160         exec_ref_pic_marking_adaptive_mmco_1,
3161         exec_ref_pic_marking_adaptive_mmco_2,
3162         exec_ref_pic_marking_adaptive_mmco_3,
3163         exec_ref_pic_marking_adaptive_mmco_4,
3164         exec_ref_pic_marking_adaptive_mmco_5,
3165         exec_ref_pic_marking_adaptive_mmco_6,
3166     };
3167
3168     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3169         GstH264RefPicMarking * const ref_pic_marking =
3170             &dec_ref_pic_marking->ref_pic_marking[i];
3171
3172         const guint mmco = ref_pic_marking->memory_management_control_operation;
3173         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3174             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3175         else {
3176             GST_ERROR("unhandled MMCO %u", mmco);
3177             return FALSE;
3178         }
3179     }
3180     return TRUE;
3181 }
3182
3183 /* 8.2.5 - Execute reference picture marking process */
3184 static gboolean
3185 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3186 {
3187     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3188
3189     priv->prev_pic_has_mmco5 = FALSE;
3190     priv->prev_pic_structure = picture->structure;
3191
3192     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3193         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3194
3195     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3196         return TRUE;
3197
3198     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3199         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3200             &picture->last_slice_hdr->dec_ref_pic_marking;
3201         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3202             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3203                 return FALSE;
3204         }
3205         else {
3206             if (!exec_ref_pic_marking_sliding_window(decoder))
3207                 return FALSE;
3208         }
3209     }
3210     return TRUE;
3211 }
3212
3213 static void
3214 vaapi_init_picture(VAPictureH264 *pic)
3215 {
3216     pic->picture_id           = VA_INVALID_ID;
3217     pic->frame_idx            = 0;
3218     pic->flags                = VA_PICTURE_H264_INVALID;
3219     pic->TopFieldOrderCnt     = 0;
3220     pic->BottomFieldOrderCnt  = 0;
3221 }
3222
3223 static void
3224 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3225     guint picture_structure)
3226 {
3227     if (!picture_structure)
3228         picture_structure = picture->structure;
3229
3230     pic->picture_id = picture->base.surface_id;
3231     pic->flags = 0;
3232
3233     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3234         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3235         pic->frame_idx = picture->long_term_frame_idx;
3236     }
3237     else {
3238         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3239             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3240         pic->frame_idx = picture->frame_num;
3241     }
3242
3243     switch (picture_structure) {
3244     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3245         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3246         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3247         break;
3248     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3249         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3250         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3251         pic->BottomFieldOrderCnt = 0;
3252         break;
3253     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3254         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3255         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3256         pic->TopFieldOrderCnt = 0;
3257         break;
3258     }
3259 }
3260
3261 static void
3262 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3263     GstVaapiPictureH264 *picture)
3264 {
3265     vaapi_fill_picture(pic, picture, 0);
3266
3267     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3268     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3269         /* The inter-view reference components and inter-view only
3270            reference components that are included in the reference
3271            picture lists are considered as not being marked as "used for
3272            short-term reference" or "used for long-term reference" */
3273         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3274                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3275     }
3276 }
3277
/* Fills in the VAPictureParameterBufferH264 attached to @picture from the
 * active SPS/PPS and the current DPB contents.
 *
 * The ReferenceFrames[] array is populated with every DPB entry that is
 * either a same-view reference or a valid inter-view (MVC) reference for
 * @picture; remaining entries are set to "invalid".
 *
 * Returns: always TRUE (the function cannot fail). */
static gboolean
fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPicture * const base_picture = &picture->base;
    GstH264PPS * const pps = get_pps(decoder);
    GstH264SPS * const sps = get_sps(decoder);
    VAPictureParameterBufferH264 * const pic_param = base_picture->param;
    guint i, n;

    /* Fill in VAPictureParameterBufferH264 */
    vaapi_fill_picture(&pic_param->CurrPic, picture, 0);

    /* Export DPB entries usable as references for this picture */
    for (i = 0, n = 0; i < priv->dpb_count; i++) {
        GstVaapiFrameStore * const fs = priv->dpb[i];
        if ((gst_vaapi_frame_store_has_reference(fs) &&
             fs->view_id == picture->base.view_id) ||
            (gst_vaapi_frame_store_has_inter_view(fs) &&
             is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
            vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
                fs->buffers[0], fs->structure);
        /* Stop once the fixed-size VA array is full */
        if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
            break;
    }
    /* Mark the unused tail of the array as invalid pictures */
    for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
        vaapi_init_picture(&pic_param->ReferenceFrames[n]);

#define COPY_FIELD(s, f) \
    pic_param->f = (s)->f

#define COPY_BFM(a, s, f) \
    pic_param->a.bits.f = (s)->f

    pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
    pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
    pic_param->frame_num                    = priv->frame_num;

    COPY_FIELD(sps, bit_depth_luma_minus8);
    COPY_FIELD(sps, bit_depth_chroma_minus8);
    COPY_FIELD(sps, num_ref_frames);
    COPY_FIELD(pps, num_slice_groups_minus1);
    COPY_FIELD(pps, slice_group_map_type);
    COPY_FIELD(pps, slice_group_change_rate_minus1);
    COPY_FIELD(pps, pic_init_qp_minus26);
    COPY_FIELD(pps, pic_init_qs_minus26);
    COPY_FIELD(pps, chroma_qp_index_offset);
    COPY_FIELD(pps, second_chroma_qp_index_offset);

    pic_param->seq_fields.value                                         = 0; /* reset all bits */
    pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
    pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */

    COPY_BFM(seq_fields, sps, chroma_format_idc);
    COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
    COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
    COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
    COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
    COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
    COPY_BFM(seq_fields, sps, pic_order_cnt_type);
    COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
    COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);

    pic_param->pic_fields.value                                         = 0; /* reset all bits */
    pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
    pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);

    COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
    COPY_BFM(pic_fields, pps, weighted_pred_flag);
    COPY_BFM(pic_fields, pps, weighted_bipred_idc);
    COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
    COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
    COPY_BFM(pic_fields, pps, pic_order_present_flag);
    COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
    COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
    return TRUE;
}
3354
/* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4)
 *
 * Returns TRUE as soon as any of the spec-listed slice header fields
 * differs between the current and previous parsed slice, i.e. the
 * current NAL unit starts a new picture. Returns FALSE otherwise. */
static gboolean
is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
{
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
    GstH264PPS * const pps = slice_hdr->pps;
    GstH264SPS * const sps = pps->sequence;
    GstH264SliceHdr *prev_slice_hdr;

    /* No previous slice: trivially a new picture */
    if (!prev_pi)
        return TRUE;
    prev_slice_hdr = &prev_pi->data.slice_hdr;

#define CHECK_EXPR(expr, field_name) do {              \
        if (!(expr)) {                                 \
            GST_DEBUG(field_name " differs in value"); \
            return TRUE;                               \
        }                                              \
    } while (0)

#define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
    CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)

    /* view_id differs in value and VOIdx of current slice_hdr is less
       than the VOIdx of the prev_slice_hdr */
    CHECK_VALUE(pi, prev_pi, view_id);

    /* frame_num differs in value, regardless of inferred values to 0 */
    CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);

    /* pic_parameter_set_id differs in value */
    /* (compared through the resolved PPS pointer) */
    CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);

    /* field_pic_flag differs in value */
    CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);

    /* bottom_field_flag is present in both and differs in value */
    if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
        CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);

    /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
    CHECK_EXPR((pi->nalu.ref_idc != 0) ==
               (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");

    /* POC type is 0 for both and either pic_order_cnt_lsb differs in
       value or delta_pic_order_cnt_bottom differs in value */
    if (sps->pic_order_cnt_type == 0) {
        CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
        if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
            CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
    }

    /* POC type is 1 for both and either delta_pic_order_cnt[0]
       differs in value or delta_pic_order_cnt[1] differs in value */
    else if (sps->pic_order_cnt_type == 1) {
        CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
        CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
    }

    /* IdrPicFlag differs in value */
    CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);

    /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
    if (pi->nalu.idr_pic_flag)
        CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);

#undef CHECK_EXPR
#undef CHECK_VALUE
    return FALSE;
}
3425
3426 /* Detection of a new access unit, assuming we are already in presence
3427    of a new picture */
3428 static inline gboolean
3429 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3430 {
3431     if (!prev_pi || prev_pi->view_id == pi->view_id)
3432         return TRUE;
3433     return pi->voc < prev_pi->voc;
3434 }
3435
3436 /* Finds the first field picture corresponding to the supplied picture */
3437 static GstVaapiPictureH264 *
3438 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3439 {
3440     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3441     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3442     GstVaapiFrameStore *fs;
3443
3444     if (!slice_hdr->field_pic_flag)
3445         return NULL;
3446
3447     fs = priv->prev_frames[pi->voc];
3448     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3449         return NULL;
3450
3451     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3452         return fs->buffers[0];
3453     return NULL;
3454 }
3455
/* Starts decoding of a new picture: allocates (or pairs up, for the
 * second field of an interlaced frame) the GstVaapiPictureH264, ensures
 * the decoding context and quantization matrices are up to date, and
 * fills the VA picture parameters.
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS, or an error status on
 * context/allocation/initialization failure. */
static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
    GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
    GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
    GstVaapiPictureH264 *picture, *first_field;
    GstVaapiDecoderStatus status;

    g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
    g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);

    /* Only decode base stream for MVC */
    switch (sps->profile_idc) {
    case GST_H264_PROFILE_MULTIVIEW_HIGH:
    case GST_H264_PROFILE_STEREO_HIGH:
        /* NOTE(review): this branch is deliberately compiled out
           (`if (0)`), so substream pictures are currently decoded
           rather than dropped. Confirm whether dropping should be
           (re-)enabled before relying on either behavior. */
        if (0) {
            GST_DEBUG("drop picture from substream");
            return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
        }
        break;
    }

    status = ensure_context(decoder, sps);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    priv->decoder_state = 0;

    first_field = find_first_field(decoder, pi);
    if (first_field) {
        /* Re-use current picture where the first field was decoded */
        picture = gst_vaapi_picture_h264_new_field(first_field);
        if (!picture) {
            GST_ERROR("failed to allocate field picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    else {
        /* Create new picture */
        picture = gst_vaapi_picture_h264_new(decoder);
        if (!picture) {
            GST_ERROR("failed to allocate picture");
            return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
        }
    }
    /* priv->current_picture takes its own ref; drop the local one */
    gst_vaapi_picture_replace(&priv->current_picture, picture);
    gst_vaapi_picture_unref(picture);

    /* Clear inter-view references list if this is the primary coded
       picture of the current access unit */
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
        g_ptr_array_set_size(priv->inter_views, 0);

    /* Update cropping rectangle */
    if (sps->frame_cropping_flag) {
        GstVaapiRectangle crop_rect;
        crop_rect.x = sps->crop_rect_x;
        crop_rect.y = sps->crop_rect_y;
        crop_rect.width = sps->crop_rect_width;
        crop_rect.height = sps->crop_rect_height;
        gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
    }

    status = ensure_quant_matrix(decoder, picture);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
        GST_ERROR("failed to reset quantizer matrix");
        return status;
    }

    if (!init_picture(decoder, picture, pi))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    if (!fill_picture(decoder, picture))
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;

    priv->decoder_state = pi->state;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
3536
3537 static inline guint
3538 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3539 {
3540     guint epb_count;
3541
3542     epb_count = slice_hdr->n_emulation_prevention_bytes;
3543     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3544 }
3545
/* Fills in the prediction weight table of the VA slice parameter buffer
 * from the parsed slice header.
 *
 * Explicit weights are only emitted when weighted prediction applies:
 * one table (L0) for P/SP slices with weighted_pred_flag, two tables
 * (L0 and L1) for B slices with weighted_bipred_idc == 1.
 *
 * Returns: always TRUE (the function cannot fail). */
static gboolean
fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
    VASliceParameterBufferH264 * const slice_param = slice->param;
    GstH264PPS * const pps = get_pps(decoder);
    GstH264SPS * const sps = get_sps(decoder);
    GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
    guint num_weight_tables = 0;
    gint i, j;

    if (pps->weighted_pred_flag &&
        (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
        num_weight_tables = 1;
    else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
        num_weight_tables = 2;
    else
        num_weight_tables = 0;

    /* Default: no explicit weights */
    slice_param->luma_log2_weight_denom   = 0;
    slice_param->chroma_log2_weight_denom = 0;
    slice_param->luma_weight_l0_flag      = 0;
    slice_param->chroma_weight_l0_flag    = 0;
    slice_param->luma_weight_l1_flag      = 0;
    slice_param->chroma_weight_l1_flag    = 0;

    if (num_weight_tables < 1)
        return TRUE;

    slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
    slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;

    /* List 0 weights (P/SP/B slices) */
    slice_param->luma_weight_l0_flag = 1;
    for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
        slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
        slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
    }

    /* Chroma weights only exist when ChromaArrayType != 0 (i.e. not
       monochrome / separate colour planes) */
    slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
    if (slice_param->chroma_weight_l0_flag) {
        for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
            for (j = 0; j < 2; j++) {
                slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
                slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
            }
        }
    }

    if (num_weight_tables < 2)
        return TRUE;

    /* List 1 weights (B slices only) */
    slice_param->luma_weight_l1_flag = 1;
    for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
        slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
        slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
    }

    slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
    if (slice_param->chroma_weight_l1_flag) {
        for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
            for (j = 0; j < 2; j++) {
                slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
                slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
            }
        }
    }
    return TRUE;
}
3614
/* Copies the decoder's active reference picture lists (RefPicList0/1)
 * into the VA slice parameter buffer: none for I slices, list 0 for
 * P/SP slices, lists 0 and 1 for B slices. Unused list entries are
 * initialized as invalid pictures.
 *
 * Returns: always TRUE (the function cannot fail). */
static gboolean
fill_RefPicList(GstVaapiDecoderH264 *decoder,
    GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    VASliceParameterBufferH264 * const slice_param = slice->param;
    guint i, num_ref_lists = 0;

    slice_param->num_ref_idx_l0_active_minus1 = 0;
    slice_param->num_ref_idx_l1_active_minus1 = 0;

    if (GST_H264_IS_B_SLICE(slice_hdr))
        num_ref_lists = 2;
    else if (GST_H264_IS_I_SLICE(slice_hdr))
        num_ref_lists = 0;
    else
        num_ref_lists = 1;

    if (num_ref_lists < 1)
        return TRUE;

    slice_param->num_ref_idx_l0_active_minus1 =
        slice_hdr->num_ref_idx_l0_active_minus1;

    /* Copy valid RefPicList0 entries, then pad with invalid pictures */
    for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
            priv->RefPicList0[i]);
    for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
        vaapi_init_picture(&slice_param->RefPicList0[i]);

    if (num_ref_lists < 2)
        return TRUE;

    slice_param->num_ref_idx_l1_active_minus1 =
        slice_hdr->num_ref_idx_l1_active_minus1;

    /* Same for RefPicList1 (B slices only) */
    for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
        vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
            priv->RefPicList1[i]);
    for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
        vaapi_init_picture(&slice_param->RefPicList1[i]);
    return TRUE;
}
3658
3659 static gboolean
3660 fill_slice(GstVaapiDecoderH264 *decoder,
3661     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3662 {
3663     VASliceParameterBufferH264 * const slice_param = slice->param;
3664     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3665
3666     /* Fill in VASliceParameterBufferH264 */
3667     slice_param->slice_data_bit_offset =
3668         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3669     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3670     slice_param->slice_type                     = slice_hdr->type % 5;
3671     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3672     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3673     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3674     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3675     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3676     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3677
3678     if (!fill_RefPicList(decoder, slice, slice_hdr))
3679         return FALSE;
3680     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3681         return FALSE;
3682     return TRUE;
3683 }
3684
/* Decodes a single slice NAL unit: validates parser state, activates
 * the PPS/SPS it refers to, maps the input buffer to create the
 * GstVaapiSlice, initializes reference lists and attaches the slice to
 * the current picture.
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS (also when the slice is
 * skipped for lack of headers), or an error status. */
static GstVaapiDecoderStatus
decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    /* NOTE(review): assumes decode_picture() already set a current
       picture for this access unit — verify callers guarantee this */
    GstVaapiPictureH264 * const picture = priv->current_picture;
    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
    GstVaapiSlice *slice;
    GstBuffer * const buffer =
        GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
    GstMapInfo map_info;

    GST_DEBUG("slice (%u bytes)", pi->nalu.size);

    if (!is_valid_state(pi->state,
            GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
        GST_WARNING("failed to receive enough headers to decode slice");
        return GST_VAAPI_DECODER_STATUS_SUCCESS;
    }

    if (!ensure_pps(decoder, slice_hdr->pps)) {
        GST_ERROR("failed to activate PPS");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
        GST_ERROR("failed to activate SPS");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
        GST_ERROR("failed to map buffer");
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    /* Check whether this is the first/last slice in the current access unit */
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
    if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
        GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);

    /* The slice object copies the NAL unit payload; unmap right after */
    slice = GST_VAAPI_SLICE_NEW(H264, decoder,
        (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
    gst_buffer_unmap(buffer, &map_info);
    if (!slice) {
        GST_ERROR("failed to allocate slice");
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
    }

    init_picture_refs(decoder, picture, slice_hdr);
    if (!fill_slice(decoder, slice, pi)) {
        gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
        return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
    }

    gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
    picture->last_slice_hdr = slice_hdr;
    priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
3745
3746 static inline gint
3747 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3748 {
3749     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3750                                                      0xffffff00, 0x00000100,
3751                                                      ofs, size,
3752                                                      scp);
3753 }
3754
/* Dispatches one parsed NAL unit to the matching decode_*() handler
 * and accumulates its parser state into the decoder state.
 *
 * Returns: the handler's status; SEI units are accepted as no-ops,
 * unknown NAL unit types yield a bitstream-parser error. */
static GstVaapiDecoderStatus
decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiParserInfoH264 * const pi = unit->parsed_info;
    GstVaapiDecoderStatus status;

    priv->decoder_state |= pi->state;
    switch (pi->nalu.type) {
    case GST_H264_NAL_SPS:
        status = decode_sps(decoder, unit);
        break;
    case GST_H264_NAL_SUBSET_SPS:
        status = decode_subset_sps(decoder, unit);
        break;
    case GST_H264_NAL_PPS:
        status = decode_pps(decoder, unit);
        break;
    case GST_H264_NAL_SLICE_EXT:
    case GST_H264_NAL_SLICE_IDR:
        /* fall-through. IDR specifics are handled in init_picture() */
    case GST_H264_NAL_SLICE:
        status = decode_slice(decoder, unit);
        break;
    case GST_H264_NAL_SEQ_END:
    case GST_H264_NAL_STREAM_END:
        status = decode_sequence_end(decoder);
        break;
    case GST_H264_NAL_SEI:
        /* SEI messages were already handled at parse time */
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
        break;
    default:
        GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
        status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
        break;
    }
    return status;
}
3793
3794 static GstVaapiDecoderStatus
3795 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3796     const guchar *buf, guint buf_size)
3797 {
3798     GstVaapiDecoderH264 * const decoder =
3799         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3800     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3801     GstVaapiDecoderStatus status;
3802     GstVaapiDecoderUnit unit;
3803     GstVaapiParserInfoH264 *pi = NULL;
3804     GstH264ParserResult result;
3805     guint i, ofs, num_sps, num_pps;
3806
3807     unit.parsed_info = NULL;
3808
3809     if (buf_size < 8)
3810         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3811
3812     if (buf[0] != 1) {
3813         GST_ERROR("failed to decode codec-data, not in avcC format");
3814         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3815     }
3816
3817     priv->nal_length_size = (buf[4] & 0x03) + 1;
3818
3819     num_sps = buf[5] & 0x1f;
3820     ofs = 6;
3821
3822     for (i = 0; i < num_sps; i++) {
3823         pi = gst_vaapi_parser_info_h264_new();
3824         if (!pi)
3825             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3826         unit.parsed_info = pi;
3827
3828         result = gst_h264_parser_identify_nalu_avc(
3829             priv->parser,
3830             buf, ofs, buf_size, 2,
3831             &pi->nalu
3832         );
3833         if (result != GST_H264_PARSER_OK) {
3834             status = get_status(result);
3835             goto cleanup;
3836         }
3837
3838         status = parse_sps(decoder, &unit);
3839         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3840             goto cleanup;
3841         ofs = pi->nalu.offset + pi->nalu.size;
3842
3843         status = decode_sps(decoder, &unit);
3844         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3845             goto cleanup;
3846         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3847     }
3848
3849     num_pps = buf[ofs];
3850     ofs++;
3851
3852     for (i = 0; i < num_pps; i++) {
3853         pi = gst_vaapi_parser_info_h264_new();
3854         if (!pi)
3855             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3856         unit.parsed_info = pi;
3857
3858         result = gst_h264_parser_identify_nalu_avc(
3859             priv->parser,
3860             buf, ofs, buf_size, 2,
3861             &pi->nalu
3862         );
3863         if (result != GST_H264_PARSER_OK) {
3864             status = get_status(result);
3865             goto cleanup;
3866         }
3867
3868         status = parse_pps(decoder, &unit);
3869         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3870             goto cleanup;
3871         ofs = pi->nalu.offset + pi->nalu.size;
3872
3873         status = decode_pps(decoder, &unit);
3874         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3875             goto cleanup;
3876         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3877     }
3878
3879     priv->is_avcC = TRUE;
3880     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3881
3882 cleanup:
3883     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3884     return status;
3885 }
3886
3887 static GstVaapiDecoderStatus
3888 ensure_decoder(GstVaapiDecoderH264 *decoder)
3889 {
3890     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3891     GstVaapiDecoderStatus status;
3892
3893     if (!priv->is_opened) {
3894         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3895         if (!priv->is_opened)
3896             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3897
3898         status = gst_vaapi_decoder_decode_codec_data(
3899             GST_VAAPI_DECODER_CAST(decoder));
3900         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3901             return status;
3902     }
3903     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3904 }
3905
/* GstVaapiDecoder::parse() hook: extracts the next NAL unit from the
 * adapter (avcC length-prefixed or Annex-B start-code delimited),
 * parses its headers, and computes the decoder unit flags (AU/frame
 * start/end, skip, slice) used by the frame splitter.
 *
 * Returns: GST_VAAPI_DECODER_STATUS_SUCCESS when a complete unit was
 * produced, ERROR_NO_DATA when more input is needed, or another error
 * status on parse/allocation failure. */
static GstVaapiDecoderStatus
gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
    GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
    GstVaapiDecoderH264 * const decoder =
        GST_VAAPI_DECODER_H264_CAST(base_decoder);
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
    GstVaapiParserInfoH264 *pi;
    GstVaapiDecoderStatus status;
    GstH264ParserResult result;
    guchar *buf;
    guint i, size, buf_size, nalu_size, flags;
    guint32 start_code;
    gint ofs, ofs2;
    gboolean at_au_end = FALSE;

    status = ensure_decoder(decoder);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* With NALU/AU alignment, each input buffer is self-contained, so
       the fast (single-buffer) size is sufficient */
    switch (priv->stream_alignment) {
    case GST_VAAPI_STREAM_ALIGN_H264_NALU:
    case GST_VAAPI_STREAM_ALIGN_H264_AU:
        size = gst_adapter_available_fast(adapter);
        break;
    default:
        size = gst_adapter_available(adapter);
        break;
    }

    if (priv->is_avcC) {
        /* avcC: read the big-endian NAL length prefix */
        if (size < priv->nal_length_size)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

        buf = (guchar *)&start_code;
        g_assert(priv->nal_length_size <= sizeof(start_code));
        gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);

        nalu_size = 0;
        for (i = 0; i < priv->nal_length_size; i++)
            nalu_size = (nalu_size << 8) | buf[i];

        buf_size = priv->nal_length_size + nalu_size;
        if (size < buf_size)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
        else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
            at_au_end = (buf_size == size);
    }
    else {
        /* Annex-B: delimit the NAL unit by start codes */
        if (size < 4)
            return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

        if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
            buf_size = size;
        else {
            ofs = scan_for_start_code(adapter, 0, size, NULL);
            if (ofs < 0)
                return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

            /* Discard garbage before the first start code */
            if (ofs > 0) {
                gst_adapter_flush(adapter, ofs);
                size -= ofs;
            }

            /* Resume scanning where the previous call left off
               (input_offset2 caches the last scanned position) */
            ofs2 = ps->input_offset2 - ofs - 4;
            if (ofs2 < 4)
                ofs2 = 4;

            ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
                scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
            if (ofs < 0) {
                // Assume the whole NAL unit is present if end-of-stream
                // or stream buffers aligned on access unit boundaries
                if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
                    at_au_end = TRUE;
                else if (!at_eos) {
                    ps->input_offset2 = size;
                    return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
                }
                ofs = size;
            }
            buf_size = ofs;
        }
    }
    ps->input_offset2 = 0;

    buf = (guchar *)gst_adapter_map(adapter, buf_size);
    if (!buf)
        return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;

    unit->size = buf_size;

    /* Ownership of pi is transferred to the unit here */
    pi = gst_vaapi_parser_info_h264_new();
    if (!pi)
        return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;

    gst_vaapi_decoder_unit_set_parsed_info(unit,
        pi, (GDestroyNotify)gst_vaapi_mini_object_unref);

    if (priv->is_avcC)
        result = gst_h264_parser_identify_nalu_avc(priv->parser,
            buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
    else
        result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
            buf, 0, buf_size, &pi->nalu);
    status = get_status(result);
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* Parse the headers now so flag computation below can rely on the
       decoded slice header / SPS / PPS contents */
    switch (pi->nalu.type) {
    case GST_H264_NAL_SPS:
        status = parse_sps(decoder, unit);
        break;
    case GST_H264_NAL_SUBSET_SPS:
        status = parse_subset_sps(decoder, unit);
        break;
    case GST_H264_NAL_PPS:
        status = parse_pps(decoder, unit);
        break;
    case GST_H264_NAL_SEI:
        status = parse_sei(decoder, unit);
        break;
    case GST_H264_NAL_SLICE_EXT:
        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
            status = GST_VAAPI_DECODER_STATUS_SUCCESS;
            break;
        }
        /* fall-through */
    case GST_H264_NAL_SLICE_IDR:
    case GST_H264_NAL_SLICE:
        status = parse_slice(decoder, unit);
        break;
    default:
        status = GST_VAAPI_DECODER_STATUS_SUCCESS;
        break;
    }
    if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
        return status;

    /* Derive unit flags (AU/frame boundaries, skip, slice) per NAL type */
    flags = 0;
    if (at_au_end) {
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
            GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
    }
    switch (pi->nalu.type) {
    case GST_H264_NAL_AU_DELIMITER:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        /* fall-through */
    case GST_H264_NAL_FILLER_DATA:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    case GST_H264_NAL_STREAM_END:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
        /* fall-through */
    case GST_H264_NAL_SEQ_END:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
        break;
    case GST_H264_NAL_SPS:
    case GST_H264_NAL_SUBSET_SPS:
    case GST_H264_NAL_PPS:
    case GST_H264_NAL_SEI:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        break;
    case GST_H264_NAL_SLICE_EXT:
        if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
            break;
        }
        /* fall-through */
    case GST_H264_NAL_SLICE_IDR:
    case GST_H264_NAL_SLICE:
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
        if (priv->prev_pi &&
            (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        }
        else if (is_new_picture(pi, priv->prev_slice_pi)) {
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
            if (is_new_access_unit(pi, priv->prev_slice_pi))
                flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
        }
        gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
        break;
    case GST_H264_NAL_SPS_EXT:
    case GST_H264_NAL_SLICE_AUX:
        /* skip SPS extension and auxiliary slice for now */
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
        break;
    case GST_H264_NAL_PREFIX_UNIT:
        /* skip Prefix NAL units for now */
        flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
            GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
            GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        break;
    default:
        /* Reserved NAL unit types (14..18) also start a new AU */
        if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
            flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
                GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
        break;
    }
    /* A new AU retroactively closes the previous slice's access unit */
    if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
        priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
    GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);

    /* The NAL data pointer refers to the adapter mapping and must not
       outlive this call */
    pi->nalu.data = NULL;
    pi->state = priv->parser_state;
    pi->flags = flags;
    gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
    return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
4121
4122 static GstVaapiDecoderStatus
4123 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4124     GstVaapiDecoderUnit *unit)
4125 {
4126     GstVaapiDecoderH264 * const decoder =
4127         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4128     GstVaapiDecoderStatus status;
4129
4130     status = ensure_decoder(decoder);
4131     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4132         return status;
4133     return decode_unit(decoder, unit);
4134 }
4135
4136 static GstVaapiDecoderStatus
4137 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4138     GstVaapiDecoderUnit *unit)
4139 {
4140     GstVaapiDecoderH264 * const decoder =
4141         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4142
4143     return decode_picture(decoder, unit);
4144 }
4145
4146 static GstVaapiDecoderStatus
4147 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4148 {
4149     GstVaapiDecoderH264 * const decoder =
4150         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4151
4152     return decode_current_picture(decoder);
4153 }
4154
4155 static GstVaapiDecoderStatus
4156 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4157 {
4158     GstVaapiDecoderH264 * const decoder =
4159         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4160
4161     dpb_flush(decoder, NULL);
4162     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4163 }
4164
4165 static void
4166 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4167 {
4168     GstVaapiMiniObjectClass * const object_class =
4169         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4170     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4171
4172     object_class->size          = sizeof(GstVaapiDecoderH264);
4173     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4174
4175     decoder_class->create       = gst_vaapi_decoder_h264_create;
4176     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4177     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4178     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4179     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4180     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4181     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4182
4183     decoder_class->decode_codec_data =
4184         gst_vaapi_decoder_h264_decode_codec_data;
4185 }
4186
/* Returns the singleton H.264 decoder class, initializing it lazily.
 * The g_once_init_enter()/g_once_init_leave() pair makes the one-time
 * initialization safe against concurrent callers. */
static inline const GstVaapiDecoderClass *
gst_vaapi_decoder_h264_class(void)
{
    static GstVaapiDecoderH264Class g_class;
    static gsize g_class_init = FALSE;

    if (g_once_init_enter(&g_class_init)) {
        gst_vaapi_decoder_h264_class_init(&g_class);
        g_once_init_leave(&g_class_init, TRUE);
    }
    return GST_VAAPI_DECODER_CLASS(&g_class);
}
4199
4200 /**
4201  * gst_vaapi_decoder_h264_set_alignment:
4202  * @decoder: a #GstVaapiDecoderH264
4203  * @alignment: the #GstVaapiStreamAlignH264
4204  *
4205  * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4206  * of each buffer that is supplied to the decoder. This could be no
4207  * specific alignment, NAL unit boundaries, or access unit boundaries.
4208  */
4209 void
4210 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4211     GstVaapiStreamAlignH264 alignment)
4212 {
4213     g_return_if_fail(decoder != NULL);
4214
4215     decoder->priv.stream_alignment = alignment;
4216 }
4217
4218 /**
4219  * gst_vaapi_decoder_h264_new:
4220  * @display: a #GstVaapiDisplay
4221  * @caps: a #GstCaps holding codec information
4222  *
4223  * Creates a new #GstVaapiDecoder for MPEG-2 decoding.  The @caps can
4224  * hold extra information like codec-data and pictured coded size.
4225  *
4226  * Return value: the newly allocated #GstVaapiDecoder object
4227  */
4228 GstVaapiDecoder *
4229 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4230 {
4231     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4232 }