decoder: h264: fix output of second field when first field is not in DPB.
platform/upstream/gstreamer-vaapi.git: gst-libs/gst/vaapi/gstvaapidecoder_h264.c
1 /*
2  *  gstvaapidecoder_h264.c - H.264 decoder
3  *
4  *  Copyright (C) 2011-2014 Intel Corporation
5  *    Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
6  *
7  *  This library is free software; you can redistribute it and/or
8  *  modify it under the terms of the GNU Lesser General Public License
9  *  as published by the Free Software Foundation; either version 2.1
10  *  of the License, or (at your option) any later version.
11  *
12  *  This library is distributed in the hope that it will be useful,
13  *  but WITHOUT ANY WARRANTY; without even the implied warranty of
14  *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
15  *  Lesser General Public License for more details.
16  *
17  *  You should have received a copy of the GNU Lesser General Public
18  *  License along with this library; if not, write to the Free
19  *  Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
20  *  Boston, MA 02110-1301 USA
21  */
22
23 /**
24  * SECTION:gstvaapidecoder_h264
25  * @short_description: H.264 decoder
26  */
27
28 #include "sysdeps.h"
29 #include <string.h>
30 #include <gst/base/gstadapter.h>
31 #include <gst/codecparsers/gsth264parser.h>
32 #include "gstvaapidecoder_h264.h"
33 #include "gstvaapidecoder_objects.h"
34 #include "gstvaapidecoder_priv.h"
35 #include "gstvaapidisplay_priv.h"
36 #include "gstvaapiobject_priv.h"
37 #include "gstvaapiutils_h264_priv.h"
38
39 #define DEBUG 1
40 #include "gstvaapidebug.h"
41
42 /* Define to 1 if strict ordering of the DPB is needed. Only useful for debugging */
43 #define USE_STRICT_DPB_ORDERING 0
44
45 typedef struct _GstVaapiDecoderH264Private      GstVaapiDecoderH264Private;
46 typedef struct _GstVaapiDecoderH264Class        GstVaapiDecoderH264Class;
47 typedef struct _GstVaapiFrameStore              GstVaapiFrameStore;
48 typedef struct _GstVaapiFrameStoreClass         GstVaapiFrameStoreClass;
49 typedef struct _GstVaapiParserInfoH264          GstVaapiParserInfoH264;
50 typedef struct _GstVaapiPictureH264             GstVaapiPictureH264;
51
52 // Used for field_poc[]
53 #define TOP_FIELD       0
54 #define BOTTOM_FIELD    1
55
56 /* ------------------------------------------------------------------------- */
57 /* --- H.264 Parser Info                                                 --- */
58 /* ------------------------------------------------------------------------- */
59
60 /*
61  * Extended decoder unit flags:
62  *
63  * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
64  * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
65  */
66 enum {
67     /* These flags do not strictly follow the definitions (7.4.1.2.3)
68        for detecting the start of an access unit; we are only
69        interested in knowing whether the current slice is the first
70        or the last one in the current access unit */
71     GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
72         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
73     GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
74         GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
75
76     GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
77         GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
78         GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
79 };
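/*
 * Illustrative sketch, not part of the upstream decoder: the AU flags sit
 * above GST_VAAPI_DECODER_UNIT_FLAG_LAST, so they never collide with the
 * base decoder unit flags.  A slice that both starts and ends its access
 * unit carries both bits, which the hypothetical helper below detects.
 */
static inline gboolean
example_unit_is_complete_au(guint unit_flags)
{
    return (unit_flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) ==
        GST_VAAPI_DECODER_UNIT_FLAGS_AU;
}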
80
81 #define GST_VAAPI_PARSER_INFO_H264(obj) \
82     ((GstVaapiParserInfoH264 *)(obj))
83
84 struct _GstVaapiParserInfoH264 {
85     GstVaapiMiniObject  parent_instance;
86     GstH264NalUnit      nalu;
87     union {
88         GstH264SPS      sps;
89         GstH264PPS      pps;
90         GArray         *sei;
91         GstH264SliceHdr slice_hdr;
92     }                   data;
93     guint               state;
94     guint               flags;      // Same as decoder unit flags (persistent)
95     guint               view_id;    // View ID of slice
96     guint               voc;        // View order index (VOIdx) of slice
97 };
98
99 static void
100 gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
101 {
102     switch (pi->nalu.type) {
103     case GST_H264_NAL_SPS:
104     case GST_H264_NAL_SUBSET_SPS:
105         gst_h264_sps_clear(&pi->data.sps);
106         break;
107     case GST_H264_NAL_SEI:
108         if (pi->data.sei) {
109             g_array_unref(pi->data.sei);
110             pi->data.sei = NULL;
111         }
112         break;
113     }
114 }
115
116 static inline const GstVaapiMiniObjectClass *
117 gst_vaapi_parser_info_h264_class(void)
118 {
119     static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
120         .size = sizeof(GstVaapiParserInfoH264),
121         .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
122     };
123     return &GstVaapiParserInfoH264Class;
124 }
125
126 static inline GstVaapiParserInfoH264 *
127 gst_vaapi_parser_info_h264_new(void)
128 {
129     return (GstVaapiParserInfoH264 *)
130         gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
131 }
132
133 #define gst_vaapi_parser_info_h264_ref(pi) \
134     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
135
136 #define gst_vaapi_parser_info_h264_unref(pi) \
137     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
138
139 #define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi)          \
140     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr),  \
141         (GstVaapiMiniObject *)(new_pi))
142
143 /* ------------------------------------------------------------------------- */
144 /* --- H.264 Pictures                                                    --- */
145 /* ------------------------------------------------------------------------- */
146
147 /*
148  * Extended picture flags:
149  *
150  * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
151  * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
152  *   may be used for inter-view prediction
153  * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
154  *   i.e. a picture that is decoded with only inter-view prediction,
155  *   and not inter prediction
156  * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
157  *   access unit (AU)
158  * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
159  *   access unit (AU)
160  * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
161  *     "used for short-term reference"
162  * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
163  *     "used for long-term reference"
164  * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
165  *     reference picture (short-term reference or long-term reference)
166  */
167 enum {
168     GST_VAAPI_PICTURE_FLAG_IDR          = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
169     GST_VAAPI_PICTURE_FLAG_REFERENCE2   = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
170     GST_VAAPI_PICTURE_FLAG_INTER_VIEW   = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
171     GST_VAAPI_PICTURE_FLAG_ANCHOR       = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
172     GST_VAAPI_PICTURE_FLAG_AU_START     = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
173     GST_VAAPI_PICTURE_FLAG_AU_END       = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
174
175     GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
176         GST_VAAPI_PICTURE_FLAG_REFERENCE),
177     GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
178         GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
179     GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
180         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
181         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
182 };
183
184 #define GST_VAAPI_PICTURE_IS_IDR(picture) \
185     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
186
187 #define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture)      \
188     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
189       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
190      GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
191
192 #define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)       \
193     ((GST_VAAPI_PICTURE_FLAGS(picture) &                        \
194       GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==                     \
195      GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
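/*
 * Illustrative sketch, not part of the upstream decoder: the reference
 * state is a two-bit encoding.  FLAG_REFERENCE alone means "used for
 * short-term reference", FLAG_REFERENCE | FLAG_REFERENCE2 means "used for
 * long-term reference", and neither bit means "unused for reference".
 * Comparing against the full GST_VAAPI_PICTURE_FLAGS_REFERENCE mask, as
 * the macros above do, cleanly separates the three states.
 */
static inline gboolean
example_picture_flags_are_long_term(guint picture_flags)
{
    return (picture_flags & GST_VAAPI_PICTURE_FLAGS_REFERENCE) ==
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE;
}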
196
197 #define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
198     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
199
200 #define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
201     (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
202
203 #define GST_VAAPI_PICTURE_H264(picture) \
204     ((GstVaapiPictureH264 *)(picture))
205
206 struct _GstVaapiPictureH264 {
207     GstVaapiPicture             base;
208     GstH264SliceHdr            *last_slice_hdr;
209     guint                       structure;
210     gint32                      field_poc[2];
211     gint32                      frame_num;              // Original frame_num from slice_header()
212     gint32                      frame_num_wrap;         // Temporary for ref pic marking: FrameNumWrap
213     gint32                      long_term_frame_idx;    // Temporary for ref pic marking: LongTermFrameIdx
214     gint32                      pic_num;                // Temporary for ref pic marking: PicNum
215     gint32                      long_term_pic_num;      // Temporary for ref pic marking: LongTermPicNum
216     GstVaapiPictureH264        *other_field;            // Temporary for ref pic marking: other field in the same frame store
217     guint                       output_flag             : 1;
218     guint                       output_needed           : 1;
219 };
220
221 GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
222
223 void
224 gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
225 {
226     gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
227 }
228
229 gboolean
230 gst_vaapi_picture_h264_create(
231     GstVaapiPictureH264                      *picture,
232     const GstVaapiCodecObjectConstructorArgs *args
233 )
234 {
235     if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
236         return FALSE;
237
238     picture->field_poc[0]       = G_MAXINT32;
239     picture->field_poc[1]       = G_MAXINT32;
240     picture->output_needed      = FALSE;
241     return TRUE;
242 }
243
244 static inline GstVaapiPictureH264 *
245 gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
246 {
247     return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
248         &GstVaapiPictureH264Class,
249         GST_VAAPI_CODEC_BASE(decoder),
250         NULL, sizeof(VAPictureParameterBufferH264),
251         NULL, 0,
252         0);
253 }
254
255 static inline void
256 gst_vaapi_picture_h264_set_reference(
257     GstVaapiPictureH264 *picture,
258     guint                reference_flags,
259     gboolean             other_field
260 )
261 {
262     if (!picture)
263         return;
264     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
265     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
266
267     if (!other_field || !(picture = picture->other_field))
268         return;
269     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
270     GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
271 }
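/*
 * Usage sketch (hypothetical helper, for illustration only): mark a
 * picture, and the other field stored in the same frame store, as
 * "used for long-term reference" in one call.
 */
static inline void
example_mark_frame_long_term(GstVaapiPictureH264 *picture)
{
    gst_vaapi_picture_h264_set_reference(picture,
        GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, TRUE);
}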
272
273 static inline GstVaapiPictureH264 *
274 gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
275 {
276     g_return_val_if_fail(picture, NULL);
277
278     return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
279 }
280
281 /* ------------------------------------------------------------------------- */
282 /* --- Frame Buffers (DPB)                                               --- */
283 /* ------------------------------------------------------------------------- */
284
285 struct _GstVaapiFrameStore {
286     /*< private >*/
287     GstVaapiMiniObject          parent_instance;
288
289     guint                       view_id;
290     guint                       structure;
291     GstVaapiPictureH264        *buffers[2];
292     guint                       num_buffers;
293     guint                       output_needed;
294 };
295
296 static void
297 gst_vaapi_frame_store_finalize(gpointer object)
298 {
299     GstVaapiFrameStore * const fs = object;
300     guint i;
301
302     for (i = 0; i < fs->num_buffers; i++)
303         gst_vaapi_picture_replace(&fs->buffers[i], NULL);
304 }
305
306 static GstVaapiFrameStore *
307 gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
308 {
309     GstVaapiFrameStore *fs;
310
311     static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
312         sizeof(GstVaapiFrameStore),
313         gst_vaapi_frame_store_finalize
314     };
315
316     fs = (GstVaapiFrameStore *)
317         gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
318     if (!fs)
319         return NULL;
320
321     fs->view_id         = picture->base.view_id;
322     fs->structure       = picture->structure;
323     fs->buffers[0]      = gst_vaapi_picture_ref(picture);
324     fs->buffers[1]      = NULL;
325     fs->num_buffers     = 1;
326     fs->output_needed   = picture->output_needed;
327     return fs;
328 }
329
330 static gboolean
331 gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
332 {
333     guint field;
334
335     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
336     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
337     g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
338
339     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
340     if (picture->output_flag) {
341         picture->output_needed = TRUE;
342         fs->output_needed++;
343     }
344
345     fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
346
347     field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
348         TOP_FIELD : BOTTOM_FIELD;
349     g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
350     fs->buffers[0]->field_poc[field] = picture->field_poc[field];
351     g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
352     picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
353     return TRUE;
354 }
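/*
 * Worked example (illustration only): assume the first field is a top
 * field with field_poc = { 8, G_MAXINT32 } and the incoming picture is the
 * matching bottom field with field_poc = { G_MAXINT32, 9 }.  After
 * gst_vaapi_frame_store_add():
 *
 *   fs->buffers[0]->field_poc == { 8, 9 }
 *   fs->buffers[1]->field_poc == { 8, 9 }
 *   fs->structure             == GST_VAAPI_PICTURE_STRUCTURE_FRAME
 *
 * i.e. both fields end up exposing the complete POC pair of the frame.
 */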
355
356 static gboolean
357 gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
358 {
359     GstVaapiPictureH264 * const first_field = fs->buffers[0];
360     GstVaapiPictureH264 *second_field;
361
362     g_return_val_if_fail(fs->num_buffers == 1, FALSE);
363
364     first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
365     GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
366
367     second_field = gst_vaapi_picture_h264_new_field(first_field);
368     if (!second_field)
369         return FALSE;
370     gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
371     gst_vaapi_picture_unref(second_field);
372
373     second_field->frame_num    = first_field->frame_num;
374     second_field->field_poc[0] = first_field->field_poc[0];
375     second_field->field_poc[1] = first_field->field_poc[1];
376     second_field->output_flag  = first_field->output_flag;
377     if (second_field->output_flag) {
378         second_field->output_needed = TRUE;
379         fs->output_needed++;
380     }
381     return TRUE;
382 }
383
384 static inline gboolean
385 gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
386 {
387     return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
388 }
389
390 static inline gboolean
391 gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
392 {
393     guint i;
394
395     for (i = 0; i < fs->num_buffers; i++) {
396         if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
397             return TRUE;
398     }
399     return FALSE;
400 }
401
402 static gboolean
403 gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
404 {
405     guint i;
406
407     for (i = 0; i < fs->num_buffers; i++) {
408         if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
409             return TRUE;
410     }
411     return FALSE;
412 }
413
414 #define gst_vaapi_frame_store_ref(fs) \
415     gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
416
417 #define gst_vaapi_frame_store_unref(fs) \
418     gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
419
420 #define gst_vaapi_frame_store_replace(old_fs_p, new_fs)                 \
421     gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p),    \
422         (GstVaapiMiniObject *)(new_fs))
423
424 /* ------------------------------------------------------------------------- */
425 /* --- H.264 Decoder                                                     --- */
426 /* ------------------------------------------------------------------------- */
427
428 #define GST_VAAPI_DECODER_H264_CAST(decoder) \
429     ((GstVaapiDecoderH264 *)(decoder))
430
431 typedef enum {
432     GST_H264_VIDEO_STATE_GOT_SPS        = 1 << 0,
433     GST_H264_VIDEO_STATE_GOT_PPS        = 1 << 1,
434     GST_H264_VIDEO_STATE_GOT_SLICE      = 1 << 2,
435
436     GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
437         GST_H264_VIDEO_STATE_GOT_SPS |
438         GST_H264_VIDEO_STATE_GOT_PPS),
439     GST_H264_VIDEO_STATE_VALID_PICTURE = (
440         GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
441         GST_H264_VIDEO_STATE_GOT_SLICE)
442 } GstH264VideoState;
443
444 struct _GstVaapiDecoderH264Private {
445     GstH264NalParser           *parser;
446     guint                       parser_state;
447     guint                       decoder_state;
448     GstVaapiStreamAlignH264     stream_alignment;
449     GstVaapiPictureH264        *current_picture;
450     GstVaapiParserInfoH264     *sps[GST_H264_MAX_SPS_COUNT];
451     GstVaapiParserInfoH264     *active_sps;
452     GstVaapiParserInfoH264     *pps[GST_H264_MAX_PPS_COUNT];
453     GstVaapiParserInfoH264     *active_pps;
454     GstVaapiParserInfoH264     *prev_pi;
455     GstVaapiParserInfoH264     *prev_slice_pi;
456     GstVaapiFrameStore        **prev_frames;
457     guint                       prev_frames_alloc;
458     GstVaapiFrameStore        **dpb;
459     guint                       dpb_count;
460     guint                       dpb_size;
461     guint                       dpb_size_max;
462     guint                       max_views;
463     GstVaapiProfile             profile;
464     GstVaapiEntrypoint          entrypoint;
465     GstVaapiChromaType          chroma_type;
466     GPtrArray                  *inter_views;
467     GstVaapiPictureH264        *short_ref[32];
468     guint                       short_ref_count;
469     GstVaapiPictureH264        *long_ref[32];
470     guint                       long_ref_count;
471     GstVaapiPictureH264        *RefPicList0[32];
472     guint                       RefPicList0_count;
473     GstVaapiPictureH264        *RefPicList1[32];
474     guint                       RefPicList1_count;
475     guint                       nal_length_size;
476     guint                       mb_width;
477     guint                       mb_height;
478     gint32                      field_poc[2];           // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
479     gint32                      poc_msb;                // PicOrderCntMsb
480     gint32                      poc_lsb;                // pic_order_cnt_lsb (from slice_header())
481     gint32                      prev_poc_msb;           // prevPicOrderCntMsb
482     gint32                      prev_poc_lsb;           // prevPicOrderCntLsb
483     gint32                      frame_num_offset;       // FrameNumOffset
484     gint32                      frame_num;              // frame_num (from slice_header())
485     gint32                      prev_frame_num;         // prevFrameNum
486     gboolean                    prev_pic_has_mmco5;     // prevMmco5Pic
487     gboolean                    prev_pic_structure;     // previous picture structure
488     guint                       is_opened               : 1;
489     guint                       is_avcC                 : 1;
490     guint                       has_context             : 1;
491     guint                       progressive_sequence    : 1;
492 };
493
494 /**
495  * GstVaapiDecoderH264:
496  *
497  * A decoder based on H264.
498  */
499 struct _GstVaapiDecoderH264 {
500     /*< private >*/
501     GstVaapiDecoder             parent_instance;
502     GstVaapiDecoderH264Private  priv;
503 };
504
505 /**
506  * GstVaapiDecoderH264Class:
507  *
508  * A decoder class based on H264.
509  */
510 struct _GstVaapiDecoderH264Class {
511     /*< private >*/
512     GstVaapiDecoderClass parent_class;
513 };
514
515 static gboolean
516 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
517
518 static gboolean
519 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
520     GstVaapiPictureH264 *picture);
521
522 static inline gboolean
523 is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
524     GstVaapiFrameStore *fs)
525 {
526     return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
527 }
528
529 /* Determines if the supplied profile is one of the MVC set */
530 static gboolean
531 is_mvc_profile(GstH264Profile profile)
532 {
533     return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
534         profile == GST_H264_PROFILE_STEREO_HIGH;
535 }
536
537 /* Determines the view_id from the supplied NAL unit */
538 static inline guint
539 get_view_id(GstH264NalUnit *nalu)
540 {
541     return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
542 }
543
544 /* Determines the view order index (VOIdx) from the supplied view_id */
545 static gint
546 get_view_order_index(GstH264SPS *sps, guint16 view_id)
547 {
548     GstH264SPSExtMVC *mvc;
549     gint i;
550
551     if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
552         return 0;
553
554     mvc = &sps->extension.mvc;
555     for (i = 0; i <= mvc->num_views_minus1; i++) {
556         if (mvc->view[i].view_id == view_id)
557             return i;
558     }
559     GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
560     return -1;
561 }
562
563 /* Determines NumViews */
564 static guint
565 get_num_views(GstH264SPS *sps)
566 {
567     return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
568         sps->extension.mvc.num_views_minus1 : 0);
569 }
570
571 /* Get number of reference frames to use */
572 static guint
573 get_max_dec_frame_buffering(GstH264SPS *sps)
574 {
575     guint num_views, max_dpb_frames;
576     guint max_dec_frame_buffering, PicSizeMbs;
577     GstVaapiLevelH264 level;
578     const GstVaapiH264LevelLimits *level_limits;
579
580     /* Table A-1 - Level limits */
581     if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
582         level = GST_VAAPI_LEVEL_H264_L1b;
583     else
584         level = gst_vaapi_utils_h264_get_level(sps->level_idc);
585     level_limits = gst_vaapi_utils_h264_get_level_limits(level);
586     if (G_UNLIKELY(!level_limits)) {
587         GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
588         max_dec_frame_buffering = 16;
589     }
590     else {
591         PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
592                       (sps->pic_height_in_map_units_minus1 + 1) *
593                       (sps->frame_mbs_only_flag ? 1 : 2));
594         max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
595     }
596     if (is_mvc_profile(sps->profile_idc))
597         max_dec_frame_buffering <<= 1;
598
599     /* VUI parameters */
600     if (sps->vui_parameters_present_flag) {
601         GstH264VUIParams * const vui_params = &sps->vui_parameters;
602         if (vui_params->bitstream_restriction_flag)
603             max_dec_frame_buffering = vui_params->max_dec_frame_buffering;
604         else {
605             switch (sps->profile_idc) {
606             case 44:  // CAVLC 4:4:4 Intra profile
607             case GST_H264_PROFILE_SCALABLE_HIGH:
608             case GST_H264_PROFILE_HIGH:
609             case GST_H264_PROFILE_HIGH10:
610             case GST_H264_PROFILE_HIGH_422:
611             case GST_H264_PROFILE_HIGH_444:
612                 if (sps->constraint_set3_flag)
613                     max_dec_frame_buffering = 0;
614                 break;
615             }
616         }
617     }
618
619     num_views = get_num_views(sps);
620     max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
621     if (max_dec_frame_buffering > max_dpb_frames)
622         max_dec_frame_buffering = max_dpb_frames;
623     else if (max_dec_frame_buffering < sps->num_ref_frames)
624         max_dec_frame_buffering = sps->num_ref_frames;
625     return MAX(1, max_dec_frame_buffering);
626 }
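/*
 * Worked example (illustration only): for a 1920x1088 stream at level 4.1
 * with frame_mbs_only_flag set, PicSizeMbs = 120 * 68 = 8160 and Table A-1
 * gives MaxDpbMbs = 32768, so the integer division above yields
 * max_dec_frame_buffering = 32768 / 8160 = 4 frames.  That value is then
 * subject to the VUI bitstream_restriction override and to the
 * num_ref_frames / max_dpb_frames clamps performed above.
 */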
627
628 static void
629 array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
630 {
631     gpointer * const entries = array;
632     guint num_entries = *array_length_ptr;
633
634     g_return_if_fail(index < num_entries);
635
636     if (index != --num_entries)
637         entries[index] = entries[num_entries];
638     entries[num_entries] = NULL;
639     *array_length_ptr = num_entries;
640 }
641
642 #if 1
643 static inline void
644 array_remove_index(void *array, guint *array_length_ptr, guint index)
645 {
646     array_remove_index_fast(array, array_length_ptr, index);
647 }
648 #else
649 static void
650 array_remove_index(void *array, guint *array_length_ptr, guint index)
651 {
652     gpointer * const entries = array;
653     const guint num_entries = *array_length_ptr - 1;
654     guint i;
655
656     g_return_if_fail(index <= num_entries);
657
658     for (i = index; i < num_entries; i++)
659         entries[i] = entries[i + 1];
660     entries[num_entries] = NULL;
661     *array_length_ptr = num_entries;
662 }
663 #endif
664
665 #define ARRAY_REMOVE_INDEX(array, index) \
666     array_remove_index(array, &array##_count, index)
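/*
 * Usage sketch (illustration only): the macro relies on the convention
 * that every such array has a companion "<name>_count" counter, e.g. the
 * short_ref[] / short_ref_count pair declared in
 * GstVaapiDecoderH264Private below:
 *
 *   ARRAY_REMOVE_INDEX(priv->short_ref, i);
 *   // expands to array_remove_index(priv->short_ref,
 *   //     &priv->short_ref_count, i)
 */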
667
668 static void
669 dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
670 {
671     GstVaapiDecoderH264Private * const priv = &decoder->priv;
672     guint i, num_frames = --priv->dpb_count;
673
674     if (USE_STRICT_DPB_ORDERING) {
675         for (i = index; i < num_frames; i++)
676             gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
677     }
678     else if (index != num_frames)
679         gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
680     gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
681 }
682
683 static gboolean
684 dpb_output(
685     GstVaapiDecoderH264 *decoder,
686     GstVaapiFrameStore  *fs,
687     GstVaapiPictureH264 *picture
688 )
689 {
690     picture->output_needed = FALSE;
691
692     if (--fs->output_needed > 0)
693         return TRUE;
694
695     if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
696         return TRUE;
697     return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
698 }
699
700 static inline void
701 dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
702 {
703     GstVaapiDecoderH264Private * const priv = &decoder->priv;
704     GstVaapiFrameStore * const fs = priv->dpb[i];
705
706     if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
707         dpb_remove_index(decoder, i);
708 }
709
710 /* Finds the frame store holding the supplied picture */
711 static gint
712 dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
713 {
714     GstVaapiDecoderH264Private * const priv = &decoder->priv;
715     gint i, j;
716
717     for (i = 0; i < priv->dpb_count; i++) {
718         GstVaapiFrameStore * const fs = priv->dpb[i];
719         for (j = 0; j < fs->num_buffers; j++) {
720             if (fs->buffers[j] == picture)
721                 return i;
722         }
723     }
724     return -1;
725 }
726
727 /* Finds the picture with the lowest POC that needs to be output */
728 static gint
729 dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
730     GstVaapiPictureH264 **found_picture_ptr)
731 {
732     GstVaapiDecoderH264Private * const priv = &decoder->priv;
733     GstVaapiPictureH264 *found_picture = NULL;
734     guint i, j, found_index;
735
736     for (i = 0; i < priv->dpb_count; i++) {
737         GstVaapiFrameStore * const fs = priv->dpb[i];
738         if (!fs->output_needed)
739             continue;
740         if (picture && picture->base.view_id != fs->view_id)
741             continue;
742         for (j = 0; j < fs->num_buffers; j++) {
743             GstVaapiPictureH264 * const pic = fs->buffers[j];
744             if (!pic->output_needed)
745                 continue;
746             if (!found_picture || found_picture->base.poc > pic->base.poc ||
747                 (found_picture->base.poc == pic->base.poc &&
748                  found_picture->base.voc > pic->base.voc))
749                 found_picture = pic, found_index = i;
750         }
751     }
752
753     if (found_picture_ptr)
754         *found_picture_ptr = found_picture;
755     return found_picture ? found_index : -1;
756 }
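/*
 * Illustrative sketch, not part of the upstream decoder: the loop above
 * simply keeps the minimum of the (POC, VOC) pair, i.e. lowest POC first
 * and, for equal POCs, lowest view order index first.
 */
static inline gboolean
example_output_before(gint32 poc_a, guint voc_a, gint32 poc_b, guint voc_b)
{
    return poc_a < poc_b || (poc_a == poc_b && voc_a < voc_b);
}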
757
758 /* Finds the picture with the lowest VOC that needs to be output */
759 static gint
760 dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
761     GstVaapiPictureH264 **found_picture_ptr)
762 {
763     GstVaapiDecoderH264Private * const priv = &decoder->priv;
764     GstVaapiPictureH264 *found_picture = NULL;
765     guint i, j, found_index;
766
767     for (i = 0; i < priv->dpb_count; i++) {
768         GstVaapiFrameStore * const fs = priv->dpb[i];
769         if (!fs->output_needed || fs->view_id == picture->base.view_id)
770             continue;
771         for (j = 0; j < fs->num_buffers; j++) {
772             GstVaapiPictureH264 * const pic = fs->buffers[j];
773             if (!pic->output_needed || pic->base.poc != picture->base.poc)
774                 continue;
775             if (!found_picture || found_picture->base.voc > pic->base.voc)
776                 found_picture = pic, found_index = i;
777         }
778     }
779
780     if (found_picture_ptr)
781         *found_picture_ptr = found_picture;
782     return found_picture ? found_index : -1;
783 }
784
785 static gboolean
786 dpb_output_other_views(GstVaapiDecoderH264 *decoder,
787     GstVaapiPictureH264 *picture, guint voc)
788 {
789     GstVaapiDecoderH264Private * const priv = &decoder->priv;
790     GstVaapiPictureH264 *found_picture;
791     gint found_index;
792     gboolean success;
793
794     if (priv->max_views == 1)
795         return TRUE;
796
797     /* Emit all other view components that were in the same access
798        unit as the picture we have just found */
799     found_picture = picture;
800     for (;;) {
801         found_index = dpb_find_lowest_voc(decoder, found_picture,
802             &found_picture);
803         if (found_index < 0 || found_picture->base.voc >= voc)
804             break;
805         success = dpb_output(decoder, priv->dpb[found_index], found_picture);
806         dpb_evict(decoder, found_picture, found_index);
807         if (!success)
808             return FALSE;
809     }
810     return TRUE;
811 }
812
813 static gboolean
814 dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
815 {
816     GstVaapiDecoderH264Private * const priv = &decoder->priv;
817     GstVaapiPictureH264 *found_picture;
818     gint found_index;
819     gboolean success;
820
821     found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
822     if (found_index < 0)
823         return FALSE;
824
825     if (picture && picture->base.poc != found_picture->base.poc)
826         dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
827
828     success = dpb_output(decoder, priv->dpb[found_index], found_picture);
829     dpb_evict(decoder, found_picture, found_index);
830     if (priv->max_views == 1)
831         return success;
832
833     if (picture && picture->base.poc != found_picture->base.poc)
834         dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
835     return success;
836 }
837
838 static void
839 dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
840 {
841     GstVaapiDecoderH264Private * const priv = &decoder->priv;
842     guint i, n;
843
844     for (i = 0; i < priv->dpb_count; i++) {
845         if (picture && picture->base.view_id != priv->dpb[i]->view_id)
846             continue;
847         gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
848     }
849
850     /* Compact the resulting DPB, i.e. remove holes */
851     for (i = 0, n = 0; i < priv->dpb_count; i++) {
852         if (priv->dpb[i]) {
853             if (i != n) {
854                 priv->dpb[n] = priv->dpb[i];
855                 priv->dpb[i] = NULL;
856             }
857             n++;
858         }
859     }
860     priv->dpb_count = n;
861
862     /* Clear previous frame buffers only if this is a "flush-all" operation,
863        or if the picture is the first one in the access unit */
864     if (priv->prev_frames && (!picture ||
865             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
866                 GST_VAAPI_PICTURE_FLAG_AU_START))) {
867         for (i = 0; i < priv->max_views; i++)
868             gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
869     }
870 }
871
872 static void
873 dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
874 {
875     while (dpb_bump(decoder, picture))
876         ;
877     dpb_clear(decoder, picture);
878 }
879
880 static void
881 dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
882 {
883     GstVaapiDecoderH264Private * const priv = &decoder->priv;
884     const gboolean is_last_picture = /* in the access unit */
885         GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
886     guint i;
887
888     // Remove all unused inter-view only reference components of the current AU
889     i = 0;
890     while (i < priv->dpb_count) {
891         GstVaapiFrameStore * const fs = priv->dpb[i];
892         if (fs->view_id != picture->base.view_id &&
893             !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
894             (is_last_picture ||
895              !is_inter_view_reference_for_next_frames(decoder, fs)))
896             dpb_remove_index(decoder, i);
897         else
898             i++;
899     }
900 }
901
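/* Adds the decoded picture to the DPB. Note on second fields (see the
   commit subject): when the picture is the second field of a frame, the
   first field's frame store is looked up in the DPB first.  If that first
   field is no longer in the DPB, typically because it was a non-reference
   picture that has already been output and evicted, the frame store is
   recovered from priv->prev_frames[] for the current view, the second
   field is added to it, and the picture is output immediately so the
   completed frame is not lost.  Otherwise a new frame store is created
   (and split into two fields for interlaced content). */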
902 static gboolean
903 dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
904 {
905     GstVaapiDecoderH264Private * const priv = &decoder->priv;
906     GstVaapiFrameStore *fs;
907     guint i;
908
909     if (priv->max_views > 1)
910         dpb_prune_mvc(decoder, picture);
911
912     // Remove all unused pictures
913     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
914         i = 0;
915         while (i < priv->dpb_count) {
916             GstVaapiFrameStore * const fs = priv->dpb[i];
917             if (fs->view_id == picture->base.view_id &&
918                 !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
919                 dpb_remove_index(decoder, i);
920             else
921                 i++;
922         }
923     }
924
925     // Check if picture is the second field and the first field is still in DPB
926     if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
927         !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
928         const gint found_index = dpb_find_picture(decoder,
929             GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
930         if (found_index >= 0)
931             return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
932
933         // ... also check the previous frame store, in case the first field was already output and is no longer in the DPB
934         fs = priv->prev_frames[picture->base.voc];
935         if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
936             if (!gst_vaapi_frame_store_add(fs, picture))
937                 return FALSE;
938             return dpb_output(decoder, fs, picture);
939         }
940     }
941
942     // Create new frame store, and split fields if necessary
943     fs = gst_vaapi_frame_store_new(picture);
944     if (!fs)
945         return FALSE;
946     gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
947     gst_vaapi_frame_store_unref(fs);
948
949     if (picture->output_flag) {
950         picture->output_needed = TRUE;
951         fs->output_needed++;
952     }
953
954     if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
955         if (!gst_vaapi_frame_store_split_fields(fs))
956             return FALSE;
957     }
958
959     // C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
960     if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
961         while (priv->dpb_count == priv->dpb_size) {
962             if (!dpb_bump(decoder, picture))
963                 return FALSE;
964         }
965     }
966
967     // C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
968     else {
969         const gboolean StoreInterViewOnlyRefFlag =
970             !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
971                 GST_VAAPI_PICTURE_FLAG_AU_END) &&
972             GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
973                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
974         if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
975             return TRUE;
976         while (priv->dpb_count == priv->dpb_size) {
977             GstVaapiPictureH264 *found_picture;
978             if (!StoreInterViewOnlyRefFlag) {
979                 if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
980                     found_picture->base.poc > picture->base.poc)
981                     return dpb_output(decoder, fs, picture);
982             }
983             if (!dpb_bump(decoder, picture))
984                 return FALSE;
985         }
986     }
987     gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
988     return TRUE;
989 }
990
991 static gboolean
992 dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
993 {
994     GstVaapiDecoderH264Private * const priv = &decoder->priv;
995
996     if (dpb_size > priv->dpb_size_max) {
997         priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
998         if (!priv->dpb)
999             return FALSE;
1000         memset(&priv->dpb[priv->dpb_size_max], 0,
1001             (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
1002         priv->dpb_size_max = dpb_size;
1003     }
1004     priv->dpb_size = dpb_size;
1005
1006     GST_DEBUG("DPB size %u", priv->dpb_size);
1007     return TRUE;
1008 }
1009
1010 static void
1011 unref_inter_view(GstVaapiPictureH264 *picture)
1012 {
1013     if (!picture)
1014         return;
1015     GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
1016     gst_vaapi_picture_unref(picture);
1017 }
1018
1019 /* Resets MVC resources */
1020 static gboolean
1021 mvc_reset(GstVaapiDecoderH264 *decoder)
1022 {
1023     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1024     guint i;
1025
1026     // Resize array of inter-view references
1027     if (!priv->inter_views) {
1028         priv->inter_views = g_ptr_array_new_full(priv->max_views,
1029             (GDestroyNotify)unref_inter_view);
1030         if (!priv->inter_views)
1031             return FALSE;
1032     }
1033
1034     // Resize array of previous frame buffers
1035     for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
1036         gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
1037
1038     priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
1039         sizeof(*priv->prev_frames));
1040     if (!priv->prev_frames) {
1041         priv->prev_frames_alloc = 0;
1042         return FALSE;
1043     }
1044     for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
1045         priv->prev_frames[i] = NULL;
1046     priv->prev_frames_alloc = priv->max_views;
1047     return TRUE;
1048 }
1049
1050 static GstVaapiDecoderStatus
1051 get_status(GstH264ParserResult result)
1052 {
1053     GstVaapiDecoderStatus status;
1054
1055     switch (result) {
1056     case GST_H264_PARSER_OK:
1057         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
1058         break;
1059     case GST_H264_PARSER_NO_NAL_END:
1060         status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
1061         break;
1062     case GST_H264_PARSER_ERROR:
1063         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
1064         break;
1065     default:
1066         status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1067         break;
1068     }
1069     return status;
1070 }
1071
1072 static void
1073 gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
1074 {
1075     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1076
1077     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1078     gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
1079     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
1080
1081     dpb_clear(decoder, NULL);
1082
1083     if (priv->inter_views) {
1084         g_ptr_array_unref(priv->inter_views);
1085         priv->inter_views = NULL;
1086     }
1087
1088     if (priv->parser) {
1089         gst_h264_nal_parser_free(priv->parser);
1090         priv->parser = NULL;
1091     }
1092 }
1093
1094 static gboolean
1095 gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
1096 {
1097     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1098
1099     gst_vaapi_decoder_h264_close(decoder);
1100
1101     priv->parser = gst_h264_nal_parser_new();
1102     if (!priv->parser)
1103         return FALSE;
1104     return TRUE;
1105 }
1106
1107 static void
1108 gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
1109 {
1110     GstVaapiDecoderH264 * const decoder =
1111         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1112     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1113     guint i;
1114
1115     gst_vaapi_decoder_h264_close(decoder);
1116
1117     g_free(priv->dpb);
1118     priv->dpb = NULL;
1119     priv->dpb_size = 0;
1120
1121     g_free(priv->prev_frames);
1122     priv->prev_frames = NULL;
1123     priv->prev_frames_alloc = 0;
1124
1125     for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
1126         gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
1127     gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
1128
1129     for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
1130         gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
1131     gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
1132 }
1133
1134 static gboolean
1135 gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
1136 {
1137     GstVaapiDecoderH264 * const decoder =
1138         GST_VAAPI_DECODER_H264_CAST(base_decoder);
1139     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1140
1141     priv->profile               = GST_VAAPI_PROFILE_UNKNOWN;
1142     priv->entrypoint            = GST_VAAPI_ENTRYPOINT_VLD;
1143     priv->chroma_type           = GST_VAAPI_CHROMA_TYPE_YUV420;
1144     priv->prev_pic_structure    = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
1145     priv->progressive_sequence  = TRUE;
1146     return TRUE;
1147 }
1148
1149 /* Activates the supplied PPS */
1150 static GstH264PPS *
1151 ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
1152 {
1153     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1154     GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
1155
1156     gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
1157     return pi ? &pi->data.pps : NULL;
1158 }
1159
1160 /* Returns the active PPS */
1161 static inline GstH264PPS *
1162 get_pps(GstVaapiDecoderH264 *decoder)
1163 {
1164     GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
1165
1166     return pi ? &pi->data.pps : NULL;
1167 }
1168
1169 /* Activate the supplied SPS */
1170 static GstH264SPS *
1171 ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1172 {
1173     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1174     GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
1175
1176     gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
1177     return pi ? &pi->data.sps : NULL;
1178 }
1179
1180 /* Returns the active SPS */
1181 static inline GstH264SPS *
1182 get_sps(GstVaapiDecoderH264 *decoder)
1183 {
1184     GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
1185
1186     return pi ? &pi->data.sps : NULL;
1187 }
1188
1189 static void
1190 fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
1191     GstVaapiProfile profile)
1192 {
1193     guint n_profiles = *n_profiles_ptr;
1194
1195     profiles[n_profiles++] = profile;
1196     switch (profile) {
1197     case GST_VAAPI_PROFILE_H264_MAIN:
1198         profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
1199         break;
1200     default:
1201         break;
1202     }
1203     *n_profiles_ptr = n_profiles;
1204 }
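/*
 * Usage sketch (illustration only): fill_profiles() appends the supplied
 * profile plus its compatible fallbacks.  Starting from an empty list:
 *
 *   guint n_profiles = 0;
 *   GstVaapiProfile profiles[16];
 *
 *   fill_profiles(profiles, &n_profiles, GST_VAAPI_PROFILE_H264_MAIN);
 *   // profiles = { H264_MAIN, H264_HIGH }, n_profiles = 2
 */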
1205
1206 /* Fills in compatible profiles for MVC decoding */
1207 static void
1208 fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
1209     guint *n_profiles_ptr, guint dpb_size)
1210 {
1211     const gchar * const vendor_string =
1212         gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
1213
1214     gboolean add_high_profile = FALSE;
1215     struct map {
1216         const gchar *str;
1217         guint str_len;
1218     };
1219     const struct map *m;
1220
1221     // Drivers that support slice level decoding
1222     if (vendor_string && dpb_size <= 16) {
1223         static const struct map drv_names[] = {
1224             { "Intel i965 driver", 17 },
1225             { NULL, 0 }
1226         };
1227         for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
1228             if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
1229                 add_high_profile = TRUE;
1230         }
1231     }
1232
1233     if (add_high_profile)
1234         fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
1235 }
1236
1237 static GstVaapiProfile
1238 get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
1239 {
1240     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1241     GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
1242     GstVaapiProfile profile, profiles[4];
1243     guint i, n_profiles = 0;
1244
1245     profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
1246     if (!profile)
1247         return GST_VAAPI_PROFILE_UNKNOWN;
1248
1249     fill_profiles(profiles, &n_profiles, profile);
1250     switch (profile) {
1251     case GST_VAAPI_PROFILE_H264_BASELINE:
1252         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1253             fill_profiles(profiles, &n_profiles,
1254                 GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
1255             fill_profiles(profiles, &n_profiles,
1256                 GST_VAAPI_PROFILE_H264_MAIN);
1257         }
1258         break;
1259     case GST_VAAPI_PROFILE_H264_EXTENDED:
1260         if (sps->constraint_set1_flag) { // A.2.2 (main profile)
1261             fill_profiles(profiles, &n_profiles,
1262                 GST_VAAPI_PROFILE_H264_MAIN);
1263         }
1264         break;
1265     case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
1266         if (priv->max_views == 2) {
1267             fill_profiles(profiles, &n_profiles,
1268                 GST_VAAPI_PROFILE_H264_STEREO_HIGH);
1269         }
1270         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1271         break;
1272     case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
1273         if (sps->frame_mbs_only_flag) {
1274             fill_profiles(profiles, &n_profiles,
1275                 GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
1276         }
1277         fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
1278         break;
1279     default:
1280         break;
1281     }
1282
1283     /* If the preferred profile (profiles[0]) matches the one already
1284        selected, just return it now instead of probing the display again */
1285     if (profiles[0] == priv->profile)
1286         return priv->profile;
1287
1288     for (i = 0; i < n_profiles; i++) {
1289         if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
1290             return profiles[i];
1291     }
1292     return GST_VAAPI_PROFILE_UNKNOWN;
1293 }
1294
1295 static GstVaapiDecoderStatus
1296 ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
1297 {
1298     GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
1299     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1300     GstVaapiContextInfo info;
1301     GstVaapiProfile profile;
1302     GstVaapiChromaType chroma_type;
1303     gboolean reset_context = FALSE;
1304     guint mb_width, mb_height, dpb_size, num_views;
1305
1306     num_views = get_num_views(sps);
1307     if (priv->max_views < num_views) {
1308         priv->max_views = num_views;
1309         GST_DEBUG("maximum number of views changed to %u", num_views);
1310     }
1311
1312     dpb_size = get_max_dec_frame_buffering(sps);
1313     if (priv->dpb_size < dpb_size) {
1314         GST_DEBUG("DPB size increased");
1315         reset_context = TRUE;
1316     }
1317
1318     profile = get_profile(decoder, sps, dpb_size);
1319     if (!profile) {
1320         GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
1321         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
1322     }
1323
1324     if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
1325         GST_DEBUG("profile changed");
1326         reset_context = TRUE;
1327         priv->profile = profile;
1328     }
1329
1330     chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
1331     if (!chroma_type) {
1332         GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
1333         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1334     }
1335
1336     if (priv->chroma_type != chroma_type) {
1337         GST_DEBUG("chroma format changed");
1338         reset_context     = TRUE;
1339         priv->chroma_type = chroma_type;
1340     }
1341
1342     mb_width  = sps->pic_width_in_mbs_minus1 + 1;
1343     mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
1344         !sps->frame_mbs_only_flag;
1345     if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
1346         GST_DEBUG("size changed");
1347         reset_context   = TRUE;
1348         priv->mb_width  = mb_width;
1349         priv->mb_height = mb_height;
1350     }
1351
1352     priv->progressive_sequence = sps->frame_mbs_only_flag;
1353     gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
1354
1355     gst_vaapi_decoder_set_pixel_aspect_ratio(
1356         base_decoder,
1357         sps->vui_parameters.par_n,
1358         sps->vui_parameters.par_d
1359     );
1360
1361     if (!reset_context && priv->has_context)
1362         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1363
1364     /* XXX: fix surface size when cropping is implemented */
1365     info.profile    = priv->profile;
1366     info.entrypoint = priv->entrypoint;
1367     info.chroma_type = priv->chroma_type;
1368     info.width      = sps->width;
1369     info.height     = sps->height;
1370     info.ref_frames = dpb_size;
1371
1372     if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
1373         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1374     priv->has_context = TRUE;
1375
1376     /* Reset DPB */
1377     if (!dpb_reset(decoder, dpb_size))
1378         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1379
1380     /* Reset MVC data */
1381     if (!mvc_reset(decoder))
1382         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1383     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1384 }
1385
1386 static void
1387 fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1388     const GstH264SPS *sps)
1389 {
1390     guint i;
1391
1392     /* There are always 6 4x4 scaling lists */
1393     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
1394     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
1395
1396     for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
1397         gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
1398             iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
1399 }
1400
1401 static void
1402 fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
1403     const GstH264SPS *sps)
1404 {
1405     guint i, n;
1406
1407     /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
1408     if (!pps->transform_8x8_mode_flag)
1409         return;
1410
1411     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
1412     g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
1413
1414     n = (sps->chroma_format_idc != 3) ? 2 : 6;
1415     for (i = 0; i < n; i++) {
1416         gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
1417             iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
1418     }
1419 }
1420
1421 static GstVaapiDecoderStatus
1422 ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
1423 {
1424     GstVaapiPicture * const base_picture = &picture->base;
1425     GstH264PPS * const pps = get_pps(decoder);
1426     GstH264SPS * const sps = get_sps(decoder);
1427     VAIQMatrixBufferH264 *iq_matrix;
1428
1429     base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
1430     if (!base_picture->iq_matrix) {
1431         GST_ERROR("failed to allocate IQ matrix");
1432         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
1433     }
1434     iq_matrix = base_picture->iq_matrix->param;
1435
1436     /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
1437        is not large enough to hold lists for 4:4:4 */
1438     if (sps->chroma_format_idc == 3)
1439         return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
1440
1441     fill_iq_matrix_4x4(iq_matrix, pps, sps);
1442     fill_iq_matrix_8x8(iq_matrix, pps, sps);
1443
1444     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1445 }
1446
1447 static inline gboolean
1448 is_valid_state(guint state, guint ref_state)
1449 {
1450     return (state & ref_state) == ref_state;
1451 }
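/*
 * Illustrative sketch, not part of the upstream decoder: a picture is only
 * considered decodable once SPS, PPS and at least one slice were seen,
 * i.e. all bits of GST_H264_VIDEO_STATE_VALID_PICTURE are set in the
 * decoder state word.
 */
static inline gboolean
example_state_has_valid_picture(guint state)
{
    return is_valid_state(state, GST_H264_VIDEO_STATE_VALID_PICTURE);
}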
1452
1453 static GstVaapiDecoderStatus
1454 decode_current_picture(GstVaapiDecoderH264 *decoder)
1455 {
1456     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1457     GstVaapiPictureH264 * const picture = priv->current_picture;
1458
1459     if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
1460         goto drop_frame;
1461     priv->decoder_state = 0;
1462
1463     if (!picture)
1464         return GST_VAAPI_DECODER_STATUS_SUCCESS;
1465
1466     if (!exec_ref_pic_marking(decoder, picture))
1467         goto error;
1468     if (!dpb_add(decoder, picture))
1469         goto error;
1470     if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
1471         goto error;
1472     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1473     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1474
1475 error:
1476     /* XXX: fix for cases where first field failed to be decoded */
1477     gst_vaapi_picture_replace(&priv->current_picture, NULL);
1478     return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
1479
1480 drop_frame:
1481     priv->decoder_state = 0;
1482     return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
1483 }
1484
1485 static GstVaapiDecoderStatus
1486 parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1487 {
1488     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1489     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1490     GstH264SPS * const sps = &pi->data.sps;
1491     GstH264ParserResult result;
1492
1493     GST_DEBUG("parse SPS");
1494
1495     priv->parser_state = 0;
1496
1497     /* Variables that don't have inferred values per the H.264
1498        standard but that should get a default value anyway */
1499     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1500
1501     result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
1502     if (result != GST_H264_PARSER_OK)
1503         return get_status(result);
1504
1505     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1506     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1507 }
1508
1509 static GstVaapiDecoderStatus
1510 parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1511 {
1512     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1513     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1514     GstH264SPS * const sps = &pi->data.sps;
1515     GstH264ParserResult result;
1516
1517     GST_DEBUG("parse subset SPS");
1518
1519     /* Variables that don't have inferred values per the H.264
1520        standard but that should get a default value anyway */
1521     sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
1522
1523     result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
1524         TRUE);
1525     if (result != GST_H264_PARSER_OK)
1526         return get_status(result);
1527
1528     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
1529     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1530 }
1531
1532 static GstVaapiDecoderStatus
1533 parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1534 {
1535     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1536     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1537     GstH264PPS * const pps = &pi->data.pps;
1538     GstH264ParserResult result;
1539
1540     GST_DEBUG("parse PPS");
1541
1542     priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
1543
1544     /* Variables that don't have inferred values per the H.264
1545        standard but that should get a default value anyway */
1546     pps->slice_group_map_type = 0;
1547     pps->slice_group_change_rate_minus1 = 0;
1548
1549     result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
1550     if (result != GST_H264_PARSER_OK)
1551         return get_status(result);
1552
1553     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
1554     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1555 }
1556
1557 static GstVaapiDecoderStatus
1558 parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1559 {
1560     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1561     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1562     GArray ** const sei_ptr = &pi->data.sei;
1563     GstH264ParserResult result;
1564
1565     GST_DEBUG("parse SEI");
1566
1567     result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
1568     if (result != GST_H264_PARSER_OK) {
1569         GST_WARNING("failed to parse SEI messages");
1570         return get_status(result);
1571     }
1572     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1573 }
1574
1575 static GstVaapiDecoderStatus
1576 parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1577 {
1578     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1579     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1580     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
1581     GstH264NalUnit * const nalu = &pi->nalu;
1582     GstH264SPS *sps;
1583     GstH264ParserResult result;
1584
1585     GST_DEBUG("parse slice");
1586
1587     priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
1588                            GST_H264_VIDEO_STATE_GOT_PPS);
1589
1590     /* Propagate Prefix NAL unit info, if necessary */
1591     switch (nalu->type) {
1592     case GST_H264_NAL_SLICE:
1593     case GST_H264_NAL_SLICE_IDR: {
1594         GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
1595         if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
1596             /* MVC sequences shall have a Prefix NAL unit immediately
1597                preceding this NAL unit */
1598             pi->nalu.extension_type = prev_pi->nalu.extension_type;
1599             pi->nalu.extension = prev_pi->nalu.extension;
1600         }
1601         else {
1602             /* In the very unlikely case there is no Prefix NAL unit
1603                immediately preceding this NAL unit, try to infer some
1604                defaults (H.7.4.1.1) */
1605             GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
1606             mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
1607             nalu->idr_pic_flag = !mvc->non_idr_flag;
1608             mvc->priority_id = 0;
1609             mvc->view_id = 0;
1610             mvc->temporal_id = 0;
1611             mvc->anchor_pic_flag = 0;
1612             mvc->inter_view_flag = 1;
1613         }
1614         break;
1615     }
1616     }
1617
1618     /* Variables that don't have inferred values per the H.264
1619        standard but that should get a default value anyway */
1620     slice_hdr->cabac_init_idc = 0;
1621     slice_hdr->direct_spatial_mv_pred_flag = 0;
1622
1623     result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
1624         slice_hdr, TRUE, TRUE);
1625     if (result != GST_H264_PARSER_OK)
1626         return get_status(result);
1627
1628     sps = slice_hdr->pps->sequence;
1629
1630     /* Update MVC data */
1631     pi->view_id = get_view_id(&pi->nalu);
1632     pi->voc = get_view_order_index(sps, pi->view_id);
1633
1634     priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
1635     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1636 }
1637
1638 static GstVaapiDecoderStatus
1639 decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1640 {
1641     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1642     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1643     GstH264SPS * const sps = &pi->data.sps;
1644
1645     GST_DEBUG("decode SPS");
1646
1647     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1648     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1649 }
1650
1651 static GstVaapiDecoderStatus
1652 decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1653 {
1654     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1655     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1656     GstH264SPS * const sps = &pi->data.sps;
1657
1658     GST_DEBUG("decode subset SPS");
1659
1660     gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
1661     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1662 }
1663
1664 static GstVaapiDecoderStatus
1665 decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
1666 {
1667     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1668     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
1669     GstH264PPS * const pps = &pi->data.pps;
1670
1671     GST_DEBUG("decode PPS");
1672
1673     gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
1674     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1675 }
1676
1677 static GstVaapiDecoderStatus
1678 decode_sequence_end(GstVaapiDecoderH264 *decoder)
1679 {
1680     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1681     GstVaapiDecoderStatus status;
1682
1683     GST_DEBUG("decode sequence-end");
1684
1685     status = decode_current_picture(decoder);
1686     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
1687         return status;
1688
1689     dpb_flush(decoder, NULL);
1690
1691     /* Reset defaults, should there be a new sequence available next */
1692     priv->max_views = 1;
1693     return GST_VAAPI_DECODER_STATUS_SUCCESS;
1694 }
1695
1696 /* 8.2.1.1 - Decoding process for picture order count type 0 */
1697 static void
1698 init_picture_poc_0(
1699     GstVaapiDecoderH264 *decoder,
1700     GstVaapiPictureH264 *picture,
1701     GstH264SliceHdr     *slice_hdr
1702 )
1703 {
1704     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1705     GstH264SPS * const sps = get_sps(decoder);
1706     const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
1707     gint32 temp_poc;
1708
1709     GST_DEBUG("decode picture order count type 0");
1710
1711     if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
1712         priv->prev_poc_msb = 0;
1713         priv->prev_poc_lsb = 0;
1714     }
1715     else if (priv->prev_pic_has_mmco5) {
1716         priv->prev_poc_msb = 0;
1717         priv->prev_poc_lsb =
1718             (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
1719              0 : priv->field_poc[TOP_FIELD]);
1720     }
1721     else {
1722         priv->prev_poc_msb = priv->poc_msb;
1723         priv->prev_poc_lsb = priv->poc_lsb;
1724     }
1725
1726     // (8-3)
1727     priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
1728     if (priv->poc_lsb < priv->prev_poc_lsb &&
1729         (priv->prev_poc_lsb - priv->poc_lsb) >= (MaxPicOrderCntLsb / 2))
1730         priv->poc_msb = priv->prev_poc_msb + MaxPicOrderCntLsb;
1731     else if (priv->poc_lsb > priv->prev_poc_lsb &&
1732              (priv->poc_lsb - priv->prev_poc_lsb) > (MaxPicOrderCntLsb / 2))
1733         priv->poc_msb = priv->prev_poc_msb - MaxPicOrderCntLsb;
1734     else
1735         priv->poc_msb = priv->prev_poc_msb;
1736
1737     temp_poc = priv->poc_msb + priv->poc_lsb;
1738     switch (picture->structure) {
1739     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1740         // (8-4, 8-5)
1741         priv->field_poc[TOP_FIELD] = temp_poc;
1742         priv->field_poc[BOTTOM_FIELD] = temp_poc +
1743             slice_hdr->delta_pic_order_cnt_bottom;
1744         break;
1745     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1746         // (8-4)
1747         priv->field_poc[TOP_FIELD] = temp_poc;
1748         break;
1749     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1750         // (8-5)
1751         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1752         break;
1753     }
1754 }
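
/* Worked example for (8-3), assuming log2_max_pic_order_cnt_lsb_minus4 = 0,
 * i.e. MaxPicOrderCntLsb = 16: with prev_poc_lsb = 14, prev_poc_msb = 0 and
 * an incoming pic_order_cnt_lsb of 2, we have 2 < 14 and 14 - 2 = 12 >= 8,
 * so poc_msb becomes 0 + 16 = 16 and temp_poc = 16 + 2 = 18. POC thus keeps
 * increasing across the pic_order_cnt_lsb wrap-around. */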
1755
1756 /* 8.2.1.2 - Decoding process for picture order count type 1 */
1757 static void
1758 init_picture_poc_1(
1759     GstVaapiDecoderH264 *decoder,
1760     GstVaapiPictureH264 *picture,
1761     GstH264SliceHdr     *slice_hdr
1762 )
1763 {
1764     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1765     GstH264SPS * const sps = get_sps(decoder);
1766     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1767     gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
1768     guint i;
1769
1770     GST_DEBUG("decode picture order count type 1");
1771
1772     if (priv->prev_pic_has_mmco5)
1773         prev_frame_num_offset = 0;
1774     else
1775         prev_frame_num_offset = priv->frame_num_offset;
1776
1777     // (8-6)
1778     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1779         priv->frame_num_offset = 0;
1780     else if (priv->prev_frame_num > priv->frame_num)
1781         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1782     else
1783         priv->frame_num_offset = prev_frame_num_offset;
1784
1785     // (8-7)
1786     if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
1787         abs_frame_num = priv->frame_num_offset + priv->frame_num;
1788     else
1789         abs_frame_num = 0;
1790     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture) && abs_frame_num > 0)
1791         abs_frame_num = abs_frame_num - 1;
1792
1793     if (abs_frame_num > 0) {
1794         gint32 expected_delta_per_poc_cycle;
1795         gint32 poc_cycle_cnt, frame_num_in_poc_cycle;
1796
1797         expected_delta_per_poc_cycle = 0;
1798         for (i = 0; i < sps->num_ref_frames_in_pic_order_cnt_cycle; i++)
1799             expected_delta_per_poc_cycle += sps->offset_for_ref_frame[i];
1800
1801         // (8-8)
1802         poc_cycle_cnt = (abs_frame_num - 1) /
1803             sps->num_ref_frames_in_pic_order_cnt_cycle;
1804         frame_num_in_poc_cycle = (abs_frame_num - 1) %
1805             sps->num_ref_frames_in_pic_order_cnt_cycle;
1806
1807         // (8-9)
1808         expected_poc = poc_cycle_cnt * expected_delta_per_poc_cycle;
1809         for (i = 0; i <= frame_num_in_poc_cycle; i++)
1810             expected_poc += sps->offset_for_ref_frame[i];
1811     }
1812     else
1813         expected_poc = 0;
1814     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1815         expected_poc += sps->offset_for_non_ref_pic;
1816
1817     // (8-10)
1818     switch (picture->structure) {
1819     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
1820         priv->field_poc[TOP_FIELD] = expected_poc +
1821             slice_hdr->delta_pic_order_cnt[0];
1822         priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
1823             sps->offset_for_top_to_bottom_field +
1824             slice_hdr->delta_pic_order_cnt[1];
1825         break;
1826     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
1827         priv->field_poc[TOP_FIELD] = expected_poc +
1828             slice_hdr->delta_pic_order_cnt[0];
1829         break;
1830     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
1831         priv->field_poc[BOTTOM_FIELD] = expected_poc +
1832             sps->offset_for_top_to_bottom_field +
1833             slice_hdr->delta_pic_order_cnt[0];
1834         break;
1835     }
1836 }
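
/* Worked example for (8-8)/(8-9), assuming
 * num_ref_frames_in_pic_order_cnt_cycle = 1 and offset_for_ref_frame[0] = 2:
 * a reference picture with abs_frame_num = 3 gives
 * poc_cycle_cnt = (3 - 1) / 1 = 2, frame_num_in_poc_cycle = 0 and
 * expected_poc = 2 * 2 + 2 = 6, i.e. POC advances by 2 per frame. */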
1837
1838 /* 8.2.1.3 - Decoding process for picture order count type 2 */
1839 static void
1840 init_picture_poc_2(
1841     GstVaapiDecoderH264 *decoder,
1842     GstVaapiPictureH264 *picture,
1843     GstH264SliceHdr     *slice_hdr
1844 )
1845 {
1846     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1847     GstH264SPS * const sps = get_sps(decoder);
1848     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1849     gint32 prev_frame_num_offset, temp_poc;
1850
1851     GST_DEBUG("decode picture order count type 2");
1852
1853     if (priv->prev_pic_has_mmco5)
1854         prev_frame_num_offset = 0;
1855     else
1856         prev_frame_num_offset = priv->frame_num_offset;
1857
1858     // (8-11)
1859     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1860         priv->frame_num_offset = 0;
1861     else if (priv->prev_frame_num > priv->frame_num)
1862         priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
1863     else
1864         priv->frame_num_offset = prev_frame_num_offset;
1865
1866     // (8-12)
1867     if (GST_VAAPI_PICTURE_IS_IDR(picture))
1868         temp_poc = 0;
1869     else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
1870         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
1871     else
1872         temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
1873
1874     // (8-13)
1875     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1876         priv->field_poc[TOP_FIELD] = temp_poc;
1877     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1878         priv->field_poc[BOTTOM_FIELD] = temp_poc;
1879 }
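
/* Worked example for (8-12), assuming frame_num_offset = 0: a reference
 * picture with frame_num = 3 gets temp_poc = 2 * 3 = 6, while a
 * non-reference picture with the same frame_num gets 2 * 3 - 1 = 5, which
 * reflects the constraint that output order matches decoding order when
 * pic_order_cnt_type is 2. */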
1880
1881 /* 8.2.1 - Decoding process for picture order count */
1882 static void
1883 init_picture_poc(
1884     GstVaapiDecoderH264 *decoder,
1885     GstVaapiPictureH264 *picture,
1886     GstH264SliceHdr     *slice_hdr
1887 )
1888 {
1889     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1890     GstH264SPS * const sps = get_sps(decoder);
1891
1892     switch (sps->pic_order_cnt_type) {
1893     case 0:
1894         init_picture_poc_0(decoder, picture, slice_hdr);
1895         break;
1896     case 1:
1897         init_picture_poc_1(decoder, picture, slice_hdr);
1898         break;
1899     case 2:
1900         init_picture_poc_2(decoder, picture, slice_hdr);
1901         break;
1902     }
1903
1904     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
1905         picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
1906     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
1907         picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
1908     picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
1909 }
1910
1911 static int
1912 compare_picture_pic_num_dec(const void *a, const void *b)
1913 {
1914     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1915     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1916
1917     return picB->pic_num - picA->pic_num;
1918 }
1919
1920 static int
1921 compare_picture_long_term_pic_num_inc(const void *a, const void *b)
1922 {
1923     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1924     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1925
1926     return picA->long_term_pic_num - picB->long_term_pic_num;
1927 }
1928
1929 static int
1930 compare_picture_poc_dec(const void *a, const void *b)
1931 {
1932     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1933     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1934
1935     return picB->base.poc - picA->base.poc;
1936 }
1937
1938 static int
1939 compare_picture_poc_inc(const void *a, const void *b)
1940 {
1941     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1942     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1943
1944     return picA->base.poc - picB->base.poc;
1945 }
1946
1947 static int
1948 compare_picture_frame_num_wrap_dec(const void *a, const void *b)
1949 {
1950     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1951     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1952
1953     return picB->frame_num_wrap - picA->frame_num_wrap;
1954 }
1955
1956 static int
1957 compare_picture_long_term_frame_idx_inc(const void *a, const void *b)
1958 {
1959     const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
1960     const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
1961
1962     return picA->long_term_frame_idx - picB->long_term_frame_idx;
1963 }
1964
1965 /* 8.2.4.1 - Decoding process for picture numbers */
1966 static void
1967 init_picture_refs_pic_num(
1968     GstVaapiDecoderH264 *decoder,
1969     GstVaapiPictureH264 *picture,
1970     GstH264SliceHdr     *slice_hdr
1971 )
1972 {
1973     GstVaapiDecoderH264Private * const priv = &decoder->priv;
1974     GstH264SPS * const sps = get_sps(decoder);
1975     const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
1976     guint i;
1977
1978     GST_DEBUG("decode picture numbers");
1979
1980     for (i = 0; i < priv->short_ref_count; i++) {
1981         GstVaapiPictureH264 * const pic = priv->short_ref[i];
1982
1983         // (H.8.2)
1984         if (pic->base.view_id != picture->base.view_id)
1985             continue;
1986
1987         // (8-27)
1988         if (pic->frame_num > priv->frame_num)
1989             pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
1990         else
1991             pic->frame_num_wrap = pic->frame_num;
1992
1993         // (8-28, 8-30, 8-31)
1994         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
1995             pic->pic_num = pic->frame_num_wrap;
1996         else {
1997             if (pic->structure == picture->structure)
1998                 pic->pic_num = 2 * pic->frame_num_wrap + 1;
1999             else
2000                 pic->pic_num = 2 * pic->frame_num_wrap;
2001         }
2002     }
2003
2004     for (i = 0; i < priv->long_ref_count; i++) {
2005         GstVaapiPictureH264 * const pic = priv->long_ref[i];
2006
2007         // (H.8.2)
2008         if (pic->base.view_id != picture->base.view_id)
2009             continue;
2010
2011         // (8-29, 8-32, 8-33)
2012         if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2013             pic->long_term_pic_num = pic->long_term_frame_idx;
2014         else {
2015             if (pic->structure == picture->structure)
2016                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
2017             else
2018                 pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
2019         }
2020     }
2021 }
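
/* Example for (8-28)..(8-31), assuming field decoding: a short-term
 * reference field with frame_num_wrap = 5 gets pic_num = 2 * 5 + 1 = 11
 * when it has the same parity as the current field, and
 * pic_num = 2 * 5 = 10 when it has the opposite parity; for frame
 * decoding, pic_num is simply frame_num_wrap. */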
2022
2023 #define SORT_REF_LIST(list, n, compare_func) \
2024     qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
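
/* For instance, SORT_REF_LIST(priv->RefPicList0, n, poc_dec) expands to
 *   qsort(priv->RefPicList0, n, sizeof(*(priv->RefPicList0)),
 *         compare_picture_poc_dec);
 * i.e. the first n entries of the list are sorted in decreasing POC order. */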
2025
2026 static void
2027 init_picture_refs_fields_1(
2028     guint                picture_structure,
2029     GstVaapiPictureH264 *RefPicList[32],
2030     guint               *RefPicList_count,
2031     GstVaapiPictureH264 *ref_list[32],
2032     guint                ref_list_count
2033 )
2034 {
2035     guint i, j, n;
2036
2037     i = 0;
2038     j = 0;
2039     n = *RefPicList_count;
2040     do {
2041         g_assert(n < 32);
2042         for (; i < ref_list_count; i++) {
2043             if (ref_list[i]->structure == picture_structure) {
2044                 RefPicList[n++] = ref_list[i++];
2045                 break;
2046             }
2047         }
2048         for (; j < ref_list_count; j++) {
2049             if (ref_list[j]->structure != picture_structure) {
2050                 RefPicList[n++] = ref_list[j++];
2051                 break;
2052             }
2053         }
2054     } while (i < ref_list_count || j < ref_list_count);
2055     *RefPicList_count = n;
2056 }
2057
2058 static inline void
2059 init_picture_refs_fields(
2060     GstVaapiPictureH264 *picture,
2061     GstVaapiPictureH264 *RefPicList[32],
2062     guint               *RefPicList_count,
2063     GstVaapiPictureH264 *short_ref[32],
2064     guint                short_ref_count,
2065     GstVaapiPictureH264 *long_ref[32],
2066     guint                long_ref_count
2067 )
2068 {
2069     guint n = 0;
2070
2071     /* 8.2.4.2.5 - reference picture lists in fields */
2072     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2073         short_ref, short_ref_count);
2074     init_picture_refs_fields_1(picture->structure, RefPicList, &n,
2075         long_ref, long_ref_count);
2076     *RefPicList_count = n;
2077 }
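
/* Example for 8.2.4.2.5: with same-parity reference fields {s0, s1} and
 * opposite-parity reference fields {o0, o1} in the (already ordered) input
 * lists, the alternation above produces the order s0, o0, s1, o1. */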
2078
2079 /* Finds the inter-view reference picture with the supplied view id */
2080 static GstVaapiPictureH264 *
2081 find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
2082 {
2083     GPtrArray * const inter_views = decoder->priv.inter_views;
2084     guint i;
2085
2086     for (i = 0; i < inter_views->len; i++) {
2087         GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
2088         if (picture->base.view_id == view_id)
2089             return picture;
2090     }
2091
2092     GST_WARNING("failed to find inter-view reference picture for view_id: %d",
2093         view_id);
2094     return NULL;
2095 }
2096
2097 /* Checks whether the view id exists in the supplied list of view ids */
2098 static gboolean
2099 find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
2100 {
2101     guint i;
2102
2103     for (i = 0; i < num_view_ids; i++) {
2104         if (view_ids[i] == view_id)
2105             return TRUE;
2106     }
2107     return FALSE;
2108 }
2109
2110 static gboolean
2111 find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
2112     gboolean is_anchor)
2113 {
2114     if (is_anchor)
2115         return (find_view_id(view_id, view->anchor_ref_l0,
2116                     view->num_anchor_refs_l0) ||
2117                 find_view_id(view_id, view->anchor_ref_l1,
2118                     view->num_anchor_refs_l1));
2119
2120     return (find_view_id(view_id, view->non_anchor_ref_l0,
2121                 view->num_non_anchor_refs_l0) ||
2122             find_view_id(view_id, view->non_anchor_ref_l1,
2123                 view->num_non_anchor_refs_l1));
2124 }
2125
2126 /* Checks whether the inter-view reference picture with the supplied
2127    view id is used for decoding the current view component picture */
2128 static gboolean
2129 is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
2130     guint16 view_id, GstVaapiPictureH264 *picture)
2131 {
2132     const GstH264SPS * const sps = get_sps(decoder);
2133     gboolean is_anchor;
2134
2135     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2136         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2137         return FALSE;
2138
2139     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2140     return find_view_id_in_view(view_id,
2141         &sps->extension.mvc.view[picture->base.voc], is_anchor);
2142 }
2143
2144 /* Checks whether the supplied inter-view reference picture is used
2145    for decoding the next view component pictures */
2146 static gboolean
2147 is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
2148     GstVaapiPictureH264 *picture)
2149 {
2150     const GstH264SPS * const sps = get_sps(decoder);
2151     gboolean is_anchor;
2152     guint i, num_views;
2153
2154     if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
2155         sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2156         return FALSE;
2157
2158     is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
2159     num_views = sps->extension.mvc.num_views_minus1 + 1;
2160     for (i = picture->base.voc + 1; i < num_views; i++) {
2161         const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
2162         if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
2163             return TRUE;
2164     }
2165     return FALSE;
2166 }
2167
2168 /* H.8.2.1 - Initialization process for inter-view prediction references */
2169 static void
2170 init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
2171     GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
2172     const guint16 *view_ids, guint num_view_ids)
2173 {
2174     guint j, n;
2175
2176     n = *ref_list_count_ptr;
2177     for (j = 0; j < num_view_ids && n < num_refs; j++) {
2178         GstVaapiPictureH264 * const pic =
2179             find_inter_view_reference(decoder, view_ids[j]);
2180         if (pic)
2181             ref_list[n++] = pic;
2182     }
2183     *ref_list_count_ptr = n;
2184 }
2185
2186 static inline void
2187 init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
2188     GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
2189 {
2190     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2191     const GstH264SPS * const sps = get_sps(decoder);
2192     const GstH264SPSExtMVCView *view;
2193
2194     GST_DEBUG("initialize reference picture list for inter-view prediction");
2195
2196     if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
2197         return;
2198     view = &sps->extension.mvc.view[picture->base.voc];
2199
2200 #define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
2201         init_picture_refs_mvc_1(decoder,                                \
2202             priv->RefPicList##ref_list,                                 \
2203             &priv->RefPicList##ref_list##_count,                        \
2204             slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
2205             view->view_list##_l##ref_list,                              \
2206             view->num_##view_list##s_l##ref_list);                      \
2207     } while (0)
2208
2209     if (list == 0) {
2210         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2211             INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
2212         else
2213             INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
2214     }
2215     else {
2216         if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
2217             INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
2218         else
2219             INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
2220     }
2221
2222 #undef INVOKE_INIT_PICTURE_REFS_MVC
2223 }
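
/* For reference, INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref) above expands
 * to:
 *   init_picture_refs_mvc_1(decoder, priv->RefPicList0,
 *       &priv->RefPicList0_count,
 *       slice_hdr->num_ref_idx_l0_active_minus1 + 1,
 *       view->anchor_ref_l0, view->num_anchor_refs_l0);
 * i.e. inter-view references are appended to RefPicList0 from the anchor
 * reference view id list of the current view order index. */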
2224
2225 static void
2226 init_picture_refs_p_slice(
2227     GstVaapiDecoderH264 *decoder,
2228     GstVaapiPictureH264 *picture,
2229     GstH264SliceHdr     *slice_hdr
2230 )
2231 {
2232     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2233     GstVaapiPictureH264 **ref_list;
2234     guint i;
2235
2236     GST_DEBUG("decode reference picture list for P and SP slices");
2237
2238     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2239         /* 8.2.4.2.1 - P and SP slices in frames */
2240         if (priv->short_ref_count > 0) {
2241             ref_list = priv->RefPicList0;
2242             for (i = 0; i < priv->short_ref_count; i++)
2243                 ref_list[i] = priv->short_ref[i];
2244             SORT_REF_LIST(ref_list, i, pic_num_dec);
2245             priv->RefPicList0_count += i;
2246         }
2247
2248         if (priv->long_ref_count > 0) {
2249             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2250             for (i = 0; i < priv->long_ref_count; i++)
2251                 ref_list[i] = priv->long_ref[i];
2252             SORT_REF_LIST(ref_list, i, long_term_pic_num_inc);
2253             priv->RefPicList0_count += i;
2254         }
2255     }
2256     else {
2257         /* 8.2.4.2.2 - P and SP slices in fields */
2258         GstVaapiPictureH264 *short_ref[32];
2259         guint short_ref_count = 0;
2260         GstVaapiPictureH264 *long_ref[32];
2261         guint long_ref_count = 0;
2262
2263         if (priv->short_ref_count > 0) {
2264             for (i = 0; i < priv->short_ref_count; i++)
2265                 short_ref[i] = priv->short_ref[i];
2266             SORT_REF_LIST(short_ref, i, frame_num_wrap_dec);
2267             short_ref_count = i;
2268         }
2269
2270         if (priv->long_ref_count > 0) {
2271             for (i = 0; i < priv->long_ref_count; i++)
2272                 long_ref[i] = priv->long_ref[i];
2273             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2274             long_ref_count = i;
2275         }
2276
2277         init_picture_refs_fields(
2278             picture,
2279             priv->RefPicList0, &priv->RefPicList0_count,
2280             short_ref,          short_ref_count,
2281             long_ref,           long_ref_count
2282         );
2283     }
2284
2285     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2286         /* RefPicList0 */
2287         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2288     }
2289 }
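
/* Example for 8.2.4.2.1: in frame coding, short-term references with
 * PicNum {1, 3, 2} followed by long-term references with LongTermPicNum
 * {0, 2} yield RefPicList0 = {3, 2, 1, 0(lt), 2(lt)}, i.e. short-term
 * entries by decreasing PicNum first, then long-term entries by
 * increasing LongTermPicNum. */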
2290
2291 static void
2292 init_picture_refs_b_slice(
2293     GstVaapiDecoderH264 *decoder,
2294     GstVaapiPictureH264 *picture,
2295     GstH264SliceHdr     *slice_hdr
2296 )
2297 {
2298     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2299     GstVaapiPictureH264 **ref_list;
2300     guint i, n;
2301
2302     GST_DEBUG("decode reference picture list for B slices");
2303
2304     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2305         /* 8.2.4.2.3 - B slices in frames */
2306
2307         /* RefPicList0 */
2308         if (priv->short_ref_count > 0) {
2309             // 1. Short-term references
2310             ref_list = priv->RefPicList0;
2311             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2312                 if (priv->short_ref[i]->base.poc < picture->base.poc)
2313                     ref_list[n++] = priv->short_ref[i];
2314             }
2315             SORT_REF_LIST(ref_list, n, poc_dec);
2316             priv->RefPicList0_count += n;
2317
2318             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2319             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2320                 if (priv->short_ref[i]->base.poc >= picture->base.poc)
2321                     ref_list[n++] = priv->short_ref[i];
2322             }
2323             SORT_REF_LIST(ref_list, n, poc_inc);
2324             priv->RefPicList0_count += n;
2325         }
2326
2327         if (priv->long_ref_count > 0) {
2328             // 2. Long-term references
2329             ref_list = &priv->RefPicList0[priv->RefPicList0_count];
2330             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2331                 ref_list[n++] = priv->long_ref[i];
2332             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2333             priv->RefPicList0_count += n;
2334         }
2335
2336         /* RefPicList1 */
2337         if (priv->short_ref_count > 0) {
2338             // 1. Short-term references
2339             ref_list = priv->RefPicList1;
2340             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2341                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2342                     ref_list[n++] = priv->short_ref[i];
2343             }
2344             SORT_REF_LIST(ref_list, n, poc_inc);
2345             priv->RefPicList1_count += n;
2346
2347             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2348             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2349                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2350                     ref_list[n++] = priv->short_ref[i];
2351             }
2352             SORT_REF_LIST(ref_list, n, poc_dec);
2353             priv->RefPicList1_count += n;
2354         }
2355
2356         if (priv->long_ref_count > 0) {
2357             // 2. Long-term references
2358             ref_list = &priv->RefPicList1[priv->RefPicList1_count];
2359             for (n = 0, i = 0; i < priv->long_ref_count; i++)
2360                 ref_list[n++] = priv->long_ref[i];
2361             SORT_REF_LIST(ref_list, n, long_term_pic_num_inc);
2362             priv->RefPicList1_count += n;
2363         }
2364     }
2365     else {
2366         /* 8.2.4.2.4 - B slices in fields */
2367         GstVaapiPictureH264 *short_ref0[32];
2368         guint short_ref0_count = 0;
2369         GstVaapiPictureH264 *short_ref1[32];
2370         guint short_ref1_count = 0;
2371         GstVaapiPictureH264 *long_ref[32];
2372         guint long_ref_count = 0;
2373
2374         /* refFrameList0ShortTerm */
2375         if (priv->short_ref_count > 0) {
2376             ref_list = short_ref0;
2377             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2378                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2379                     ref_list[n++] = priv->short_ref[i];
2380             }
2381             SORT_REF_LIST(ref_list, n, poc_dec);
2382             short_ref0_count += n;
2383
2384             ref_list = &short_ref0[short_ref0_count];
2385             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2386                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2387                     ref_list[n++] = priv->short_ref[i];
2388             }
2389             SORT_REF_LIST(ref_list, n, poc_inc);
2390             short_ref0_count += n;
2391         }
2392
2393         /* refFrameList1ShortTerm */
2394         if (priv->short_ref_count > 0) {
2395             ref_list = short_ref1;
2396             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2397                 if (priv->short_ref[i]->base.poc > picture->base.poc)
2398                     ref_list[n++] = priv->short_ref[i];
2399             }
2400             SORT_REF_LIST(ref_list, n, poc_inc);
2401             short_ref1_count += n;
2402
2403             ref_list = &short_ref1[short_ref1_count];
2404             for (n = 0, i = 0; i < priv->short_ref_count; i++) {
2405                 if (priv->short_ref[i]->base.poc <= picture->base.poc)
2406                     ref_list[n++] = priv->short_ref[i];
2407             }
2408             SORT_REF_LIST(ref_list, n, poc_dec);
2409             short_ref1_count += n;
2410         }
2411
2412         /* refFrameListLongTerm */
2413         if (priv->long_ref_count > 0) {
2414             for (i = 0; i < priv->long_ref_count; i++)
2415                 long_ref[i] = priv->long_ref[i];
2416             SORT_REF_LIST(long_ref, i, long_term_frame_idx_inc);
2417             long_ref_count = i;
2418         }
2419
2420         init_picture_refs_fields(
2421             picture,
2422             priv->RefPicList0, &priv->RefPicList0_count,
2423             short_ref0,         short_ref0_count,
2424             long_ref,           long_ref_count
2425         );
2426
2427         init_picture_refs_fields(
2428             picture,
2429             priv->RefPicList1, &priv->RefPicList1_count,
2430             short_ref1,         short_ref1_count,
2431             long_ref,           long_ref_count
2432         );
2433     }
2434
2435     /* If RefPicList1 has more than one entry and is identical to
2436        RefPicList0, swap its first two entries */
2437     if (priv->RefPicList1_count > 1 &&
2438         priv->RefPicList1_count == priv->RefPicList0_count &&
2439         memcmp(priv->RefPicList0, priv->RefPicList1,
2440                priv->RefPicList0_count * sizeof(priv->RefPicList0[0])) == 0) {
2441         GstVaapiPictureH264 * const tmp = priv->RefPicList1[0];
2442         priv->RefPicList1[0] = priv->RefPicList1[1];
2443         priv->RefPicList1[1] = tmp;
2444     }
2445
2446     if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
2447         /* RefPicList0 */
2448         init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
2449
2450         /* RefPicList1 */
2451         init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
2452     }
2453 }
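
/* Example for 8.2.4.2.3: in frame coding with current POC = 6 and
 * short-term references with POC {0, 2, 4, 8, 10}, the code above builds
 * RefPicList0 = {4, 2, 0, 8, 10} and RefPicList1 = {8, 10, 4, 2, 0},
 * followed in both lists by the long-term references in increasing
 * LongTermPicNum order. */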
2454
2455 #undef SORT_REF_LIST
2456
2457 static gint
2458 find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
2459 {
2460     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2461     guint i;
2462
2463     for (i = 0; i < priv->short_ref_count; i++) {
2464         if (priv->short_ref[i]->pic_num == pic_num)
2465             return i;
2466     }
2467     GST_ERROR("found no short-term reference picture with PicNum = %d",
2468               pic_num);
2469     return -1;
2470 }
2471
2472 static gint
2473 find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
2474 {
2475     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2476     guint i;
2477
2478     for (i = 0; i < priv->long_ref_count; i++) {
2479         if (priv->long_ref[i]->long_term_pic_num == long_term_pic_num)
2480             return i;
2481     }
2482     GST_ERROR("found no long-term reference picture with LongTermPicNum = %d",
2483               long_term_pic_num);
2484     return -1;
2485 }
2486
2487 static void
2488 exec_picture_refs_modification_1(
2489     GstVaapiDecoderH264           *decoder,
2490     GstVaapiPictureH264           *picture,
2491     GstH264SliceHdr               *slice_hdr,
2492     guint                          list
2493 )
2494 {
2495     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2496     GstH264SPS * const sps = get_sps(decoder);
2497     GstH264RefPicListModification *ref_pic_list_modification;
2498     guint num_ref_pic_list_modifications;
2499     GstVaapiPictureH264 **ref_list;
2500     guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
2501     const guint16 *view_ids = NULL;
2502     guint i, j, n, num_refs, num_view_ids = 0;
2503     gint found_ref_idx;
2504     gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
2505
2506     GST_DEBUG("modification process of reference picture list %u", list);
2507
2508     if (list == 0) {
2509         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l0;
2510         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l0;
2511         ref_list                       = priv->RefPicList0;
2512         ref_list_count_ptr             = &priv->RefPicList0_count;
2513         num_refs                       = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
2514
2515         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2516             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2517             const GstH264SPSExtMVCView * const view =
2518                 &sps->extension.mvc.view[picture->base.voc];
2519             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2520                 view_ids = view->anchor_ref_l0;
2521                 num_view_ids = view->num_anchor_refs_l0;
2522             }
2523             else {
2524                 view_ids = view->non_anchor_ref_l0;
2525                 num_view_ids = view->num_non_anchor_refs_l0;
2526             }
2527         }
2528     }
2529     else {
2530         ref_pic_list_modification      = slice_hdr->ref_pic_list_modification_l1;
2531         num_ref_pic_list_modifications = slice_hdr->n_ref_pic_list_modification_l1;
2532         ref_list                       = priv->RefPicList1;
2533         ref_list_count_ptr             = &priv->RefPicList1_count;
2534         num_refs                       = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
2535
2536         if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
2537             sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
2538             const GstH264SPSExtMVCView * const view =
2539                 &sps->extension.mvc.view[picture->base.voc];
2540             if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
2541                 view_ids = view->anchor_ref_l1;
2542                 num_view_ids = view->num_anchor_refs_l1;
2543             }
2544             else {
2545                 view_ids = view->non_anchor_ref_l1;
2546                 num_view_ids = view->num_non_anchor_refs_l1;
2547             }
2548         }
2549     }
2550     ref_list_count = *ref_list_count_ptr;
2551
2552     if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2553         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
2554         CurrPicNum = 2 * slice_hdr->frame_num + 1;              // 2 * frame_num + 1
2555     }
2556     else {
2557         MaxPicNum  = 1 << (sps->log2_max_frame_num_minus4 + 4); // MaxFrameNum
2558         CurrPicNum = slice_hdr->frame_num;                      // frame_num
2559     }
2560
2561     picNumPred = CurrPicNum;
2562     picViewIdxPred = -1;
2563
2564     for (i = 0; i < num_ref_pic_list_modifications; i++) {
2565         GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
2566         if (l->modification_of_pic_nums_idc == 3)
2567             break;
2568
2569         /* 8.2.4.3.1 - Short-term reference pictures */
2570         if (l->modification_of_pic_nums_idc == 0 || l->modification_of_pic_nums_idc == 1) {
2571             gint32 abs_diff_pic_num = l->value.abs_diff_pic_num_minus1 + 1;
2572             gint32 picNum, picNumNoWrap;
2573
2574             // (8-34)
2575             if (l->modification_of_pic_nums_idc == 0) {
2576                 picNumNoWrap = picNumPred - abs_diff_pic_num;
2577                 if (picNumNoWrap < 0)
2578                     picNumNoWrap += MaxPicNum;
2579             }
2580
2581             // (8-35)
2582             else {
2583                 picNumNoWrap = picNumPred + abs_diff_pic_num;
2584                 if (picNumNoWrap >= MaxPicNum)
2585                     picNumNoWrap -= MaxPicNum;
2586             }
2587             picNumPred = picNumNoWrap;
2588
2589             // (8-36)
2590             picNum = picNumNoWrap;
2591             if (picNum > CurrPicNum)
2592                 picNum -= MaxPicNum;
2593
2594             // (8-37)
2595             for (j = num_refs; j > ref_list_idx; j--)
2596                 ref_list[j] = ref_list[j - 1];
2597             found_ref_idx = find_short_term_reference(decoder, picNum);
2598             ref_list[ref_list_idx++] =
2599                 found_ref_idx >= 0 ? priv->short_ref[found_ref_idx] : NULL;
2600             n = ref_list_idx;
2601             for (j = ref_list_idx; j <= num_refs; j++) {
2602                 gint32 PicNumF;
2603                 if (!ref_list[j])
2604                     continue;
2605                 PicNumF =
2606                     GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
2607                     ref_list[j]->pic_num : MaxPicNum;
2608                 if (PicNumF != picNum ||
2609                     ref_list[j]->base.view_id != picture->base.view_id)
2610                     ref_list[n++] = ref_list[j];
2611             }
2612         }
2613
2614         /* 8.2.4.3.2 - Long-term reference pictures */
2615         else if (l->modification_of_pic_nums_idc == 2) {
2616
2617             for (j = num_refs; j > ref_list_idx; j--)
2618                 ref_list[j] = ref_list[j - 1];
2619             found_ref_idx =
2620                 find_long_term_reference(decoder, l->value.long_term_pic_num);
2621             ref_list[ref_list_idx++] =
2622                 found_ref_idx >= 0 ? priv->long_ref[found_ref_idx] : NULL;
2623             n = ref_list_idx;
2624             for (j = ref_list_idx; j <= num_refs; j++) {
2625                 gint32 LongTermPicNumF;
2626                 if (!ref_list[j])
2627                     continue;
2628                 LongTermPicNumF =
2629                     GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
2630                     ref_list[j]->long_term_pic_num : INT_MAX;
2631                 if (LongTermPicNumF != l->value.long_term_pic_num ||
2632                     ref_list[j]->base.view_id != picture->base.view_id)
2633                     ref_list[n++] = ref_list[j];
2634             }
2635         }
2636
2637         /* H.8.2.2.3 - Inter-view prediction reference pictures */
2638         else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
2639                   sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
2640                  (l->modification_of_pic_nums_idc == 4 ||
2641                   l->modification_of_pic_nums_idc == 5)) {
2642             gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
2643             gint32 picViewIdx, targetViewId;
2644
2645             // (H-6)
2646             if (l->modification_of_pic_nums_idc == 4) {
2647                 picViewIdx = picViewIdxPred - abs_diff_view_idx;
2648                 if (picViewIdx < 0)
2649                     picViewIdx += num_view_ids;
2650             }
2651
2652             // (H-7)
2653             else {
2654                 picViewIdx = picViewIdxPred + abs_diff_view_idx;
2655                 if (picViewIdx >= num_view_ids)
2656                     picViewIdx -= num_view_ids;
2657             }
2658             picViewIdxPred = picViewIdx;
2659
2660             // (H-8, H-9)
2661             targetViewId = view_ids[picViewIdx];
2662
2663             // (H-10)
2664             for (j = num_refs; j > ref_list_idx; j--)
2665                 ref_list[j] = ref_list[j - 1];
2666             ref_list[ref_list_idx++] =
2667                 find_inter_view_reference(decoder, targetViewId);
2668             n = ref_list_idx;
2669             for (j = ref_list_idx; j <= num_refs; j++) {
2670                 if (!ref_list[j])
2671                     continue;
2672                 if (ref_list[j]->base.view_id != targetViewId ||
2673                     ref_list[j]->base.poc != picture->base.poc)
2674                     ref_list[n++] = ref_list[j];
2675             }
2676         }
2677     }
2678
2679 #if DEBUG
2680     for (i = 0; i < num_refs; i++)
2681         if (!ref_list[i])
2682             GST_ERROR("list %u entry %u is empty", list, i);
2683 #endif
2684     *ref_list_count_ptr = num_refs;
2685 }
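
/* Worked example for (8-34)..(8-36), assuming frame coding with
 * log2_max_frame_num_minus4 = 0 (MaxPicNum = 16) and frame_num = 3
 * (CurrPicNum = 3): a modification_of_pic_nums_idc of 0 with
 * abs_diff_pic_num_minus1 = 0 gives picNumNoWrap = 3 - 1 = 2 and
 * picNum = 2, so the short-term reference with PicNum 2 is moved to the
 * front of the list and the remaining entries are shifted down. */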
2686
2687 /* 8.2.4.3 - Modification process for reference picture lists */
2688 static void
2689 exec_picture_refs_modification(
2690     GstVaapiDecoderH264 *decoder,
2691     GstVaapiPictureH264 *picture,
2692     GstH264SliceHdr     *slice_hdr
2693 )
2694 {
2695     GST_DEBUG("execute ref_pic_list_modification()");
2696
2697     /* RefPicList0 */
2698     if (!GST_H264_IS_I_SLICE(slice_hdr) && !GST_H264_IS_SI_SLICE(slice_hdr) &&
2699         slice_hdr->ref_pic_list_modification_flag_l0)
2700         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 0);
2701
2702     /* RefPicList1 */
2703     if (GST_H264_IS_B_SLICE(slice_hdr) &&
2704         slice_hdr->ref_pic_list_modification_flag_l1)
2705         exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
2706 }
2707
2708 static void
2709 init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
2710     GstVaapiPictureH264 *picture)
2711 {
2712     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2713     guint i, j, short_ref_count, long_ref_count;
2714
2715     short_ref_count = 0;
2716     long_ref_count  = 0;
2717     if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
2718         for (i = 0; i < priv->dpb_count; i++) {
2719             GstVaapiFrameStore * const fs = priv->dpb[i];
2720             GstVaapiPictureH264 *pic;
2721             if (!gst_vaapi_frame_store_has_frame(fs))
2722                 continue;
2723             pic = fs->buffers[0];
2724             if (pic->base.view_id != picture->base.view_id)
2725                 continue;
2726             if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2727                 priv->short_ref[short_ref_count++] = pic;
2728             else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2729                 priv->long_ref[long_ref_count++] = pic;
2730             pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2731             pic->other_field = fs->buffers[1];
2732         }
2733     }
2734     else {
2735         for (i = 0; i < priv->dpb_count; i++) {
2736             GstVaapiFrameStore * const fs = priv->dpb[i];
2737             for (j = 0; j < fs->num_buffers; j++) {
2738                 GstVaapiPictureH264 * const pic = fs->buffers[j];
2739                 if (pic->base.view_id != picture->base.view_id)
2740                     continue;
2741                 if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
2742                     priv->short_ref[short_ref_count++] = pic;
2743                 else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
2744                     priv->long_ref[long_ref_count++] = pic;
2745                 pic->structure = pic->base.structure;
2746                 pic->other_field = fs->buffers[j ^ 1];
2747             }
2748         }
2749     }
2750
2751     for (i = short_ref_count; i < priv->short_ref_count; i++)
2752         priv->short_ref[i] = NULL;
2753     priv->short_ref_count = short_ref_count;
2754
2755     for (i = long_ref_count; i < priv->long_ref_count; i++)
2756         priv->long_ref[i] = NULL;
2757     priv->long_ref_count = long_ref_count;
2758 }
2759
2760 static void
2761 init_picture_refs(
2762     GstVaapiDecoderH264 *decoder,
2763     GstVaapiPictureH264 *picture,
2764     GstH264SliceHdr     *slice_hdr
2765 )
2766 {
2767     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2768     guint i, num_refs;
2769
2770     init_picture_ref_lists(decoder, picture);
2771     init_picture_refs_pic_num(decoder, picture, slice_hdr);
2772
2773     priv->RefPicList0_count = 0;
2774     priv->RefPicList1_count = 0;
2775
2776     switch (slice_hdr->type % 5) {
2777     case GST_H264_P_SLICE:
2778     case GST_H264_SP_SLICE:
2779         init_picture_refs_p_slice(decoder, picture, slice_hdr);
2780         break;
2781     case GST_H264_B_SLICE:
2782         init_picture_refs_b_slice(decoder, picture, slice_hdr);
2783         break;
2784     default:
2785         break;
2786     }
2787
2788     exec_picture_refs_modification(decoder, picture, slice_hdr);
2789
2790     switch (slice_hdr->type % 5) {
2791     case GST_H264_B_SLICE:
2792         num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
2793         for (i = priv->RefPicList1_count; i < num_refs; i++)
2794             priv->RefPicList1[i] = NULL;
2795         priv->RefPicList1_count = num_refs;
2796
2797         // fall-through
2798     case GST_H264_P_SLICE:
2799     case GST_H264_SP_SLICE:
2800         num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
2801         for (i = priv->RefPicList0_count; i < num_refs; i++)
2802             priv->RefPicList0[i] = NULL;
2803         priv->RefPicList0_count = num_refs;
2804         break;
2805     default:
2806         break;
2807     }
2808 }
2809
2810 static gboolean
2811 init_picture(
2812     GstVaapiDecoderH264 *decoder,
2813     GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
2814 {
2815     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2816     GstVaapiPicture * const base_picture = &picture->base;
2817     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
2818
2819     priv->prev_frame_num        = priv->frame_num;
2820     priv->frame_num             = slice_hdr->frame_num;
2821     picture->frame_num          = priv->frame_num;
2822     picture->frame_num_wrap     = priv->frame_num;
2823     picture->output_flag        = TRUE; /* XXX: conformant to Annex A only */
2824     base_picture->pts           = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
2825     base_picture->type          = GST_VAAPI_PICTURE_TYPE_NONE;
2826     base_picture->view_id       = pi->view_id;
2827     base_picture->voc           = pi->voc;
2828
2829     /* Initialize extensions */
2830     switch (pi->nalu.extension_type) {
2831     case GST_H264_NAL_EXTENSION_MVC: {
2832         GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
2833
2834         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
2835         if (mvc->inter_view_flag)
2836             GST_VAAPI_PICTURE_FLAG_SET(picture,
2837                 GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
2838         if (mvc->anchor_pic_flag)
2839             GST_VAAPI_PICTURE_FLAG_SET(picture,
2840                 GST_VAAPI_PICTURE_FLAG_ANCHOR);
2841         break;
2842     }
2843     }
2844
2845     /* Reset decoder state for IDR pictures */
2846     if (pi->nalu.idr_pic_flag) {
2847         GST_DEBUG("<IDR>");
2848         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
2849         dpb_flush(decoder, picture);
2850     }
2851
2852     /* Initialize picture structure */
2853     if (!slice_hdr->field_pic_flag)
2854         base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
2855     else {
2856         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
2857         if (!slice_hdr->bottom_field_flag)
2858             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
2859         else
2860             base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
2861     }
2862     picture->structure = base_picture->structure;
2863
2864     /* Initialize reference flags */
2865     if (pi->nalu.ref_idc) {
2866         GstH264DecRefPicMarking * const dec_ref_pic_marking =
2867             &slice_hdr->dec_ref_pic_marking;
2868
2869         if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
2870             dec_ref_pic_marking->long_term_reference_flag)
2871             GST_VAAPI_PICTURE_FLAG_SET(picture,
2872                 GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
2873         else
2874             GST_VAAPI_PICTURE_FLAG_SET(picture,
2875                 GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
2876     }
2877
2878     init_picture_poc(decoder, picture, slice_hdr);
2879     return TRUE;
2880 }
2881
2882 /* 8.2.5.3 - Sliding window decoded reference picture marking process */
2883 static gboolean
2884 exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
2885 {
2886     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2887     GstH264SPS * const sps = get_sps(decoder);
2888     GstVaapiPictureH264 *ref_picture;
2889     guint i, m, max_num_ref_frames;
2890
2891     GST_DEBUG("reference picture marking process (sliding window)");
2892
2893     if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
2894         return TRUE;
2895
2896     max_num_ref_frames = sps->num_ref_frames;
2897     if (max_num_ref_frames == 0)
2898         max_num_ref_frames = 1;
2899     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
2900         max_num_ref_frames <<= 1;
2901
2902     if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
2903         return TRUE;
2904     if (priv->short_ref_count < 1)
2905         return FALSE;
2906
2907     for (m = 0, i = 1; i < priv->short_ref_count; i++) {
2908         GstVaapiPictureH264 * const picture = priv->short_ref[i];
2909         if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
2910             m = i;
2911     }
2912
2913     ref_picture = priv->short_ref[m];
2914     gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
2915     ARRAY_REMOVE_INDEX(priv->short_ref, m);
2916
2917     /* Both fields need to be marked as "unused for reference", so
2918        remove the other field from the short_ref[] list as well */
2919     if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
2920         for (i = 0; i < priv->short_ref_count; i++) {
2921             if (priv->short_ref[i] == ref_picture->other_field) {
2922                 ARRAY_REMOVE_INDEX(priv->short_ref, i);
2923                 break;
2924             }
2925         }
2926     }
2927     return TRUE;
2928 }
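
/* Example: with sps->num_ref_frames = 4 and four reference frames already
 * in short_ref[]/long_ref[], decoding a further reference frame evicts the
 * short-term entry with the smallest FrameNumWrap; when the current
 * picture is a field, the matching other field is removed from
 * short_ref[] as well. */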
2929
2930 static inline gint32
2931 get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
2932 {
2933     gint32 pic_num;
2934
2935     if (GST_VAAPI_PICTURE_IS_FRAME(picture))
2936         pic_num = picture->frame_num_wrap;
2937     else
2938         pic_num = 2 * picture->frame_num_wrap + 1;
2939     pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
2940     return pic_num;
2941 }
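
/* Example: for a frame with frame_num_wrap = 7 and an MMCO command with
 * difference_of_pic_nums_minus1 = 2, picNumX = 7 - (2 + 1) = 4, which is
 * the PicNum looked up in short_ref[] by the MMCO 1 and MMCO 3 handlers
 * below. */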
2942
2943 /* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
2944 static void
2945 exec_ref_pic_marking_adaptive_mmco_1(
2946     GstVaapiDecoderH264  *decoder,
2947     GstVaapiPictureH264  *picture,
2948     GstH264RefPicMarking *ref_pic_marking
2949 )
2950 {
2951     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2952     gint32 i, picNumX;
2953
2954     picNumX = get_picNumX(picture, ref_pic_marking);
2955     i = find_short_term_reference(decoder, picNumX);
2956     if (i < 0)
2957         return;
2958
2959     gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
2960         GST_VAAPI_PICTURE_IS_FRAME(picture));
2961     ARRAY_REMOVE_INDEX(priv->short_ref, i);
2962 }
2963
2964 /* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
2965 static void
2966 exec_ref_pic_marking_adaptive_mmco_2(
2967     GstVaapiDecoderH264  *decoder,
2968     GstVaapiPictureH264  *picture,
2969     GstH264RefPicMarking *ref_pic_marking
2970 )
2971 {
2972     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2973     gint32 i;
2974
2975     i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
2976     if (i < 0)
2977         return;
2978
2979     gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
2980         GST_VAAPI_PICTURE_IS_FRAME(picture));
2981     ARRAY_REMOVE_INDEX(priv->long_ref, i);
2982 }
2983
2984 /* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
2985 static void
2986 exec_ref_pic_marking_adaptive_mmco_3(
2987     GstVaapiDecoderH264  *decoder,
2988     GstVaapiPictureH264  *picture,
2989     GstH264RefPicMarking *ref_pic_marking
2990 )
2991 {
2992     GstVaapiDecoderH264Private * const priv = &decoder->priv;
2993     GstVaapiPictureH264 *ref_picture, *other_field;
2994     gint32 i, picNumX;
2995
2996     for (i = 0; i < priv->long_ref_count; i++) {
2997         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
2998             break;
2999     }
3000     if (i != priv->long_ref_count) {
3001         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3002         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3003     }
3004
3005     picNumX = get_picNumX(picture, ref_pic_marking);
3006     i = find_short_term_reference(decoder, picNumX);
3007     if (i < 0)
3008         return;
3009
3010     ref_picture = priv->short_ref[i];
3011     ARRAY_REMOVE_INDEX(priv->short_ref, i);
3012     priv->long_ref[priv->long_ref_count++] = ref_picture;
3013
3014     ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3015     gst_vaapi_picture_h264_set_reference(ref_picture,
3016         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3017         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3018
3019     /* Assign LongTermFrameIdx to the other field if it was also
3020        marked as "used for long-term reference" */
3021     other_field = ref_picture->other_field;
3022     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3023         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3024 }
3025
3026 /* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
3027  * as "unused for reference" */
3028 static void
3029 exec_ref_pic_marking_adaptive_mmco_4(
3030     GstVaapiDecoderH264  *decoder,
3031     GstVaapiPictureH264  *picture,
3032     GstH264RefPicMarking *ref_pic_marking
3033 )
3034 {
3035     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3036     gint32 i, long_term_frame_idx;
3037
3038     long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
3039
3040     for (i = 0; i < priv->long_ref_count; i++) {
3041         if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
3042             continue;
3043         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
3044         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3045         i--;
3046     }
3047 }
3048
3049 /* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
3050 static void
3051 exec_ref_pic_marking_adaptive_mmco_5(
3052     GstVaapiDecoderH264  *decoder,
3053     GstVaapiPictureH264  *picture,
3054     GstH264RefPicMarking *ref_pic_marking
3055 )
3056 {
3057     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3058
3059     dpb_flush(decoder, picture);
3060
3061     priv->prev_pic_has_mmco5 = TRUE;
3062
3063     /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
3064     priv->frame_num = 0;
3065     priv->frame_num_offset = 0;
3066     picture->frame_num = 0;
3067
3068     /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1) */
3069     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
3070         picture->field_poc[TOP_FIELD] -= picture->base.poc;
3071     if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
3072         picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
3073     picture->base.poc = 0;
3074 }
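/* Illustrative sketch (not part of the upstream code): POC rebasing performed
 * by MMCO 5, assuming a hypothetical frame picture with field_poc = { 8, 10 }
 * and base.poc = 8:
 *
 *   field_poc[TOP_FIELD]    = 8  - 8 = 0
 *   field_poc[BOTTOM_FIELD] = 10 - 8 = 2
 *   base.poc                = 0
 *
 * frame_num and frame_num_offset are reset to 0 as well, per 7.4.3. */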
3075
3076 /* 8.2.5.4.6. Assign a long-term frame index to the current picture */
3077 static void
3078 exec_ref_pic_marking_adaptive_mmco_6(
3079     GstVaapiDecoderH264  *decoder,
3080     GstVaapiPictureH264  *picture,
3081     GstH264RefPicMarking *ref_pic_marking
3082 )
3083 {
3084     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3085     GstVaapiPictureH264 *other_field;
3086     guint i;
3087
3088     for (i = 0; i < priv->long_ref_count; i++) {
3089         if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
3090             break;
3091     }
3092     if (i != priv->long_ref_count) {
3093         gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
3094         ARRAY_REMOVE_INDEX(priv->long_ref, i);
3095     }
3096
3097     picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3098     gst_vaapi_picture_h264_set_reference(picture,
3099         GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
3100         GST_VAAPI_PICTURE_IS_COMPLETE(picture));
3101
3102     /* Assign LongTermFrameIdx to the other field if it was also
3103        marked as "used for long-term reference" */
3104     other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
3105     if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
3106         other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
3107 }
3108
3109 /* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
3110 static gboolean
3111 exec_ref_pic_marking_adaptive(
3112     GstVaapiDecoderH264     *decoder,
3113     GstVaapiPictureH264     *picture,
3114     GstH264DecRefPicMarking *dec_ref_pic_marking
3115 )
3116 {
3117     guint i;
3118
3119     GST_DEBUG("reference picture marking process (adaptive memory control)");
3120
3121     typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
3122         GstVaapiDecoderH264  *decoder,
3123         GstVaapiPictureH264  *picture,
3124         GstH264RefPicMarking *ref_pic_marking
3125     );
3126
3127     static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
3128         NULL,
3129         exec_ref_pic_marking_adaptive_mmco_1,
3130         exec_ref_pic_marking_adaptive_mmco_2,
3131         exec_ref_pic_marking_adaptive_mmco_3,
3132         exec_ref_pic_marking_adaptive_mmco_4,
3133         exec_ref_pic_marking_adaptive_mmco_5,
3134         exec_ref_pic_marking_adaptive_mmco_6,
3135     };
3136
3137     for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
3138         GstH264RefPicMarking * const ref_pic_marking =
3139             &dec_ref_pic_marking->ref_pic_marking[i];
3140
3141         const guint mmco = ref_pic_marking->memory_management_control_operation;
3142         if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
3143             mmco_funcs[mmco](decoder, picture, ref_pic_marking);
3144         else {
3145             GST_ERROR("unhandled MMCO %u", mmco);
3146             return FALSE;
3147         }
3148     }
3149     return TRUE;
3150 }
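/* Illustrative sketch (not part of the upstream code): for a hypothetical
 * marking list ops[] containing MMCO 1 followed by MMCO 6, the dispatch loop
 * above behaves like:
 *
 *   exec_ref_pic_marking_adaptive_mmco_1(decoder, picture, &ops[0]);
 *   exec_ref_pic_marking_adaptive_mmco_6(decoder, picture, &ops[1]);
 *
 * Any memory_management_control_operation without a table entry takes the
 * GST_ERROR path and makes the marking process return FALSE. */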
3151
3152 /* 8.2.5 - Execute reference picture marking process */
3153 static gboolean
3154 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3155 {
3156     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3157
3158     priv->prev_pic_has_mmco5 = FALSE;
3159     priv->prev_pic_structure = picture->structure;
3160
3161     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
3162         g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
3163
3164     if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
3165         return TRUE;
3166
3167     if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
3168         GstH264DecRefPicMarking * const dec_ref_pic_marking =
3169             &picture->last_slice_hdr->dec_ref_pic_marking;
3170         if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
3171             if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
3172                 return FALSE;
3173         }
3174         else {
3175             if (!exec_ref_pic_marking_sliding_window(decoder))
3176                 return FALSE;
3177         }
3178     }
3179     return TRUE;
3180 }
3181
3182 static void
3183 vaapi_init_picture(VAPictureH264 *pic)
3184 {
3185     pic->picture_id           = VA_INVALID_ID;
3186     pic->frame_idx            = 0;
3187     pic->flags                = VA_PICTURE_H264_INVALID;
3188     pic->TopFieldOrderCnt     = 0;
3189     pic->BottomFieldOrderCnt  = 0;
3190 }
3191
3192 static void
3193 vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
3194     guint picture_structure)
3195 {
3196     if (!picture_structure)
3197         picture_structure = picture->structure;
3198
3199     pic->picture_id = picture->base.surface_id;
3200     pic->flags = 0;
3201
3202     if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
3203         pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
3204         pic->frame_idx = picture->long_term_frame_idx;
3205     }
3206     else {
3207         if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
3208             pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
3209         pic->frame_idx = picture->frame_num;
3210     }
3211
3212     switch (picture_structure) {
3213     case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
3214         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3215         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3216         break;
3217     case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
3218         pic->flags |= VA_PICTURE_H264_TOP_FIELD;
3219         pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
3220         pic->BottomFieldOrderCnt = 0;
3221         break;
3222     case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
3223         pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
3224         pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
3225         pic->TopFieldOrderCnt = 0;
3226         break;
3227     }
3228 }
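/* Illustrative sketch (not part of the upstream code): mapping of a
 * hypothetical short-term reference top field with frame_num = 5 and
 * field_poc[TOP_FIELD] = 12 into VAPictureH264 by the function above:
 *
 *   pic->flags               = VA_PICTURE_H264_SHORT_TERM_REFERENCE |
 *                              VA_PICTURE_H264_TOP_FIELD;
 *   pic->frame_idx           = 5;
 *   pic->TopFieldOrderCnt    = 12;
 *   pic->BottomFieldOrderCnt = 0; */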
3229
3230 static void
3231 vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
3232     GstVaapiPictureH264 *picture)
3233 {
3234     vaapi_fill_picture(pic, picture, 0);
3235
3236     /* H.8.4 - MVC inter prediction and inter-view prediction process */
3237     if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
3238         /* The inter-view reference components and inter-view only
3239            reference components that are included in the reference
3240            picture lists are considered as not being marked as "used for
3241            short-term reference" or "used for long-term reference" */
3242         pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
3243                         VA_PICTURE_H264_LONG_TERM_REFERENCE);
3244     }
3245 }
3246
3247 static gboolean
3248 fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
3249 {
3250     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3251     GstVaapiPicture * const base_picture = &picture->base;
3252     GstH264PPS * const pps = get_pps(decoder);
3253     GstH264SPS * const sps = get_sps(decoder);
3254     VAPictureParameterBufferH264 * const pic_param = base_picture->param;
3255     guint i, n;
3256
3257     /* Fill in VAPictureParameterBufferH264 */
3258     vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
3259
3260     for (i = 0, n = 0; i < priv->dpb_count; i++) {
3261         GstVaapiFrameStore * const fs = priv->dpb[i];
3262         if ((gst_vaapi_frame_store_has_reference(fs) &&
3263              fs->view_id == picture->base.view_id) ||
3264             (gst_vaapi_frame_store_has_inter_view(fs) &&
3265              is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
3266             vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
3267                 fs->buffers[0], fs->structure);
3268         if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
3269             break;
3270     }
3271     for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
3272         vaapi_init_picture(&pic_param->ReferenceFrames[n]);
3273
3274 #define COPY_FIELD(s, f) \
3275     pic_param->f = (s)->f
3276
3277 #define COPY_BFM(a, s, f) \
3278     pic_param->a.bits.f = (s)->f
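/* Illustrative note (not part of the upstream code): the helper macros above
 * expand as follows for the copies performed a few lines below:
 *
 *   COPY_FIELD(sps, bit_depth_luma_minus8);
 *     => pic_param->bit_depth_luma_minus8 = (sps)->bit_depth_luma_minus8;
 *   COPY_BFM(seq_fields, sps, chroma_format_idc);
 *     => pic_param->seq_fields.bits.chroma_format_idc = (sps)->chroma_format_idc; */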
3279
3280     pic_param->picture_width_in_mbs_minus1  = priv->mb_width - 1;
3281     pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
3282     pic_param->frame_num                    = priv->frame_num;
3283
3284     COPY_FIELD(sps, bit_depth_luma_minus8);
3285     COPY_FIELD(sps, bit_depth_chroma_minus8);
3286     COPY_FIELD(sps, num_ref_frames);
3287     COPY_FIELD(pps, num_slice_groups_minus1);
3288     COPY_FIELD(pps, slice_group_map_type);
3289     COPY_FIELD(pps, slice_group_change_rate_minus1);
3290     COPY_FIELD(pps, pic_init_qp_minus26);
3291     COPY_FIELD(pps, pic_init_qs_minus26);
3292     COPY_FIELD(pps, chroma_qp_index_offset);
3293     COPY_FIELD(pps, second_chroma_qp_index_offset);
3294
3295     pic_param->seq_fields.value                                         = 0; /* reset all bits */
3296     pic_param->seq_fields.bits.residual_colour_transform_flag           = sps->separate_colour_plane_flag;
3297     pic_param->seq_fields.bits.MinLumaBiPredSize8x8                     = sps->level_idc >= 31; /* A.3.3.2 */
3298
3299     COPY_BFM(seq_fields, sps, chroma_format_idc);
3300     COPY_BFM(seq_fields, sps, gaps_in_frame_num_value_allowed_flag);
3301     COPY_BFM(seq_fields, sps, frame_mbs_only_flag);
3302     COPY_BFM(seq_fields, sps, mb_adaptive_frame_field_flag);
3303     COPY_BFM(seq_fields, sps, direct_8x8_inference_flag);
3304     COPY_BFM(seq_fields, sps, log2_max_frame_num_minus4);
3305     COPY_BFM(seq_fields, sps, pic_order_cnt_type);
3306     COPY_BFM(seq_fields, sps, log2_max_pic_order_cnt_lsb_minus4);
3307     COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
3308
3309     pic_param->pic_fields.value                                         = 0; /* reset all bits */
3310     pic_param->pic_fields.bits.field_pic_flag                           = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
3311     pic_param->pic_fields.bits.reference_pic_flag                       = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
3312
3313     COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
3314     COPY_BFM(pic_fields, pps, weighted_pred_flag);
3315     COPY_BFM(pic_fields, pps, weighted_bipred_idc);
3316     COPY_BFM(pic_fields, pps, transform_8x8_mode_flag);
3317     COPY_BFM(pic_fields, pps, constrained_intra_pred_flag);
3318     COPY_BFM(pic_fields, pps, pic_order_present_flag);
3319     COPY_BFM(pic_fields, pps, deblocking_filter_control_present_flag);
3320     COPY_BFM(pic_fields, pps, redundant_pic_cnt_present_flag);
3321     return TRUE;
3322 }
3323
3324 /* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
3325 static gboolean
3326 is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3327 {
3328     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3329     GstH264PPS * const pps = slice_hdr->pps;
3330     GstH264SPS * const sps = pps->sequence;
3331     GstH264SliceHdr *prev_slice_hdr;
3332
3333     if (!prev_pi)
3334         return TRUE;
3335     prev_slice_hdr = &prev_pi->data.slice_hdr;
3336
3337 #define CHECK_EXPR(expr, field_name) do {              \
3338         if (!(expr)) {                                 \
3339             GST_DEBUG(field_name " differs in value"); \
3340             return TRUE;                               \
3341         }                                              \
3342     } while (0)
3343
3344 #define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
3345     CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
3346
3347     /* view_id differs in value and VOIdx of current slice_hdr is less
3348        than the VOIdx of the prev_slice_hdr */
3349     CHECK_VALUE(pi, prev_pi, view_id);
3350
3351     /* frame_num differs in value, regardless of whether it was inferred to 0 */
3352     CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
3353
3354     /* pic_parameter_set_id differs in value */
3355     CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
3356
3357     /* field_pic_flag differs in value */
3358     CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
3359
3360     /* bottom_field_flag is present in both and differs in value */
3361     if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
3362         CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
3363
3364     /* nal_ref_idc differs in value, with one of the nal_ref_idc values equal to 0 */
3365     CHECK_EXPR((pi->nalu.ref_idc != 0) ==
3366                (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
3367
3368     /* POC type is 0 for both and either pic_order_cnt_lsb differs in
3369        value or delta_pic_order_cnt_bottom differs in value */
3370     if (sps->pic_order_cnt_type == 0) {
3371         CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
3372         if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
3373             CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
3374     }
3375
3376     /* POC type is 1 for both and either delta_pic_order_cnt[0]
3377        differs in value or delta_pic_order_cnt[1] differs in value */
3378     else if (sps->pic_order_cnt_type == 1) {
3379         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
3380         CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
3381     }
3382
3383     /* IdrPicFlag differs in value */
3384     CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
3385
3386     /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
3387     if (pi->nalu.idr_pic_flag)
3388         CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
3389
3390 #undef CHECK_EXPR
3391 #undef CHECK_VALUE
3392     return FALSE;
3393 }
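/* Illustrative note (not part of the upstream code): each CHECK_VALUE() in the
 * function above expands to a CHECK_EXPR() comparison, e.g.:
 *
 *   CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num)
 *     => if (!((slice_hdr)->frame_num == (prev_slice_hdr)->frame_num)) {
 *            GST_DEBUG("frame_num differs in value");
 *            return TRUE;
 *        }
 *
 * Reaching the end of the function means no condition of 7.4.1.2.4 fired, so
 * the slice belongs to the same primary coded picture as the previous one. */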
3394
3395 /* Detection of a new access unit, assuming we are already in presence
3396    of a new picture */
3397 static inline gboolean
3398 is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
3399 {
3400     if (!prev_pi || prev_pi->view_id == pi->view_id)
3401         return TRUE;
3402     return pi->voc < prev_pi->voc;
3403 }
3404
3405 /* Finds the first field picture corresponding to the supplied picture */
3406 static GstVaapiPictureH264 *
3407 find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
3408 {
3409     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3410     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3411     GstVaapiFrameStore *fs;
3412
3413     if (!slice_hdr->field_pic_flag)
3414         return NULL;
3415
3416     fs = priv->prev_frames[pi->voc];
3417     if (!fs || gst_vaapi_frame_store_has_frame(fs))
3418         return NULL;
3419
3420     if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
3421         return fs->buffers[0];
3422     return NULL;
3423 }
3424
3425 static GstVaapiDecoderStatus
3426 decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3427 {
3428     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3429     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3430     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3431     GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
3432     GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
3433     GstVaapiPictureH264 *picture, *first_field;
3434     GstVaapiDecoderStatus status;
3435
3436     g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3437     g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
3438
3439     /* Only decode base stream for MVC */
3440     switch (sps->profile_idc) {
3441     case GST_H264_PROFILE_MULTIVIEW_HIGH:
3442     case GST_H264_PROFILE_STEREO_HIGH:
3443         if (0) {
3444             GST_DEBUG("drop picture from substream");
3445             return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
3446         }
3447         break;
3448     }
3449
3450     status = ensure_context(decoder, sps);
3451     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3452         return status;
3453
3454     priv->decoder_state = 0;
3455
3456     first_field = find_first_field(decoder, pi);
3457     if (first_field) {
3458         /* Re-use current picture where the first field was decoded */
3459         picture = gst_vaapi_picture_h264_new_field(first_field);
3460         if (!picture) {
3461             GST_ERROR("failed to allocate field picture");
3462             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3463         }
3464     }
3465     else {
3466         /* Create new picture */
3467         picture = gst_vaapi_picture_h264_new(decoder);
3468         if (!picture) {
3469             GST_ERROR("failed to allocate picture");
3470             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3471         }
3472     }
3473     gst_vaapi_picture_replace(&priv->current_picture, picture);
3474     gst_vaapi_picture_unref(picture);
3475
3476     /* Clear inter-view references list if this is the primary coded
3477        picture of the current access unit */
3478     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3479         g_ptr_array_set_size(priv->inter_views, 0);
3480
3481     /* Update cropping rectangle */
3482     if (sps->frame_cropping_flag) {
3483         GstVaapiRectangle crop_rect;
3484         crop_rect.x = sps->crop_rect_x;
3485         crop_rect.y = sps->crop_rect_y;
3486         crop_rect.width = sps->crop_rect_width;
3487         crop_rect.height = sps->crop_rect_height;
3488         gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
3489     }
3490
3491     status = ensure_quant_matrix(decoder, picture);
3492     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
3493         GST_ERROR("failed to reset quantizer matrix");
3494         return status;
3495     }
3496
3497     if (!init_picture(decoder, picture, pi))
3498         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3499     if (!fill_picture(decoder, picture))
3500         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3501
3502     priv->decoder_state = pi->state;
3503     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3504 }
3505
3506 static inline guint
3507 get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
3508 {
3509     guint epb_count;
3510
3511     epb_count = slice_hdr->n_emulation_prevention_bytes;
3512     return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
3513 }
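/* Illustrative sketch (not part of the upstream code): worked example of the
 * bit offset computed above for a hypothetical slice with a 1-byte NAL header,
 * a 42-bit parsed slice header and 2 emulation prevention bytes:
 *
 *   slice_data_bit_offset = 8 * 1 + 42 - 2 * 8 = 34 */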
3514
3515 static gboolean
3516 fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
3517     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3518 {
3519     VASliceParameterBufferH264 * const slice_param = slice->param;
3520     GstH264PPS * const pps = get_pps(decoder);
3521     GstH264SPS * const sps = get_sps(decoder);
3522     GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
3523     guint num_weight_tables = 0;
3524     gint i, j;
3525
3526     if (pps->weighted_pred_flag &&
3527         (GST_H264_IS_P_SLICE(slice_hdr) || GST_H264_IS_SP_SLICE(slice_hdr)))
3528         num_weight_tables = 1;
3529     else if (pps->weighted_bipred_idc == 1 && GST_H264_IS_B_SLICE(slice_hdr))
3530         num_weight_tables = 2;
3531     else
3532         num_weight_tables = 0;
3533
3534     slice_param->luma_log2_weight_denom   = 0;
3535     slice_param->chroma_log2_weight_denom = 0;
3536     slice_param->luma_weight_l0_flag      = 0;
3537     slice_param->chroma_weight_l0_flag    = 0;
3538     slice_param->luma_weight_l1_flag      = 0;
3539     slice_param->chroma_weight_l1_flag    = 0;
3540
3541     if (num_weight_tables < 1)
3542         return TRUE;
3543
3544     slice_param->luma_log2_weight_denom   = w->luma_log2_weight_denom;
3545     slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
3546
3547     slice_param->luma_weight_l0_flag = 1;
3548     for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3549         slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
3550         slice_param->luma_offset_l0[i] = w->luma_offset_l0[i];
3551     }
3552
3553     slice_param->chroma_weight_l0_flag = sps->chroma_array_type != 0;
3554     if (slice_param->chroma_weight_l0_flag) {
3555         for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
3556             for (j = 0; j < 2; j++) {
3557                 slice_param->chroma_weight_l0[i][j] = w->chroma_weight_l0[i][j];
3558                 slice_param->chroma_offset_l0[i][j] = w->chroma_offset_l0[i][j];
3559             }
3560         }
3561     }
3562
3563     if (num_weight_tables < 2)
3564         return TRUE;
3565
3566     slice_param->luma_weight_l1_flag = 1;
3567     for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3568         slice_param->luma_weight_l1[i] = w->luma_weight_l1[i];
3569         slice_param->luma_offset_l1[i] = w->luma_offset_l1[i];
3570     }
3571
3572     slice_param->chroma_weight_l1_flag = sps->chroma_array_type != 0;
3573     if (slice_param->chroma_weight_l1_flag) {
3574         for (i = 0; i <= slice_param->num_ref_idx_l1_active_minus1; i++) {
3575             for (j = 0; j < 2; j++) {
3576                 slice_param->chroma_weight_l1[i][j] = w->chroma_weight_l1[i][j];
3577                 slice_param->chroma_offset_l1[i][j] = w->chroma_offset_l1[i][j];
3578             }
3579         }
3580     }
3581     return TRUE;
3582 }
3583
3584 static gboolean
3585 fill_RefPicList(GstVaapiDecoderH264 *decoder,
3586     GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
3587 {
3588     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3589     VASliceParameterBufferH264 * const slice_param = slice->param;
3590     guint i, num_ref_lists = 0;
3591
3592     slice_param->num_ref_idx_l0_active_minus1 = 0;
3593     slice_param->num_ref_idx_l1_active_minus1 = 0;
3594
3595     if (GST_H264_IS_B_SLICE(slice_hdr))
3596         num_ref_lists = 2;
3597     else if (GST_H264_IS_I_SLICE(slice_hdr))
3598         num_ref_lists = 0;
3599     else
3600         num_ref_lists = 1;
3601
3602     if (num_ref_lists < 1)
3603         return TRUE;
3604
3605     slice_param->num_ref_idx_l0_active_minus1 =
3606         slice_hdr->num_ref_idx_l0_active_minus1;
3607
3608     for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
3609         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
3610             priv->RefPicList0[i]);
3611     for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
3612         vaapi_init_picture(&slice_param->RefPicList0[i]);
3613
3614     if (num_ref_lists < 2)
3615         return TRUE;
3616
3617     slice_param->num_ref_idx_l1_active_minus1 =
3618         slice_hdr->num_ref_idx_l1_active_minus1;
3619
3620     for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
3621         vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
3622             priv->RefPicList1[i]);
3623     for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
3624         vaapi_init_picture(&slice_param->RefPicList1[i]);
3625     return TRUE;
3626 }
3627
3628 static gboolean
3629 fill_slice(GstVaapiDecoderH264 *decoder,
3630     GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
3631 {
3632     VASliceParameterBufferH264 * const slice_param = slice->param;
3633     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3634
3635     /* Fill in VASliceParameterBufferH264 */
3636     slice_param->slice_data_bit_offset =
3637         get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
3638     slice_param->first_mb_in_slice              = slice_hdr->first_mb_in_slice;
3639     slice_param->slice_type                     = slice_hdr->type % 5;
3640     slice_param->direct_spatial_mv_pred_flag    = slice_hdr->direct_spatial_mv_pred_flag;
3641     slice_param->cabac_init_idc                 = slice_hdr->cabac_init_idc;
3642     slice_param->slice_qp_delta                 = slice_hdr->slice_qp_delta;
3643     slice_param->disable_deblocking_filter_idc  = slice_hdr->disable_deblocking_filter_idc;
3644     slice_param->slice_alpha_c0_offset_div2     = slice_hdr->slice_alpha_c0_offset_div2;
3645     slice_param->slice_beta_offset_div2         = slice_hdr->slice_beta_offset_div2;
3646
3647     if (!fill_RefPicList(decoder, slice, slice_hdr))
3648         return FALSE;
3649     if (!fill_pred_weight_table(decoder, slice, slice_hdr))
3650         return FALSE;
3651     return TRUE;
3652 }
3653
3654 static GstVaapiDecoderStatus
3655 decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3656 {
3657     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3658     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3659     GstVaapiPictureH264 * const picture = priv->current_picture;
3660     GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
3661     GstVaapiSlice *slice;
3662     GstBuffer * const buffer =
3663         GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
3664     GstMapInfo map_info;
3665
3666     GST_DEBUG("slice (%u bytes)", pi->nalu.size);
3667
3668     if (!is_valid_state(pi->state,
3669             GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
3670         GST_WARNING("failed to receive enough headers to decode slice");
3671         return GST_VAAPI_DECODER_STATUS_SUCCESS;
3672     }
3673
3674     if (!ensure_pps(decoder, slice_hdr->pps)) {
3675         GST_ERROR("failed to activate PPS");
3676         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3677     }
3678
3679     if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
3680         GST_ERROR("failed to activate SPS");
3681         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3682     }
3683
3684     if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
3685         GST_ERROR("failed to map buffer");
3686         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3687     }
3688
3689     /* Check whether this is the first/last slice in the current access unit */
3690     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
3691         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
3692     if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
3693         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
3694
3695     slice = GST_VAAPI_SLICE_NEW(H264, decoder,
3696         (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
3697     gst_buffer_unmap(buffer, &map_info);
3698     if (!slice) {
3699         GST_ERROR("failed to allocate slice");
3700         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3701     }
3702
3703     init_picture_refs(decoder, picture, slice_hdr);
3704     if (!fill_slice(decoder, slice, pi)) {
3705         gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
3706         return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
3707     }
3708
3709     gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
3710     picture->last_slice_hdr = slice_hdr;
3711     priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
3712     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3713 }
3714
3715 static inline gint
3716 scan_for_start_code(GstAdapter *adapter, guint ofs, guint size, guint32 *scp)
3717 {
3718     return (gint)gst_adapter_masked_scan_uint32_peek(adapter,
3719                                                      0xffffff00, 0x00000100,
3720                                                      ofs, size,
3721                                                      scp);
3722 }
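/* Illustrative note (not part of the upstream code): the mask/pattern pair
 * 0xffffff00 / 0x00000100 matches any 32-bit window whose first three bytes
 * are the Annex-B start code 00 00 01, e.g.:
 *
 *   bytes:  00 00 01 65      (start code + first byte of an IDR slice NAL)
 *   window: 0x00000165
 *   masked: 0x00000165 & 0xffffff00 = 0x00000100  => match
 *
 * The returned offset points at the first 00 byte of the start code. */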
3723
3724 static GstVaapiDecoderStatus
3725 decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
3726 {
3727     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3728     GstVaapiParserInfoH264 * const pi = unit->parsed_info;
3729     GstVaapiDecoderStatus status;
3730
3731     priv->decoder_state |= pi->state;
3732     switch (pi->nalu.type) {
3733     case GST_H264_NAL_SPS:
3734         status = decode_sps(decoder, unit);
3735         break;
3736     case GST_H264_NAL_SUBSET_SPS:
3737         status = decode_subset_sps(decoder, unit);
3738         break;
3739     case GST_H264_NAL_PPS:
3740         status = decode_pps(decoder, unit);
3741         break;
3742     case GST_H264_NAL_SLICE_EXT:
3743     case GST_H264_NAL_SLICE_IDR:
3744         /* fall-through. IDR specifics are handled in init_picture() */
3745     case GST_H264_NAL_SLICE:
3746         status = decode_slice(decoder, unit);
3747         break;
3748     case GST_H264_NAL_SEQ_END:
3749     case GST_H264_NAL_STREAM_END:
3750         status = decode_sequence_end(decoder);
3751         break;
3752     case GST_H264_NAL_SEI:
3753         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3754         break;
3755     default:
3756         GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
3757         status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3758         break;
3759     }
3760     return status;
3761 }
3762
3763 static GstVaapiDecoderStatus
3764 gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
3765     const guchar *buf, guint buf_size)
3766 {
3767     GstVaapiDecoderH264 * const decoder =
3768         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3769     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3770     GstVaapiDecoderStatus status;
3771     GstVaapiDecoderUnit unit;
3772     GstVaapiParserInfoH264 *pi = NULL;
3773     GstH264ParserResult result;
3774     guint i, ofs, num_sps, num_pps;
3775
3776     unit.parsed_info = NULL;
3777
3778     if (buf_size < 8)
3779         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3780
3781     if (buf[0] != 1) {
3782         GST_ERROR("failed to decode codec-data, not in avcC format");
3783         return GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
3784     }
3785
3786     priv->nal_length_size = (buf[4] & 0x03) + 1;
3787
3788     num_sps = buf[5] & 0x1f;
3789     ofs = 6;
3790
3791     for (i = 0; i < num_sps; i++) {
3792         pi = gst_vaapi_parser_info_h264_new();
3793         if (!pi)
3794             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3795         unit.parsed_info = pi;
3796
3797         result = gst_h264_parser_identify_nalu_avc(
3798             priv->parser,
3799             buf, ofs, buf_size, 2,
3800             &pi->nalu
3801         );
3802         if (result != GST_H264_PARSER_OK) {
3803             status = get_status(result);
3804             goto cleanup;
3805         }
3806
3807         status = parse_sps(decoder, &unit);
3808         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3809             goto cleanup;
3810         ofs = pi->nalu.offset + pi->nalu.size;
3811
3812         status = decode_sps(decoder, &unit);
3813         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3814             goto cleanup;
3815         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3816     }
3817
3818     num_pps = buf[ofs];
3819     ofs++;
3820
3821     for (i = 0; i < num_pps; i++) {
3822         pi = gst_vaapi_parser_info_h264_new();
3823         if (!pi)
3824             return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3825         unit.parsed_info = pi;
3826
3827         result = gst_h264_parser_identify_nalu_avc(
3828             priv->parser,
3829             buf, ofs, buf_size, 2,
3830             &pi->nalu
3831         );
3832         if (result != GST_H264_PARSER_OK) {
3833             status = get_status(result);
3834             goto cleanup;
3835         }
3836
3837         status = parse_pps(decoder, &unit);
3838         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3839             goto cleanup;
3840         ofs = pi->nalu.offset + pi->nalu.size;
3841
3842         status = decode_pps(decoder, &unit);
3843         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3844             goto cleanup;
3845         gst_vaapi_parser_info_h264_replace(&pi, NULL);
3846     }
3847
3848     priv->is_avcC = TRUE;
3849     status = GST_VAAPI_DECODER_STATUS_SUCCESS;
3850
3851 cleanup:
3852     gst_vaapi_parser_info_h264_replace(&pi, NULL);
3853     return status;
3854 }
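/* Illustrative sketch (not part of the upstream code): avcC layout assumed by
 * the codec-data parser above, for a hypothetical configuration record:
 *
 *   buf[0]   = 0x01     configurationVersion
 *   buf[1..3]           profile / compatibility / level
 *   buf[4]   = 0xff     => nal_length_size = (0xff & 0x03) + 1 = 4
 *   buf[5]   = 0xe1     => num_sps = 0xe1 & 0x1f = 1
 *   buf[6..]            SPS entries, each a 16-bit size followed by the NAL
 *   buf[ofs]            num_pps, followed by PPS entries in the same
 *                       16-bit size + NAL format */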
3855
3856 static GstVaapiDecoderStatus
3857 ensure_decoder(GstVaapiDecoderH264 *decoder)
3858 {
3859     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3860     GstVaapiDecoderStatus status;
3861
3862     if (!priv->is_opened) {
3863         priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
3864         if (!priv->is_opened)
3865             return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
3866
3867         status = gst_vaapi_decoder_decode_codec_data(
3868             GST_VAAPI_DECODER_CAST(decoder));
3869         if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3870             return status;
3871     }
3872     return GST_VAAPI_DECODER_STATUS_SUCCESS;
3873 }
3874
3875 static GstVaapiDecoderStatus
3876 gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
3877     GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
3878 {
3879     GstVaapiDecoderH264 * const decoder =
3880         GST_VAAPI_DECODER_H264_CAST(base_decoder);
3881     GstVaapiDecoderH264Private * const priv = &decoder->priv;
3882     GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
3883     GstVaapiParserInfoH264 *pi;
3884     GstVaapiDecoderStatus status;
3885     GstH264ParserResult result;
3886     guchar *buf;
3887     guint i, size, buf_size, nalu_size, flags;
3888     guint32 start_code;
3889     gint ofs, ofs2;
3890     gboolean at_au_end = FALSE;
3891
3892     status = ensure_decoder(decoder);
3893     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3894         return status;
3895
3896     switch (priv->stream_alignment) {
3897     case GST_VAAPI_STREAM_ALIGN_H264_NALU:
3898     case GST_VAAPI_STREAM_ALIGN_H264_AU:
3899         size = gst_adapter_available_fast(adapter);
3900         break;
3901     default:
3902         size = gst_adapter_available(adapter);
3903         break;
3904     }
3905
3906     if (priv->is_avcC) {
3907         if (size < priv->nal_length_size)
3908             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3909
3910         buf = (guchar *)&start_code;
3911         g_assert(priv->nal_length_size <= sizeof(start_code));
3912         gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
3913
3914         nalu_size = 0;
3915         for (i = 0; i < priv->nal_length_size; i++)
3916             nalu_size = (nalu_size << 8) | buf[i];
3917
3918         buf_size = priv->nal_length_size + nalu_size;
3919         if (size < buf_size)
3920             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3921         else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3922             at_au_end = (buf_size == size);
3923     }
3924     else {
3925         if (size < 4)
3926             return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3927
3928         if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
3929             buf_size = size;
3930         else {
3931             ofs = scan_for_start_code(adapter, 0, size, NULL);
3932             if (ofs < 0)
3933                 return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3934
3935             if (ofs > 0) {
3936                 gst_adapter_flush(adapter, ofs);
3937                 size -= ofs;
3938             }
3939
3940             ofs2 = ps->input_offset2 - ofs - 4;
3941             if (ofs2 < 4)
3942                 ofs2 = 4;
3943
3944             ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
3945                 scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
3946             if (ofs < 0) {
3947                 /* Assume the whole NAL unit is present if end-of-stream
3948                    or stream buffers are aligned on access unit boundaries */
3949                 if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
3950                     at_au_end = TRUE;
3951                 else if (!at_eos) {
3952                     ps->input_offset2 = size;
3953                     return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3954                 }
3955                 ofs = size;
3956             }
3957             buf_size = ofs;
3958         }
3959     }
3960     ps->input_offset2 = 0;
3961
3962     buf = (guchar *)gst_adapter_map(adapter, buf_size);
3963     if (!buf)
3964         return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
3965
3966     unit->size = buf_size;
3967
3968     pi = gst_vaapi_parser_info_h264_new();
3969     if (!pi)
3970         return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
3971
3972     gst_vaapi_decoder_unit_set_parsed_info(unit,
3973         pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
3974
3975     if (priv->is_avcC)
3976         result = gst_h264_parser_identify_nalu_avc(priv->parser,
3977             buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
3978     else
3979         result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
3980             buf, 0, buf_size, &pi->nalu);
3981     status = get_status(result);
3982     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
3983         return status;
3984
3985     switch (pi->nalu.type) {
3986     case GST_H264_NAL_SPS:
3987         status = parse_sps(decoder, unit);
3988         break;
3989     case GST_H264_NAL_SUBSET_SPS:
3990         status = parse_subset_sps(decoder, unit);
3991         break;
3992     case GST_H264_NAL_PPS:
3993         status = parse_pps(decoder, unit);
3994         break;
3995     case GST_H264_NAL_SEI:
3996         status = parse_sei(decoder, unit);
3997         break;
3998     case GST_H264_NAL_SLICE_EXT:
3999         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4000             status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4001             break;
4002         }
4003         /* fall-through */
4004     case GST_H264_NAL_SLICE_IDR:
4005     case GST_H264_NAL_SLICE:
4006         status = parse_slice(decoder, unit);
4007         break;
4008     default:
4009         status = GST_VAAPI_DECODER_STATUS_SUCCESS;
4010         break;
4011     }
4012     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4013         return status;
4014
4015     flags = 0;
4016     if (at_au_end) {
4017         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
4018             GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4019     }
4020     switch (pi->nalu.type) {
4021     case GST_H264_NAL_AU_DELIMITER:
4022         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4023         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4024         /* fall-through */
4025     case GST_H264_NAL_FILLER_DATA:
4026         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4027         break;
4028     case GST_H264_NAL_STREAM_END:
4029         flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
4030         /* fall-through */
4031     case GST_H264_NAL_SEQ_END:
4032         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
4033         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4034         break;
4035     case GST_H264_NAL_SPS:
4036     case GST_H264_NAL_SUBSET_SPS:
4037     case GST_H264_NAL_PPS:
4038     case GST_H264_NAL_SEI:
4039         flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4040         flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4041         break;
4042     case GST_H264_NAL_SLICE_EXT:
4043         if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
4044             flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4045             break;
4046         }
4047         /* fall-through */
4048     case GST_H264_NAL_SLICE_IDR:
4049     case GST_H264_NAL_SLICE:
4050         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
4051         if (priv->prev_pi &&
4052             (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
4053             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4054                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4055         }
4056         else if (is_new_picture(pi, priv->prev_slice_pi)) {
4057             flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4058             if (is_new_access_unit(pi, priv->prev_slice_pi))
4059                 flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
4060         }
4061         gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
4062         break;
4063     case GST_H264_NAL_SPS_EXT:
4064     case GST_H264_NAL_SLICE_AUX:
4065         /* skip SPS extension and auxiliary slice for now */
4066         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
4067         break;
4068     case GST_H264_NAL_PREFIX_UNIT:
4069         /* skip Prefix NAL units for now */
4070         flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
4071             GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4072             GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4073         break;
4074     default:
4075         if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
4076             flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
4077                 GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
4078         break;
4079     }
4080     if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
4081         priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
4082     GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
4083
4084     pi->nalu.data = NULL;
4085     pi->state = priv->parser_state;
4086     pi->flags = flags;
4087     gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
4088     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4089 }
4090
4091 static GstVaapiDecoderStatus
4092 gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
4093     GstVaapiDecoderUnit *unit)
4094 {
4095     GstVaapiDecoderH264 * const decoder =
4096         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4097     GstVaapiDecoderStatus status;
4098
4099     status = ensure_decoder(decoder);
4100     if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
4101         return status;
4102     return decode_unit(decoder, unit);
4103 }
4104
4105 static GstVaapiDecoderStatus
4106 gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
4107     GstVaapiDecoderUnit *unit)
4108 {
4109     GstVaapiDecoderH264 * const decoder =
4110         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4111
4112     return decode_picture(decoder, unit);
4113 }
4114
4115 static GstVaapiDecoderStatus
4116 gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
4117 {
4118     GstVaapiDecoderH264 * const decoder =
4119         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4120
4121     return decode_current_picture(decoder);
4122 }
4123
4124 static GstVaapiDecoderStatus
4125 gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
4126 {
4127     GstVaapiDecoderH264 * const decoder =
4128         GST_VAAPI_DECODER_H264_CAST(base_decoder);
4129
4130     dpb_flush(decoder, NULL);
4131     return GST_VAAPI_DECODER_STATUS_SUCCESS;
4132 }
4133
4134 static void
4135 gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
4136 {
4137     GstVaapiMiniObjectClass * const object_class =
4138         GST_VAAPI_MINI_OBJECT_CLASS(klass);
4139     GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
4140
4141     object_class->size          = sizeof(GstVaapiDecoderH264);
4142     object_class->finalize      = (GDestroyNotify)gst_vaapi_decoder_finalize;
4143
4144     decoder_class->create       = gst_vaapi_decoder_h264_create;
4145     decoder_class->destroy      = gst_vaapi_decoder_h264_destroy;
4146     decoder_class->parse        = gst_vaapi_decoder_h264_parse;
4147     decoder_class->decode       = gst_vaapi_decoder_h264_decode;
4148     decoder_class->start_frame  = gst_vaapi_decoder_h264_start_frame;
4149     decoder_class->end_frame    = gst_vaapi_decoder_h264_end_frame;
4150     decoder_class->flush        = gst_vaapi_decoder_h264_flush;
4151
4152     decoder_class->decode_codec_data =
4153         gst_vaapi_decoder_h264_decode_codec_data;
4154 }
4155
4156 static inline const GstVaapiDecoderClass *
4157 gst_vaapi_decoder_h264_class(void)
4158 {
4159     static GstVaapiDecoderH264Class g_class;
4160     static gsize g_class_init = FALSE;
4161
4162     if (g_once_init_enter(&g_class_init)) {
4163         gst_vaapi_decoder_h264_class_init(&g_class);
4164         g_once_init_leave(&g_class_init, TRUE);
4165     }
4166     return GST_VAAPI_DECODER_CLASS(&g_class);
4167 }
4168
4169 /**
4170  * gst_vaapi_decoder_h264_set_alignment:
4171  * @decoder: a #GstVaapiDecoderH264
4172  * @alignment: the #GstVaapiStreamAlignH264
4173  *
4174  * Specifies how stream buffers are aligned / fed, i.e. the boundaries
4175  * of each buffer that is supplied to the decoder. This could be no
4176  * specific alignment, NAL unit boundaries, or access unit boundaries.
4177  */
4178 void
4179 gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
4180     GstVaapiStreamAlignH264 alignment)
4181 {
4182     g_return_if_fail(decoder != NULL);
4183
4184     decoder->priv.stream_alignment = alignment;
4185 }
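/* Usage sketch (not part of the upstream code): an element that pushes whole
 * access units could hint this to the decoder as follows, which allows the
 * parser to flag the end of each frame without waiting for the next buffer:
 *
 *   gst_vaapi_decoder_h264_set_alignment(decoder,
 *       GST_VAAPI_STREAM_ALIGN_H264_AU);
 */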
4186
4187 /**
4188  * gst_vaapi_decoder_h264_new:
4189  * @display: a #GstVaapiDisplay
4190  * @caps: a #GstCaps holding codec information
4191  *
4192  * Creates a new #GstVaapiDecoder for H.264 decoding.  The @caps can
4193  * hold extra information like codec-data and coded picture size.
4194  *
4195  * Return value: the newly allocated #GstVaapiDecoder object
4196  */
4197 GstVaapiDecoder *
4198 gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
4199 {
4200     return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
4201 }