/*
* gstvaapidecoder_h264.c - H.264 decoder
*
- * Copyright (C) 2011-2012 Intel Corporation
+ * Copyright (C) 2011-2014 Intel Corporation
+ * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"
+#include "gstvaapiutils_h264_priv.h"
#define DEBUG 1
#include "gstvaapidebug.h"
+/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
+#define USE_STRICT_DPB_ORDERING 0
+
+typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
+typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
+typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
+typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
+typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
-typedef struct _GstVaapiPictureH264Class GstVaapiPictureH264Class;
-typedef struct _GstVaapiSliceH264 GstVaapiSliceH264;
-typedef struct _GstVaapiSliceH264Class GstVaapiSliceH264Class;
+
+// Used for field_poc[]
+#define TOP_FIELD 0
+#define BOTTOM_FIELD 1
+
+/* ------------------------------------------------------------------------- */
+/* --- H.264 Parser Info --- */
+/* ------------------------------------------------------------------------- */
+
+/*
+ * Extended decoder unit flags:
+ *
+ * @GST_VAAPI_DECODER_UNIT_FLAG_AU_START: marks the start of an access unit.
+ * @GST_VAAPI_DECODER_UNIT_FLAG_AU_END: marks the end of an access unit.
+ */
+enum {
+ /* This flag does not strictly follow the definitions (7.4.1.2.3)
+ for detecting the start of an access unit as we are only
+ interested in knowing if the current slice is the first one or
+ the last one in the current access unit */
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
+
+ GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
+};
+
+#define GST_VAAPI_PARSER_INFO_H264(obj) \
+ ((GstVaapiParserInfoH264 *)(obj))
+
+struct _GstVaapiParserInfoH264 {
+ GstVaapiMiniObject parent_instance;
+ GstH264NalUnit nalu;
+ union {
+ GstH264SPS sps;
+ GstH264PPS pps;
+ GArray *sei;
+ GstH264SliceHdr slice_hdr;
+ } data;
+ guint state;
+ guint flags; // Same as decoder unit flags (persistent)
+ guint view_id; // View ID of slice
+ guint voc; // View order index (VOIdx) of slice
+};
+
+static void
+gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
+{
+ switch (pi->nalu.type) {
+ case GST_H264_NAL_SPS:
+ case GST_H264_NAL_SUBSET_SPS:
+ gst_h264_sps_clear(&pi->data.sps);
+ break;
+ case GST_H264_NAL_SEI:
+ if (pi->data.sei) {
+ g_array_unref(pi->data.sei);
+ pi->data.sei = NULL;
+ }
+ break;
+ }
+}
+
+static inline const GstVaapiMiniObjectClass *
+gst_vaapi_parser_info_h264_class(void)
+{
+ static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
+ .size = sizeof(GstVaapiParserInfoH264),
+ .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
+ };
+ return &GstVaapiParserInfoH264Class;
+}
+
+static inline GstVaapiParserInfoH264 *
+gst_vaapi_parser_info_h264_new(void)
+{
+ return (GstVaapiParserInfoH264 *)
+ gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
+}
+
+#define gst_vaapi_parser_info_h264_ref(pi) \
+ gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
+
+#define gst_vaapi_parser_info_h264_unref(pi) \
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
+
+#define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
+ gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
+ (GstVaapiMiniObject *)(new_pi))
/* ------------------------------------------------------------------------- */
/* --- H.264 Pictures --- */
/* ------------------------------------------------------------------------- */
-#define GST_VAAPI_TYPE_PICTURE_H264 \
- (gst_vaapi_picture_h264_get_type())
+/*
+ * Extended picture flags:
+ *
+ * @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
+ * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
+ * may be used for inter-view prediction
+ * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
+ * i.e. a picture that is decoded with only inter-view prediction,
+ * and not inter prediction
+ * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
+ * access unit (AU)
+ * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
+ * access unit (AU)
+ * @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
+ * "used for short-term reference"
+ * @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
+ * "used for long-term reference"
+ * @GST_VAAPI_PICTURE_FLAGS_REFERENCE: mask covering any kind of
+ * reference picture (short-term reference or long-term reference)
+ */
+enum {
+ GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+ GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
+ GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
+ GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
+ GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
+
+ GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
+ GST_VAAPI_PICTURE_FLAG_REFERENCE),
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
+ GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
+ GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
+ GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
+};
-#define GST_VAAPI_PICTURE_H264_CAST(obj) \
- ((GstVaapiPictureH264 *)(obj))
+#define GST_VAAPI_PICTURE_IS_IDR(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR))
-#define GST_VAAPI_PICTURE_H264(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj), \
- GST_VAAPI_TYPE_PICTURE_H264, \
- GstVaapiPictureH264))
+#define GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture) \
+ ((GST_VAAPI_PICTURE_FLAGS(picture) & \
+ GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
+ GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE)
-#define GST_VAAPI_PICTURE_H264_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass), \
- GST_VAAPI_TYPE_PICTURE_H264, \
- GstVaapiPictureH264Class))
+#define GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture) \
+ ((GST_VAAPI_PICTURE_FLAGS(picture) & \
+ GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
-#define GST_VAAPI_IS_PICTURE_H264(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_VAAPI_TYPE_PICTURE_H264))
+#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
-#define GST_VAAPI_IS_PICTURE_H264_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass), GST_VAAPI_TYPE_PICTURE_H264))
+#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
-#define GST_VAAPI_PICTURE_H264_GET_CLASS(obj) \
- (G_TYPE_INSTANCE_GET_CLASS((obj), \
- GST_VAAPI_TYPE_PICTURE_H264, \
- GstVaapiPictureH264Class))
+#define GST_VAAPI_PICTURE_H264(picture) \
+ ((GstVaapiPictureH264 *)(picture))
struct _GstVaapiPictureH264 {
GstVaapiPicture base;
- VAPictureH264 info;
- gint32 poc;
+ GstH264SliceHdr *last_slice_hdr;
+ guint structure;
+ gint32 field_poc[2];
gint32 frame_num; // Original frame_num from slice_header()
gint32 frame_num_wrap; // Temporary for ref pic marking: FrameNumWrap
+ gint32 long_term_frame_idx; // Temporary for ref pic marking: LongTermFrameIdx
gint32 pic_num; // Temporary for ref pic marking: PicNum
gint32 long_term_pic_num; // Temporary for ref pic marking: LongTermPicNum
- guint is_idr : 1;
- guint is_long_term : 1;
- guint field_pic_flag : 1;
- guint bottom_field_flag : 1;
- guint has_mmco_5 : 1;
+ GstVaapiPictureH264 *other_field; // Temporary for ref pic marking: other field in the same frame store
guint output_flag : 1;
guint output_needed : 1;
};
-struct _GstVaapiPictureH264Class {
- /*< private >*/
- GstVaapiPictureClass parent_class;
-};
+GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264, gst_vaapi_picture_h264);
-GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiPictureH264,
- gst_vaapi_picture_h264,
- GST_VAAPI_TYPE_PICTURE)
-
-static void
-gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *decoder)
+void
+gst_vaapi_picture_h264_destroy(GstVaapiPictureH264 *picture)
{
+ gst_vaapi_picture_destroy(GST_VAAPI_PICTURE(picture));
}
-static gboolean
+gboolean
gst_vaapi_picture_h264_create(
GstVaapiPictureH264 *picture,
const GstVaapiCodecObjectConstructorArgs *args
)
{
+ if (!gst_vaapi_picture_create(GST_VAAPI_PICTURE(picture), args))
+ return FALSE;
+
+ picture->field_poc[0] = G_MAXINT32;
+ picture->field_poc[1] = G_MAXINT32;
+ picture->output_needed = FALSE;
return TRUE;
}
-static void
-gst_vaapi_picture_h264_init(GstVaapiPictureH264 *picture)
+static inline GstVaapiPictureH264 *
+gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
{
- VAPictureH264 *va_pic;
-
- va_pic = &picture->info;
- va_pic->flags = 0;
- va_pic->TopFieldOrderCnt = 0;
- va_pic->BottomFieldOrderCnt = 0;
+ return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
+ &GstVaapiPictureH264Class,
+ GST_VAAPI_CODEC_BASE(decoder),
+ NULL, sizeof(VAPictureParameterBufferH264),
+ NULL, 0,
+ 0);
+}
- picture->poc = 0;
- picture->is_long_term = FALSE;
- picture->is_idr = FALSE;
- picture->has_mmco_5 = FALSE;
- picture->output_needed = FALSE;
+static inline void
+gst_vaapi_picture_h264_set_reference(
+ GstVaapiPictureH264 *picture,
+ guint reference_flags,
+ gboolean other_field
+)
+{
+ if (!picture)
+ return;
+ GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
+ GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
+
+ if (!other_field || !(picture = picture->other_field))
+ return;
+ GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
+ GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
}
static inline GstVaapiPictureH264 *
-gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
+gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
{
- GstVaapiCodecObject *object;
-
- g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);
+ g_return_val_if_fail(picture, NULL);
- object = gst_vaapi_codec_object_new(
- GST_VAAPI_TYPE_PICTURE_H264,
- GST_VAAPI_CODEC_BASE(decoder),
- NULL, sizeof(VAPictureParameterBufferH264),
- NULL, 0
- );
- if (!object)
- return NULL;
- return GST_VAAPI_PICTURE_H264_CAST(object);
+ return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
}
/* ------------------------------------------------------------------------- */
-/* --- Slices --- */
+/* --- Frame Buffers (DPB) --- */
/* ------------------------------------------------------------------------- */
-#define GST_VAAPI_TYPE_SLICE_H264 \
- (gst_vaapi_slice_h264_get_type())
+struct _GstVaapiFrameStore {
+ /*< private >*/
+ GstVaapiMiniObject parent_instance;
-#define GST_VAAPI_SLICE_H264_CAST(obj) \
- ((GstVaapiSliceH264 *)(obj))
+ guint view_id;
+ guint structure;
+ GstVaapiPictureH264 *buffers[2];
+ guint num_buffers;
+ guint output_needed;
+};
-#define GST_VAAPI_SLICE_H264(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj), \
- GST_VAAPI_TYPE_SLICE_H264, \
- GstVaapiSliceH264))
+static void
+gst_vaapi_frame_store_finalize(gpointer object)
+{
+ GstVaapiFrameStore * const fs = object;
+ guint i;
-#define GST_VAAPI_SLICE_H264_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass), \
- GST_VAAPI_TYPE_SLICE_H264, \
- GstVaapiSliceH264Class))
+ for (i = 0; i < fs->num_buffers; i++)
+ gst_vaapi_picture_replace(&fs->buffers[i], NULL);
+}
-#define GST_VAAPI_IS_SLICE_H264(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_VAAPI_TYPE_SLICE_H264))
+static GstVaapiFrameStore *
+gst_vaapi_frame_store_new(GstVaapiPictureH264 *picture)
+{
+ GstVaapiFrameStore *fs;
-#define GST_VAAPI_IS_SLICE_H264_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass), GST_VAAPI_TYPE_SLICE_H264))
+ static const GstVaapiMiniObjectClass GstVaapiFrameStoreClass = {
+ sizeof(GstVaapiFrameStore),
+ gst_vaapi_frame_store_finalize
+ };
-#define GST_VAAPI_SLICE_H264_GET_CLASS(obj) \
- (G_TYPE_INSTANCE_GET_CLASS((obj), \
- GST_VAAPI_TYPE_SLICE_H264, \
- GstVaapiSliceH264Class))
+ fs = (GstVaapiFrameStore *)
+ gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
+ if (!fs)
+ return NULL;
-struct _GstVaapiSliceH264 {
- GstVaapiSlice base;
- GstH264SliceHdr slice_hdr; // parsed slice_header()
-};
+ fs->view_id = picture->base.view_id;
+ fs->structure = picture->structure;
+ fs->buffers[0] = gst_vaapi_picture_ref(picture);
+ fs->buffers[1] = NULL;
+ fs->num_buffers = 1;
+ fs->output_needed = picture->output_needed;
+ return fs;
+}
-struct _GstVaapiSliceH264Class {
- /*< private >*/
- GstVaapiSliceClass parent_class;
-};
+static gboolean
+gst_vaapi_frame_store_add(GstVaapiFrameStore *fs, GstVaapiPictureH264 *picture)
+{
+ guint field;
-GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiSliceH264,
- gst_vaapi_slice_h264,
- GST_VAAPI_TYPE_SLICE)
+ g_return_val_if_fail(fs->num_buffers == 1, FALSE);
+ g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
+ g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
-static void
-gst_vaapi_slice_h264_destroy(GstVaapiSliceH264 *slice)
-{
+ gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
+ if (picture->output_flag) {
+ picture->output_needed = TRUE;
+ fs->output_needed++;
+ }
+
+ fs->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+
+ field = picture->structure == GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD ?
+ TOP_FIELD : BOTTOM_FIELD;
+ g_return_val_if_fail(fs->buffers[0]->field_poc[field] == G_MAXINT32, FALSE);
+ fs->buffers[0]->field_poc[field] = picture->field_poc[field];
+ g_return_val_if_fail(picture->field_poc[!field] == G_MAXINT32, FALSE);
+ picture->field_poc[!field] = fs->buffers[0]->field_poc[!field];
+ return TRUE;
}
static gboolean
-gst_vaapi_slice_h264_create(
- GstVaapiSliceH264 *slice,
- const GstVaapiCodecObjectConstructorArgs *args
-)
+gst_vaapi_frame_store_split_fields(GstVaapiFrameStore *fs)
{
+ GstVaapiPictureH264 * const first_field = fs->buffers[0];
+ GstVaapiPictureH264 *second_field;
+
+ g_return_val_if_fail(fs->num_buffers == 1, FALSE);
+
+ first_field->base.structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
+ GST_VAAPI_PICTURE_FLAG_SET(first_field, GST_VAAPI_PICTURE_FLAG_INTERLACED);
+
+ second_field = gst_vaapi_picture_h264_new_field(first_field);
+ if (!second_field)
+ return FALSE;
+ gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], second_field);
+ gst_vaapi_picture_unref(second_field);
+
+ second_field->frame_num = first_field->frame_num;
+ second_field->field_poc[0] = first_field->field_poc[0];
+ second_field->field_poc[1] = first_field->field_poc[1];
+ second_field->output_flag = first_field->output_flag;
+ if (second_field->output_flag) {
+ second_field->output_needed = TRUE;
+ fs->output_needed++;
+ }
return TRUE;
}
-static void
-gst_vaapi_slice_h264_init(GstVaapiSliceH264 *slice)
+static inline gboolean
+gst_vaapi_frame_store_has_frame(GstVaapiFrameStore *fs)
{
+ return fs->structure == GST_VAAPI_PICTURE_STRUCTURE_FRAME;
}
-static inline GstVaapiSliceH264 *
-gst_vaapi_slice_h264_new(
- GstVaapiDecoderH264 *decoder,
- const guint8 *data,
- guint data_size
-)
+static inline gboolean
+gst_vaapi_frame_store_has_reference(GstVaapiFrameStore *fs)
{
- GstVaapiCodecObject *object;
+ guint i;
- g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);
+ for (i = 0; i < fs->num_buffers; i++) {
+ if (GST_VAAPI_PICTURE_IS_REFERENCE(fs->buffers[i]))
+ return TRUE;
+ }
+ return FALSE;
+}
- object = gst_vaapi_codec_object_new(
- GST_VAAPI_TYPE_SLICE_H264,
- GST_VAAPI_CODEC_BASE(decoder),
- NULL, sizeof(VASliceParameterBufferH264),
- data, data_size
- );
- if (!object)
- return NULL;
- return GST_VAAPI_SLICE_H264_CAST(object);
+static gboolean
+gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
+{
+ guint i;
+
+ for (i = 0; i < fs->num_buffers; i++) {
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
+ return TRUE;
+ }
+ return FALSE;
}
+#define gst_vaapi_frame_store_ref(fs) \
+ gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
+
+#define gst_vaapi_frame_store_unref(fs) \
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(fs))
+
+#define gst_vaapi_frame_store_replace(old_fs_p, new_fs) \
+ gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_fs_p), \
+ (GstVaapiMiniObject *)(new_fs))
+
/* ------------------------------------------------------------------------- */
/* --- H.264 Decoder --- */
/* ------------------------------------------------------------------------- */
-G_DEFINE_TYPE(GstVaapiDecoderH264,
- gst_vaapi_decoder_h264,
- GST_VAAPI_TYPE_DECODER)
+#define GST_VAAPI_DECODER_H264_CAST(decoder) \
+ ((GstVaapiDecoderH264 *)(decoder))
-#define GST_VAAPI_DECODER_H264_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
- GST_VAAPI_TYPE_DECODER_H264, \
- GstVaapiDecoderH264Private))
+typedef enum {
+ GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
+ GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
+ GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
-// Used for field_poc[]
-#define TOP_FIELD 0
-#define BOTTOM_FIELD 1
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
+ GST_H264_VIDEO_STATE_GOT_SPS |
+ GST_H264_VIDEO_STATE_GOT_PPS),
+ GST_H264_VIDEO_STATE_VALID_PICTURE = (
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
+ GST_H264_VIDEO_STATE_GOT_SLICE)
+} GstH264VideoState;
struct _GstVaapiDecoderH264Private {
- GstAdapter *adapter;
GstH264NalParser *parser;
- GstH264SPS *sps;
- GstH264SPS last_sps;
- GstH264PPS *pps;
- GstH264PPS last_pps;
+ guint parser_state;
+ guint decoder_state;
+ GstVaapiStreamAlignH264 stream_alignment;
GstVaapiPictureH264 *current_picture;
- GstVaapiPictureH264 *dpb[16];
+ GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
+ GstVaapiParserInfoH264 *active_sps;
+ GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
+ GstVaapiParserInfoH264 *active_pps;
+ GstVaapiParserInfoH264 *prev_pi;
+ GstVaapiParserInfoH264 *prev_slice_pi;
+ GstVaapiFrameStore **prev_frames;
+ guint prev_frames_alloc;
+ GstVaapiFrameStore **dpb;
guint dpb_count;
guint dpb_size;
+ guint dpb_size_max;
+ guint max_views;
GstVaapiProfile profile;
+ GstVaapiEntrypoint entrypoint;
+ GstVaapiChromaType chroma_type;
+ GPtrArray *inter_views;
GstVaapiPictureH264 *short_ref[32];
guint short_ref_count;
GstVaapiPictureH264 *long_ref[32];
GstVaapiPictureH264 *RefPicList1[32];
guint RefPicList1_count;
guint nal_length_size;
- guint width;
- guint height;
- guint mb_x;
- guint mb_y;
guint mb_width;
guint mb_height;
- guint8 scaling_list_4x4[6][16];
- guint8 scaling_list_8x8[6][64];
gint32 field_poc[2]; // 0:TopFieldOrderCnt / 1:BottomFieldOrderCnt
gint32 poc_msb; // PicOrderCntMsb
gint32 poc_lsb; // pic_order_cnt_lsb (from slice_header())
gint32 prev_poc_msb; // prevPicOrderCntMsb
gint32 prev_poc_lsb; // prevPicOrderCntLsb
gint32 frame_num_offset; // FrameNumOffset
- gint32 prev_frame_num_offset; // prevFrameNumOffset
gint32 frame_num; // frame_num (from slice_header())
gint32 prev_frame_num; // prevFrameNum
- guint is_constructed : 1;
+ gboolean prev_pic_has_mmco5; // prevMmco5Pic
+ gboolean prev_pic_structure; // previous picture structure
guint is_opened : 1;
- guint is_avc : 1;
+ guint is_avcC : 1;
guint has_context : 1;
+ guint progressive_sequence : 1;
+};
+
+/**
+ * GstVaapiDecoderH264:
+ *
+ * A decoder based on H264.
+ */
+struct _GstVaapiDecoderH264 {
+ /*< private >*/
+ GstVaapiDecoder parent_instance;
+ GstVaapiDecoderH264Private priv;
+};
+
+/**
+ * GstVaapiDecoderH264Class:
+ *
+ * A decoder class based on H264.
+ */
+struct _GstVaapiDecoderH264Class {
+ /*< private >*/
+ GstVaapiDecoderClass parent_class;
};
static gboolean
-decode_picture_end(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
-static void
-clear_references(
- GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 **pictures,
- guint *picture_count
-);
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture);
+
+static inline gboolean
+is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
+ GstVaapiFrameStore *fs)
+{
+ return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
+}
+
+/* Determines if the supplied profile is one of the MVC set */
+static gboolean
+is_mvc_profile(GstH264Profile profile)
+{
+ return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
+ profile == GST_H264_PROFILE_STEREO_HIGH;
+}
+
+/* Determines the view_id from the supplied NAL unit */
+static inline guint
+get_view_id(GstH264NalUnit *nalu)
+{
+ return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
+}
+
+/* Determines the view order index (VOIdx) from the supplied view_id */
+static gint
+get_view_order_index(GstH264SPS *sps, guint16 view_id)
+{
+ GstH264SPSExtMVC *mvc;
+ gint i;
+
+ if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return 0;
+
+ mvc = &sps->extension.mvc;
+ for (i = 0; i <= mvc->num_views_minus1; i++) {
+ if (mvc->view[i].view_id == view_id)
+ return i;
+ }
+ GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
+ return -1;
+}
+
+/* Determines NumViews */
+static guint
+get_num_views(GstH264SPS *sps)
+{
+ return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
+ sps->extension.mvc.num_views_minus1 : 0);
+}
/* Get number of reference frames to use */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
{
- guint max_dec_frame_buffering, MaxDpbMbs, PicSizeMbs;
+ guint num_views, max_dpb_frames;
+ guint max_dec_frame_buffering, PicSizeMbs;
+ GstVaapiLevelH264 level;
+ const GstVaapiH264LevelLimits *level_limits;
/* Table A-1 - Level limits */
- switch (sps->level_idc) {
- case 10: MaxDpbMbs = 396; break;
- case 11: MaxDpbMbs = 900; break;
- case 12: MaxDpbMbs = 2376; break;
- case 13: MaxDpbMbs = 2376; break;
- case 20: MaxDpbMbs = 2376; break;
- case 21: MaxDpbMbs = 4752; break;
- case 22: MaxDpbMbs = 8100; break;
- case 30: MaxDpbMbs = 8100; break;
- case 31: MaxDpbMbs = 18000; break;
- case 32: MaxDpbMbs = 20480; break;
- case 40: MaxDpbMbs = 32768; break;
- case 41: MaxDpbMbs = 32768; break;
- case 42: MaxDpbMbs = 34816; break;
- case 50: MaxDpbMbs = 110400; break;
- case 51: MaxDpbMbs = 184320; break;
- default:
- g_assert(0 && "unhandled level");
- break;
+ if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
+ level = GST_VAAPI_LEVEL_H264_L1b;
+ else
+ level = gst_vaapi_utils_h264_get_level(sps->level_idc);
+ level_limits = gst_vaapi_utils_h264_get_level_limits(level);
+ if (G_UNLIKELY(!level_limits)) {
+ GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
+ max_dec_frame_buffering = 16;
}
-
- PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
- (sps->pic_height_in_map_units_minus1 + 1) *
- (sps->frame_mbs_only_flag ? 1 : 2));
- max_dec_frame_buffering = MaxDpbMbs / PicSizeMbs;
+ else {
+ PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
+ (sps->pic_height_in_map_units_minus1 + 1) *
+ (sps->frame_mbs_only_flag ? 1 : 2));
+ max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
+ }
+ if (is_mvc_profile(sps->profile_idc))
+ max_dec_frame_buffering <<= 1;
/* VUI parameters */
if (sps->vui_parameters_present_flag) {
else {
switch (sps->profile_idc) {
case 44: // CAVLC 4:4:4 Intra profile
- case 86: // Scalable High profile
- case 100: // High profile
- case 110: // High 10 profile
- case 122: // High 4:2:2 profile
- case 244: // High 4:4:4 Predictive profile
+ case GST_H264_PROFILE_SCALABLE_HIGH:
+ case GST_H264_PROFILE_HIGH:
+ case GST_H264_PROFILE_HIGH10:
+ case GST_H264_PROFILE_HIGH_422:
+ case GST_H264_PROFILE_HIGH_444:
if (sps->constraint_set3_flag)
max_dec_frame_buffering = 0;
break;
}
}
- if (max_dec_frame_buffering > 16)
- max_dec_frame_buffering = 16;
+ num_views = get_num_views(sps);
+ max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
+ if (max_dec_frame_buffering > max_dpb_frames)
+ max_dec_frame_buffering = max_dpb_frames;
else if (max_dec_frame_buffering < sps->num_ref_frames)
max_dec_frame_buffering = sps->num_ref_frames;
return MAX(1, max_dec_frame_buffering);
}
static void
+array_remove_index_fast(void *array, guint *array_length_ptr, guint index)
+{
+ gpointer * const entries = array;
+ guint num_entries = *array_length_ptr;
+
+ g_return_if_fail(index < num_entries);
+
+ if (index != --num_entries)
+ entries[index] = entries[num_entries];
+ entries[num_entries] = NULL;
+ *array_length_ptr = num_entries;
+}
+
+#if 1
+static inline void
+array_remove_index(void *array, guint *array_length_ptr, guint index)
+{
+ array_remove_index_fast(array, array_length_ptr, index);
+}
+#else
+static void
+array_remove_index(void *array, guint *array_length_ptr, guint index)
+{
+ gpointer * const entries = array;
+ const guint num_entries = *array_length_ptr - 1;
+ guint i;
+
+ g_return_if_fail(index <= num_entries);
+
+ for (i = index; i < num_entries; i++)
+ entries[i] = entries[i + 1];
+ entries[num_entries] = NULL;
+ *array_length_ptr = num_entries;
+}
+#endif
+
+#define ARRAY_REMOVE_INDEX(array, index) \
+ array_remove_index(array, &array##_count, index)
+
+static void
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- guint num_pictures = --priv->dpb_count;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i, num_frames = --priv->dpb_count;
- if (index != num_pictures)
- gst_vaapi_picture_replace(&priv->dpb[index], priv->dpb[num_pictures]);
- gst_vaapi_picture_replace(&priv->dpb[num_pictures], NULL);
+ if (USE_STRICT_DPB_ORDERING) {
+ for (i = index; i < num_frames; i++)
+ gst_vaapi_frame_store_replace(&priv->dpb[i], priv->dpb[i + 1]);
+ }
+ else if (index != num_frames)
+ gst_vaapi_frame_store_replace(&priv->dpb[index], priv->dpb[num_frames]);
+ gst_vaapi_frame_store_replace(&priv->dpb[num_frames], NULL);
}
-static inline gboolean
-dpb_output(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+static gboolean
+dpb_output(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiFrameStore *fs,
+ GstVaapiPictureH264 *picture
+)
{
- /* XXX: update cropping rectangle */
picture->output_needed = FALSE;
+
+ if (fs) {
+ if (--fs->output_needed > 0)
+ return TRUE;
+ picture = fs->buffers[0];
+ }
return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
}
+static inline void
+dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+
+ if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
+ dpb_remove_index(decoder, i);
+}
+
+/* Finds the frame store holding the supplied picture */
+static gint
+dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ gint i, j;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ for (j = 0; j < fs->num_buffers; j++) {
+ if (fs->buffers[j] == picture)
+ return i;
+ }
+ }
+ return -1;
+}
+
+/* Finds the picture with the lowest POC that needs to be output */
+static gint
+dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture = NULL;
+ guint i, j, found_index;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (!fs->output_needed)
+ continue;
+ if (picture && picture->base.view_id != fs->view_id)
+ continue;
+ for (j = 0; j < fs->num_buffers; j++) {
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed)
+ continue;
+ if (!found_picture || found_picture->base.poc > pic->base.poc ||
+ (found_picture->base.poc == pic->base.poc &&
+ found_picture->base.voc > pic->base.voc))
+ found_picture = pic, found_index = i;
+ }
+ }
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
+/* Finds the picture with the lowest VOC that needs to be output */
+static gint
+dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture = NULL;
+ guint i, j, found_index;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (!fs->output_needed || fs->view_id == picture->base.view_id)
+ continue;
+ for (j = 0; j < fs->num_buffers; j++) {
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed || pic->base.poc != picture->base.poc)
+ continue;
+ if (!found_picture || found_picture->base.voc > pic->base.voc)
+ found_picture = pic, found_index = i;
+ }
+ }
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
static gboolean
-dpb_bump(GstVaapiDecoderH264 *decoder)
+dpb_output_other_views(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture, guint voc)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- guint i, lowest_poc_index;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture;
+ gint found_index;
gboolean success;
- for (i = 0; i < priv->dpb_count; i++) {
- if (priv->dpb[i]->output_needed)
+ if (priv->max_views == 1)
+ return TRUE;
+
+ /* Emit all other view components that were in the same access
+ unit as the picture we have just found */
+ found_picture = picture;
+ for (;;) {
+ found_index = dpb_find_lowest_voc(decoder, found_picture,
+ &found_picture);
+ if (found_index < 0 || found_picture->base.voc >= voc)
break;
+ success = dpb_output(decoder, priv->dpb[found_index], found_picture);
+ dpb_evict(decoder, found_picture, found_index);
+ if (!success)
+ return FALSE;
}
- if (i == priv->dpb_count)
+ return TRUE;
+}
+
+static gboolean
+dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture;
+ gint found_index;
+ gboolean success;
+
+ found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
+ if (found_index < 0)
return FALSE;
- lowest_poc_index = i++;
- for (; i < priv->dpb_count; i++) {
- GstVaapiPictureH264 * const picture = priv->dpb[i];
- if (picture->output_needed && picture->poc < priv->dpb[lowest_poc_index]->poc)
- lowest_poc_index = i;
- }
+ if (picture && picture->base.poc != found_picture->base.poc)
+ dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
+
+ success = dpb_output(decoder, priv->dpb[found_index], found_picture);
+ dpb_evict(decoder, found_picture, found_index);
+ if (priv->max_views == 1)
+ return success;
- success = dpb_output(decoder, priv->dpb[lowest_poc_index]);
- if (!GST_VAAPI_PICTURE_IS_REFERENCE(priv->dpb[lowest_poc_index]))
- dpb_remove_index(decoder, lowest_poc_index);
+ if (picture && picture->base.poc != found_picture->base.poc)
+ dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
return success;
}
static void
-dpb_flush(GstVaapiDecoderH264 *decoder)
+dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i, n;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ if (picture && picture->base.view_id != priv->dpb[i]->view_id)
+ continue;
+ gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
+ }
+
+ for (i = 0, n = 0; i < priv->dpb_count; i++) {
+ if (priv->dpb[i])
+ priv->dpb[n++] = priv->dpb[i];
+ }
+ priv->dpb_count = n;
+
+ /* Clear previous frame buffers only if this is a "flush-all" operation,
+ or if the picture is the first one in the access unit */
+ if (!picture || GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_AU_START)) {
+ for (i = 0; i < priv->max_views; i++)
+ gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
+ }
+}
- while (dpb_bump(decoder))
+static void
+dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ while (dpb_bump(decoder, picture))
;
- clear_references(decoder, priv->dpb, &priv->dpb_count);
+ dpb_clear(decoder, picture);
+}
+
+static void
+dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ const gboolean is_last_picture = /* in the access unit */
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+ guint i;
+
+ // Remove all unused inter-view only reference components of the current AU
+ i = 0;
+ while (i < priv->dpb_count) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (fs->view_id != picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
+ (is_last_picture ||
+ !is_inter_view_reference_for_next_frames(decoder, fs)))
+ dpb_remove_index(decoder, i);
+ else
+ i++;
+ }
}
static gboolean
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiFrameStore *fs;
guint i;
+ if (priv->max_views > 1)
+ dpb_prune_mvc(decoder, picture);
+
// Remove all unused pictures
- if (picture->is_idr)
- dpb_flush(decoder);
- else {
+ if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
i = 0;
while (i < priv->dpb_count) {
- GstVaapiPictureH264 * const picture = priv->dpb[i];
- if (!picture->output_needed &&
- !GST_VAAPI_PICTURE_IS_REFERENCE(picture))
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (fs->view_id == picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
dpb_remove_index(decoder, i);
else
i++;
}
}
+ // Check if picture is the second field and the first field is still in DPB
+ if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
+ !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
+ const gint found_index = dpb_find_picture(decoder,
+ GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
+ if (found_index >= 0)
+ return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
+ }
+
+ // Create new frame store, and split fields if necessary
+ fs = gst_vaapi_frame_store_new(picture);
+ if (!fs)
+ return FALSE;
+ gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
+ gst_vaapi_frame_store_unref(fs);
+
+ if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
+ if (!gst_vaapi_frame_store_split_fields(fs))
+ return FALSE;
+ }
+
// C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
while (priv->dpb_count == priv->dpb_size) {
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_picture_replace(&priv->dpb[priv->dpb_count++], picture);
- if (picture->output_flag)
- picture->output_needed = TRUE;
}
// C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
else {
- if (!picture->output_flag)
+ const gboolean StoreInterViewOnlyRefFlag =
+ !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_AU_END) &&
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
return TRUE;
while (priv->dpb_count == priv->dpb_size) {
- for (i = 0; i < priv->dpb_count; i++) {
- if (priv->dpb[i]->output_needed &&
- priv->dpb[i]->poc < picture->poc)
- break;
+ if (!StoreInterViewOnlyRefFlag) {
+ if (dpb_find_lowest_poc(decoder, picture, NULL) < 0)
+ return dpb_output(decoder, NULL, picture);
}
- if (i == priv->dpb_count)
- return dpb_output(decoder, picture);
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_picture_replace(&priv->dpb[priv->dpb_count++], picture);
+ }
+
+ gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
+ if (picture->output_flag) {
picture->output_needed = TRUE;
+ fs->output_needed++;
}
return TRUE;
}
-static inline void
-dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+static gboolean
+dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+ if (dpb_size < priv->dpb_count)
+ return FALSE;
+
+ if (dpb_size > priv->dpb_size_max) {
+ priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
+ if (!priv->dpb)
+ return FALSE;
+ memset(&priv->dpb[priv->dpb_size_max], 0,
+ (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
+ priv->dpb_size_max = dpb_size;
+ }
+
+ if (priv->dpb_size < dpb_size)
+ priv->dpb_size = dpb_size;
+ else if (dpb_size < priv->dpb_count)
+ return FALSE;
- priv->dpb_size = get_max_dec_frame_buffering(sps);
GST_DEBUG("DPB size %u", priv->dpb_size);
+ return TRUE;
+}
+
+static void
+unref_inter_view(GstVaapiPictureH264 *picture)
+{
+ if (!picture)
+ return;
+ GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ gst_vaapi_picture_unref(picture);
+}
+
+/* Resets MVC resources */
+static gboolean
+mvc_reset(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i;
+
+ // Resize array of inter-view references
+ if (!priv->inter_views) {
+ priv->inter_views = g_ptr_array_new_full(priv->max_views,
+ (GDestroyNotify)unref_inter_view);
+ if (!priv->inter_views)
+ return FALSE;
+ }
+
+ // Resize array of previous frame buffers
+ for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
+ gst_vaapi_picture_replace(&priv->prev_frames[i], NULL);
+
+ priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
+ sizeof(*priv->prev_frames));
+ if (!priv->prev_frames) {
+ priv->prev_frames_alloc = 0;
+ return FALSE;
+ }
+ for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
+ priv->prev_frames[i] = NULL;
+ priv->prev_frames_alloc = priv->max_views;
+ return TRUE;
}
static GstVaapiDecoderStatus
return status;
}
-static inline GstH264DecRefPicMarking *
-get_dec_ref_pic_marking(GstVaapiPictureH264 *picture_h264)
-{
- GstVaapiPicture * const picture = GST_VAAPI_PICTURE_CAST(picture_h264);
- GstVaapiSliceH264 *slice;
-
- slice = g_ptr_array_index(picture->slices, picture->slices->len - 1);
- return &slice->slice_hdr.dec_ref_pic_marking;
-}
-
static void
gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
- clear_references(decoder, priv->short_ref, &priv->short_ref_count);
- clear_references(decoder, priv->long_ref, &priv->long_ref_count );
- clear_references(decoder, priv->dpb, &priv->dpb_count );
+ gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
+
+ dpb_clear(decoder, NULL);
+
+ if (priv->inter_views) {
+ g_ptr_array_unref(priv->inter_views);
+ priv->inter_views = NULL;
+ }
if (priv->parser) {
gst_h264_nal_parser_free(priv->parser);
priv->parser = NULL;
}
-
- if (priv->adapter) {
- gst_adapter_clear(priv->adapter);
- g_object_unref(priv->adapter);
- priv->adapter = NULL;
- }
}
static gboolean
-gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder, GstBuffer *buffer)
+gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gst_vaapi_decoder_h264_close(decoder);
- priv->adapter = gst_adapter_new();
- if (!priv->adapter)
- return FALSE;
-
priv->parser = gst_h264_nal_parser_new();
if (!priv->parser)
return FALSE;
}
static void
-gst_vaapi_decoder_h264_destroy(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i;
+
gst_vaapi_decoder_h264_close(decoder);
+
+ g_free(priv->dpb);
+ priv->dpb = NULL;
+ priv->dpb_size = 0;
+
+ g_free(priv->prev_frames);
+ priv->prev_frames = NULL;
+ priv->prev_frames_alloc = 0;
+
+ for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
+ gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
+
+ for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
+ gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
}
static gboolean
-gst_vaapi_decoder_h264_create(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
{
- if (!GST_VAAPI_DECODER_CODEC(decoder))
- return FALSE;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+ priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
+ priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+ priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+ priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+ priv->progressive_sequence = TRUE;
return TRUE;
}
-static GstVaapiDecoderStatus
-ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+/* Activates the supplied PPS */
+static GstH264PPS *
+ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiProfile profiles[2];
- GstVaapiEntrypoint entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
- guint i, n_profiles = 0;
- gboolean success, reset_context = FALSE;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
- if (!priv->has_context || priv->sps->profile_idc != sps->profile_idc) {
- GST_DEBUG("profile changed");
- reset_context = TRUE;
+ gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
+ return pi ? &pi->data.pps : NULL;
+}
- switch (sps->profile_idc) {
- case 66:
- profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_BASELINE;
- break;
- case 77:
- profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_MAIN;
- // fall-through
- case 100:
- profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
- break;
- default:
- GST_DEBUG("unsupported profile %d", sps->profile_idc);
- return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
+/* Returns the active PPS */
+static inline GstH264PPS *
+get_pps(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
+
+ return pi ? &pi->data.pps : NULL;
+}
+
+/* Activate the supplied SPS */
+static GstH264SPS *
+ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
+
+ gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
+ return pi ? &pi->data.sps : NULL;
+}
+
+/* Returns the active SPS */
+static inline GstH264SPS *
+get_sps(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
+
+ return pi ? &pi->data.sps : NULL;
+}
+
+static void
+fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
+ GstVaapiProfile profile)
+{
+ guint n_profiles = *n_profiles_ptr;
+
+ profiles[n_profiles++] = profile;
+ switch (profile) {
+ case GST_VAAPI_PROFILE_H264_MAIN:
+ profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
+ break;
+ default:
+ break;
+ }
+ *n_profiles_ptr = n_profiles;
+}
+
+/* Fills in compatible profiles for MVC decoding */
+static void
+fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
+ guint *n_profiles_ptr, guint dpb_size)
+{
+ const gchar * const vendor_string =
+ gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
+
+ gboolean add_high_profile = FALSE;
+ struct map {
+ const gchar *str;
+ guint str_len;
+ };
+ const struct map *m;
+
+ // Drivers that support slice level decoding
+ if (vendor_string && dpb_size <= 16) {
+ static const struct map drv_names[] = {
+ { "Intel i965 driver", 17 },
+ { NULL, 0 }
+ };
+ for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
+ if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
+ add_high_profile = TRUE;
}
+ }
- for (i = 0; i < n_profiles; i++) {
- success = gst_vaapi_display_has_decoder(
- GST_VAAPI_DECODER_DISPLAY(decoder),
- profiles[i],
- entrypoint
- );
- if (success)
- break;
+ if (add_high_profile)
+ fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
+}
+
+static GstVaapiProfile
+get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
+ GstVaapiProfile profile, profiles[4];
+ guint i, n_profiles = 0;
+
+ profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
+ if (!profile)
+ return GST_VAAPI_PROFILE_UNKNOWN;
+
+ fill_profiles(profiles, &n_profiles, profile);
+ switch (profile) {
+ case GST_VAAPI_PROFILE_H264_BASELINE:
+ if (sps->constraint_set1_flag) { // A.2.2 (main profile)
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MAIN);
}
- if (i == n_profiles)
- return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
- priv->profile = profiles[i];
+ break;
+ case GST_VAAPI_PROFILE_H264_EXTENDED:
+ if (sps->constraint_set1_flag) { // A.2.2 (main profile)
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MAIN);
+ }
+ break;
+ case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
+ if (priv->max_views == 2) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_STEREO_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
+ break;
+ case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
+ if (sps->frame_mbs_only_flag) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
+ break;
+ default:
+ break;
}
- if (!priv->has_context ||
- priv->sps->chroma_format_idc != sps->chroma_format_idc) {
- GST_DEBUG("chroma format changed");
+ /* If the preferred profile (profiles[0]) matches one that we already
+ found, then just return it now instead of searching for it again */
+ if (profiles[0] == priv->profile)
+ return priv->profile;
+
+ for (i = 0; i < n_profiles; i++) {
+ if (gst_vaapi_display_has_decoder(display, profiles[i], priv->entrypoint))
+ return profiles[i];
+ }
+ return GST_VAAPI_PROFILE_UNKNOWN;
+}
+
+static GstVaapiDecoderStatus
+ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+{
+ GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiContextInfo info;
+ GstVaapiProfile profile;
+ GstVaapiChromaType chroma_type;
+ gboolean reset_context = FALSE;
+ guint mb_width, mb_height, dpb_size;
+
+ dpb_size = get_max_dec_frame_buffering(sps);
+ if (priv->dpb_size < dpb_size) {
+ GST_DEBUG("DPB size increased");
reset_context = TRUE;
+ }
- /* XXX: theoritically, we could handle 4:2:2 format */
- if (sps->chroma_format_idc != 1)
- return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
+ profile = get_profile(decoder, sps, dpb_size);
+ if (!profile) {
+ GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
}
- if (!priv->has_context ||
- priv->sps->width != sps->width ||
- priv->sps->height != sps->height) {
- GST_DEBUG("size changed");
- reset_context = TRUE;
+ if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
+ GST_DEBUG("profile changed");
+ reset_context = TRUE;
+ priv->profile = profile;
+ }
- priv->width = sps->width;
- priv->height = sps->height;
- priv->mb_width = sps->pic_width_in_mbs_minus1 + 1;
- priv->mb_height = sps->pic_height_in_map_units_minus1 + 1;
- priv->mb_height *= 2 - sps->frame_mbs_only_flag;
+ chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
+ if (!chroma_type) {
+ GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
+ }
+
+ if (priv->chroma_type != chroma_type) {
+ GST_DEBUG("chroma format changed");
+ reset_context = TRUE;
+ priv->chroma_type = chroma_type;
}
- if (reset_context) {
- GstVaapiContextInfo info;
+ mb_width = sps->pic_width_in_mbs_minus1 + 1;
+ mb_height = (sps->pic_height_in_map_units_minus1 + 1) <<
+ !sps->frame_mbs_only_flag;
+ if (priv->mb_width != mb_width || priv->mb_height != mb_height) {
+ GST_DEBUG("size changed");
+ reset_context = TRUE;
+ priv->mb_width = mb_width;
+ priv->mb_height = mb_height;
+ }
+
+ priv->progressive_sequence = sps->frame_mbs_only_flag;
+ gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
+
+ gst_vaapi_decoder_set_pixel_aspect_ratio(
+ base_decoder,
+ sps->vui_parameters.par_n,
+ sps->vui_parameters.par_d
+ );
+
+ if (!reset_context && priv->has_context)
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+ /* XXX: fix surface size when cropping is implemented */
+ info.profile = priv->profile;
+ info.entrypoint = priv->entrypoint;
+ info.chroma_type = priv->chroma_type;
+ info.width = sps->width;
+ info.height = sps->height;
+ info.ref_frames = dpb_size;
+
+ if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ priv->has_context = TRUE;
+
+ /* Reset DPB */
+ if (!dpb_reset(decoder, dpb_size))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+
+ /* Reset MVC data */
+ if (!mvc_reset(decoder))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static void
+fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
+ const GstH264SPS *sps)
+{
+ guint i;
+
+ /* There are always 6 4x4 scaling lists */
+ g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
+ g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
+
+ for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
+ gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
+ iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
+}
+
+static void
+fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
+ const GstH264SPS *sps)
+{
+ guint i, n;
- info.profile = priv->profile;
- info.entrypoint = entrypoint;
- info.width = priv->width;
- info.height = priv->height;
- info.ref_frames = get_max_dec_frame_buffering(sps);
+ /* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
+ if (!pps->transform_8x8_mode_flag)
+ return;
- if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- priv->has_context = TRUE;
+ g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
+ g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
- /* Reset DPB */
- dpb_reset(decoder, sps);
+ n = (sps->chroma_format_idc != 3) ? 2 : 6;
+ for (i = 0; i < n; i++) {
+ gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
+ iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
}
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
+ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiPicture * const base_picture = &picture->base;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
+ VAIQMatrixBufferH264 *iq_matrix;
- if (priv->pps != pps) {
- memcpy(priv->scaling_list_4x4, pps->scaling_lists_4x4,
- sizeof(priv->scaling_list_4x4));
- memcpy(priv->scaling_list_8x8, pps->scaling_lists_8x8,
- sizeof(priv->scaling_list_8x8));
+ base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
+ if (!base_picture->iq_matrix) {
+ GST_ERROR("failed to allocate IQ matrix");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
+ iq_matrix = base_picture->iq_matrix->param;
+
+ /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
+ is not large enough to hold lists for 4:4:4 */
+ if (sps->chroma_format_idc == 3)
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
+
+ fill_iq_matrix_4x4(iq_matrix, pps, sps);
+ fill_iq_matrix_8x8(iq_matrix, pps, sps);
+
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static gboolean
+static inline gboolean
+is_valid_state(guint state, guint ref_state)
+{
+ return (state & ref_state) == ref_state;
+}
+
+static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 * const picture = priv->current_picture;
- gboolean success = FALSE;
+
+ if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
+ goto drop_frame;
+ priv->decoder_state = 0;
if (!picture)
- return TRUE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
- if (!decode_picture_end(decoder, picture))
- goto end;
+ if (!exec_ref_pic_marking(decoder, picture))
+ goto error;
+ if (!dpb_add(decoder, picture))
+ goto error;
if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
- goto end;
- success = TRUE;
-end:
+ goto error;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
- return success;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+error:
+ /* XXX: fix for cases where first field failed to be decoded */
+ gst_vaapi_picture_replace(&priv->current_picture, NULL);
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+drop_frame:
+ priv->decoder_state = 0;
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}
static GstVaapiDecoderStatus
-decode_sps(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu)
+parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SPS * const sps = &priv->last_sps;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
GstH264ParserResult result;
- GST_DEBUG("decode SPS");
+ GST_DEBUG("parse SPS");
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ priv->parser_state = 0;
- memset(sps, 0, sizeof(*sps));
- result = gst_h264_parser_parse_sps(priv->parser, nalu, sps, TRUE);
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+
+ result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
if (result != GST_H264_PARSER_OK)
return get_status(result);
- return ensure_context(decoder, sps);
+ /* Reset defaults */
+ priv->max_views = 1;
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_pps(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu)
+parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = &priv->last_pps;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
GstH264ParserResult result;
- GST_DEBUG("decode PPS");
+ GST_DEBUG("parse subset SPS");
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+
+ result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
+ TRUE);
+ if (result != GST_H264_PARSER_OK)
+ return get_status(result);
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264PPS * const pps = &pi->data.pps;
+ GstH264ParserResult result;
+
+ GST_DEBUG("parse PPS");
+
+ priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ pps->slice_group_map_type = 0;
+ pps->slice_group_change_rate_minus1 = 0;
- memset(pps, 0, sizeof(*pps));
- result = gst_h264_parser_parse_pps(priv->parser, nalu, pps);
+ result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
if (result != GST_H264_PARSER_OK)
return get_status(result);
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-decode_sei(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu)
+parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SEIMessage sei;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GArray ** const sei_ptr = &pi->data.sei;
GstH264ParserResult result;
- GST_DEBUG("decode SEI");
+ GST_DEBUG("parse SEI");
- memset(&sei, 0, sizeof(sei));
- result = gst_h264_parser_parse_sei(priv->parser, nalu, &sei);
+ result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
if (result != GST_H264_PARSER_OK) {
- GST_WARNING("failed to decode SEI, payload type:%d", sei.payloadType);
+ GST_WARNING("failed to parse SEI messages");
+ return get_status(result);
+ }
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264NalUnit * const nalu = &pi->nalu;
+ GstH264SPS *sps;
+ GstH264ParserResult result;
+ guint num_views;
+
+ GST_DEBUG("parse slice");
+
+ priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
+ GST_H264_VIDEO_STATE_GOT_PPS);
+
+ /* Propagate Prefix NAL unit info, if necessary */
+ switch (nalu->type) {
+ case GST_H264_NAL_SLICE:
+ case GST_H264_NAL_SLICE_IDR: {
+ GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
+ if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
+ /* MVC sequences shall have a Prefix NAL unit immediately
+ preceding this NAL unit */
+ pi->nalu.extension_type = prev_pi->nalu.extension_type;
+ pi->nalu.extension = prev_pi->nalu.extension;
+ }
+ else {
+ /* In the very unlikely case there is no Prefix NAL unit
+ immediately preceding this NAL unit, try to infer some
+ defaults (H.7.4.1.1) */
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+ mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
+ nalu->idr_pic_flag = !mvc->non_idr_flag;
+ mvc->priority_id = 0;
+ mvc->view_id = 0;
+ mvc->temporal_id = 0;
+ mvc->anchor_pic_flag = 0;
+ mvc->inter_view_flag = 1;
+ }
+ break;
+ }
+ }
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ slice_hdr->cabac_init_idc = 0;
+ slice_hdr->direct_spatial_mv_pred_flag = 0;
+
+ result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
+ slice_hdr, TRUE, TRUE);
+ if (result != GST_H264_PARSER_OK)
return get_status(result);
+
+ sps = slice_hdr->pps->sequence;
+
+ /* Update MVC data */
+ num_views = get_num_views(sps);
+ if (priv->max_views < num_views) {
+ priv->max_views = num_views;
+ GST_DEBUG("maximum number of views changed to %u", num_views);
}
+ pi->view_id = get_view_id(&pi->nalu);
+ pi->voc = get_view_order_index(sps, pi->view_id);
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+
+ GST_DEBUG("decode SPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+
+ GST_DEBUG("decode subset SPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264PPS * const pps = &pi->data.pps;
+ GST_DEBUG("decode PPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderStatus status;
GST_DEBUG("decode sequence-end");
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- dpb_flush(decoder);
- return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
+ status = decode_current_picture(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ dpb_flush(decoder, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* 8.2.1.1 - Decoding process for picture order count type 0 */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
+ gint32 temp_poc;
GST_DEBUG("decode picture order count type 0");
+ if (GST_VAAPI_PICTURE_IS_IDR(picture)) {
+ priv->prev_poc_msb = 0;
+ priv->prev_poc_lsb = 0;
+ }
+ else if (priv->prev_pic_has_mmco5) {
+ priv->prev_poc_msb = 0;
+ priv->prev_poc_lsb =
+ (priv->prev_pic_structure == GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD ?
+ 0 : priv->field_poc[TOP_FIELD]);
+ }
+ else {
+ priv->prev_poc_msb = priv->poc_msb;
+ priv->prev_poc_lsb = priv->poc_lsb;
+ }
+
// (8-3)
priv->poc_lsb = slice_hdr->pic_order_cnt_lsb;
if (priv->poc_lsb < priv->prev_poc_lsb &&
else
priv->poc_msb = priv->prev_poc_msb;
- // (8-4)
- if (!slice_hdr->field_pic_flag || !slice_hdr->bottom_field_flag)
- priv->field_poc[TOP_FIELD] = priv->poc_msb + priv->poc_lsb;
-
- // (8-5)
- if (!slice_hdr->field_pic_flag)
- priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
+ temp_poc = priv->poc_msb + priv->poc_lsb;
+ switch (picture->structure) {
+ case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
+ // (8-4, 8-5)
+ priv->field_poc[TOP_FIELD] = temp_poc;
+ priv->field_poc[BOTTOM_FIELD] = temp_poc +
slice_hdr->delta_pic_order_cnt_bottom;
- else if (slice_hdr->bottom_field_flag)
- priv->field_poc[BOTTOM_FIELD] = priv->poc_msb + priv->poc_lsb;
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
+ // (8-4)
+ priv->field_poc[TOP_FIELD] = temp_poc;
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
+ // (8-5)
+ priv->field_poc[BOTTOM_FIELD] = temp_poc;
+ break;
+ }
}
/* 8.2.1.2 - Decoding process for picture order count type 1 */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
- gint32 abs_frame_num, expected_poc;
+ gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
guint i;
GST_DEBUG("decode picture order count type 1");
+ if (priv->prev_pic_has_mmco5)
+ prev_frame_num_offset = 0;
+ else
+ prev_frame_num_offset = priv->frame_num_offset;
+
// (8-6)
- if (picture->is_idr)
+ if (GST_VAAPI_PICTURE_IS_IDR(picture))
priv->frame_num_offset = 0;
else if (priv->prev_frame_num > priv->frame_num)
- priv->frame_num_offset = priv->prev_frame_num_offset + MaxFrameNum;
+ priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
else
- priv->frame_num_offset = priv->prev_frame_num_offset;
+ priv->frame_num_offset = prev_frame_num_offset;
// (8-7)
if (sps->num_ref_frames_in_pic_order_cnt_cycle != 0)
expected_poc += sps->offset_for_non_ref_pic;
// (8-10)
- if (!slice_hdr->field_pic_flag) {
+ switch (picture->structure) {
+ case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
priv->field_poc[TOP_FIELD] = expected_poc +
slice_hdr->delta_pic_order_cnt[0];
priv->field_poc[BOTTOM_FIELD] = priv->field_poc[TOP_FIELD] +
sps->offset_for_top_to_bottom_field +
slice_hdr->delta_pic_order_cnt[1];
- }
- else if (!slice_hdr->bottom_field_flag)
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
priv->field_poc[TOP_FIELD] = expected_poc +
slice_hdr->delta_pic_order_cnt[0];
- else
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
priv->field_poc[BOTTOM_FIELD] = expected_poc +
- sps->offset_for_top_to_bottom_field + slice_hdr->delta_pic_order_cnt[0];
+ sps->offset_for_top_to_bottom_field +
+ slice_hdr->delta_pic_order_cnt[0];
+ break;
+ }
}
/* 8.2.1.3 - Decoding process for picture order count type 2 */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
- guint temp_poc;
+ gint32 prev_frame_num_offset, temp_poc;
GST_DEBUG("decode picture order count type 2");
+ if (priv->prev_pic_has_mmco5)
+ prev_frame_num_offset = 0;
+ else
+ prev_frame_num_offset = priv->frame_num_offset;
+
// (8-11)
- if (picture->is_idr)
+ if (GST_VAAPI_PICTURE_IS_IDR(picture))
priv->frame_num_offset = 0;
else if (priv->prev_frame_num > priv->frame_num)
- priv->frame_num_offset = priv->prev_frame_num_offset + MaxFrameNum;
+ priv->frame_num_offset = prev_frame_num_offset + MaxFrameNum;
else
- priv->frame_num_offset = priv->prev_frame_num_offset;
+ priv->frame_num_offset = prev_frame_num_offset;
// (8-12)
- if (picture->is_idr)
+ if (GST_VAAPI_PICTURE_IS_IDR(picture))
temp_poc = 0;
else if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
temp_poc = 2 * (priv->frame_num_offset + priv->frame_num) - 1;
temp_poc = 2 * (priv->frame_num_offset + priv->frame_num);
// (8-13)
- if (!slice_hdr->field_pic_flag) {
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
priv->field_poc[TOP_FIELD] = temp_poc;
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
priv->field_poc[BOTTOM_FIELD] = temp_poc;
- }
- else if (slice_hdr->bottom_field_flag)
- priv->field_poc[BOTTOM_FIELD] = temp_poc;
- else
- priv->field_poc[TOP_FIELD] = temp_poc;
}
/* 8.2.1 - Decoding process for picture order count */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- VAPictureH264 * const pic = &picture->info;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
switch (sps->pic_order_cnt_type) {
case 0:
break;
}
- if (!(pic->flags & VA_PICTURE_H264_BOTTOM_FIELD))
- pic->TopFieldOrderCnt = priv->field_poc[TOP_FIELD];
- if (!(pic->flags & VA_PICTURE_H264_TOP_FIELD))
- pic->BottomFieldOrderCnt = priv->field_poc[BOTTOM_FIELD];
- picture->poc = MIN(pic->TopFieldOrderCnt, pic->BottomFieldOrderCnt);
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
+ picture->field_poc[TOP_FIELD] = priv->field_poc[TOP_FIELD];
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
+ picture->field_poc[BOTTOM_FIELD] = priv->field_poc[BOTTOM_FIELD];
+ picture->base.poc = MIN(picture->field_poc[0], picture->field_poc[1]);
}
static int
const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
- return picB->poc - picA->poc;
+ return picB->base.poc - picA->base.poc;
}
static int
const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
- return picA->poc - picB->poc;
+ return picA->base.poc - picB->base.poc;
}
static int
const GstVaapiPictureH264 * const picA = *(GstVaapiPictureH264 **)a;
const GstVaapiPictureH264 * const picB = *(GstVaapiPictureH264 **)b;
- return picA->info.frame_idx - picB->info.frame_idx;
+ return picA->long_term_frame_idx - picB->long_term_frame_idx;
}
/* 8.2.4.1 - Decoding process for picture numbers */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
- const guint field_flags = VA_PICTURE_H264_TOP_FIELD | VA_PICTURE_H264_BOTTOM_FIELD;
guint i;
GST_DEBUG("decode picture numbers");
for (i = 0; i < priv->short_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->short_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-27)
if (pic->frame_num > priv->frame_num)
pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
pic->frame_num_wrap = pic->frame_num;
// (8-28, 8-30, 8-31)
- if (!pic->field_pic_flag)
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture))
pic->pic_num = pic->frame_num_wrap;
else {
- if (((picture->info.flags ^ pic->info.flags) & field_flags) == 0)
+ if (pic->structure == picture->structure)
pic->pic_num = 2 * pic->frame_num_wrap + 1;
else
pic->pic_num = 2 * pic->frame_num_wrap;
for (i = 0; i < priv->long_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->long_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-29, 8-32, 8-33)
- if (!pic->field_pic_flag)
- pic->long_term_pic_num = pic->info.frame_idx;
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture))
+ pic->long_term_pic_num = pic->long_term_frame_idx;
else {
- if (((picture->info.flags ^ pic->info.flags) & field_flags) == 0)
- pic->long_term_pic_num = 2 * pic->info.frame_idx + 1;
+ if (pic->structure == picture->structure)
+ pic->long_term_pic_num = 2 * pic->long_term_frame_idx + 1;
else
- pic->long_term_pic_num = 2 * pic->info.frame_idx;
+ pic->long_term_pic_num = 2 * pic->long_term_frame_idx;
}
}
}
qsort(list, n, sizeof(*(list)), compare_picture_##compare_func)
static void
+/* 8.2.4.2.5 - Builds one parity-interleaved field reference list.
+
+   Appends entries from ref_list[] to RefPicList[] starting at
+   *RefPicList_count, alternating parity: the next reference whose
+   field structure equals picture_structure, then the next one with
+   the opposite structure, until ref_list[] is exhausted.
+   *RefPicList_count is updated to the resulting length.
+
+   NOTE(review): g_assert(n < 32) runs once per do-iteration, but up to
+   two entries may be appended per iteration — verify worst-case bounds
+   against the callers' 32-entry arrays. */
+init_picture_refs_fields_1(
+    guint picture_structure,
+    GstVaapiPictureH264 *RefPicList[32],
+    guint *RefPicList_count,
+    GstVaapiPictureH264 *ref_list[32],
+    guint ref_list_count
+)
+{
+    guint i, j, n;
+
+    /* i scans for same-parity references, j for opposite-parity ones;
+       both cursors persist across iterations of the outer loop. */
+    i = 0;
+    j = 0;
+    n = *RefPicList_count;
+    do {
+        g_assert(n < 32);
+        for (; i < ref_list_count; i++) {
+            if (ref_list[i]->structure == picture_structure) {
+                RefPicList[n++] = ref_list[i++];
+                break;
+            }
+        }
+        for (; j < ref_list_count; j++) {
+            if (ref_list[j]->structure != picture_structure) {
+                RefPicList[n++] = ref_list[j++];
+                break;
+            }
+        }
+    } while (i < ref_list_count || j < ref_list_count);
+    *RefPicList_count = n;
+}
+
+/* 8.2.4.2.5 - Initializes the field reference picture list RefPicList[]:
+   all short-term references first, then all long-term references, each
+   group interleaved by field parity relative to the current picture. */
+static inline void
+init_picture_refs_fields(
+    GstVaapiPictureH264 *picture,
+    GstVaapiPictureH264 *RefPicList[32],
+    guint *RefPicList_count,
+    GstVaapiPictureH264 *short_ref[32],
+    guint short_ref_count,
+    GstVaapiPictureH264 *long_ref[32],
+    guint long_ref_count
+)
+{
+    guint n = 0;
+
+    /* 8.2.4.2.5 - reference picture lists in fields */
+    init_picture_refs_fields_1(picture->structure, RefPicList, &n,
+        short_ref, short_ref_count);
+    init_picture_refs_fields_1(picture->structure, RefPicList, &n,
+        long_ref, long_ref_count);
+    *RefPicList_count = n;
+}
+
+/* Finds the inter-view reference picture with the supplied view id */
+/* Linear scan of priv->inter_views; returns the first picture whose
+   view_id matches, or NULL (after logging a warning) when absent. */
+static GstVaapiPictureH264 *
+find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
+{
+    GPtrArray * const inter_views = decoder->priv.inter_views;
+    guint i;
+
+    for (i = 0; i < inter_views->len; i++) {
+        GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
+        if (picture->base.view_id == view_id)
+            return picture;
+    }
+
+    GST_WARNING("failed to find inter-view reference picture for view_id: %d",
+        view_id);
+    return NULL;
+}
+
+/* Checks whether the view id exists in the supplied list of view ids */
+/* Returns TRUE iff view_id occurs within view_ids[0 .. num_view_ids). */
+static gboolean
+find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
+{
+    guint i;
+
+    for (i = 0; i < num_view_ids; i++) {
+        if (view_ids[i] == view_id)
+            return TRUE;
+    }
+    return FALSE;
+}
+
+/* Checks whether view_id appears in the given view's dependency lists.
+   Both the L0 and L1 lists are searched; is_anchor selects between the
+   anchor and non-anchor reference lists of the SPS MVC extension. */
+static gboolean
+find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
+    gboolean is_anchor)
+{
+    if (is_anchor)
+        return (find_view_id(view_id, view->anchor_ref_l0,
+                    view->num_anchor_refs_l0) ||
+                find_view_id(view_id, view->anchor_ref_l1,
+                    view->num_anchor_refs_l1));
+
+    return (find_view_id(view_id, view->non_anchor_ref_l0,
+                view->num_non_anchor_refs_l0) ||
+            find_view_id(view_id, view->non_anchor_ref_l1,
+                view->num_non_anchor_refs_l1));
+}
+
+/* Checks whether the inter-view reference picture with the supplied
+   view id is used for decoding the current view component picture */
+static gboolean
+is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
+    guint16 view_id, GstVaapiPictureH264 *picture)
+{
+    const GstH264SPS * const sps = get_sps(decoder);
+    gboolean is_anchor;
+
+    /* Inter-view prediction only applies to MVC pictures decoded with
+       an SPS that carries the MVC extension. */
+    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+        return FALSE;
+
+    /* Search the dependency lists of the picture's own view (indexed by
+       its view order count, voc). */
+    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+    return find_view_id_in_view(view_id,
+        &sps->extension.mvc.view[picture->base.voc], is_anchor);
+}
+
+/* Checks whether the supplied inter-view reference picture is used
+   for decoding the next view component pictures */
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture)
+{
+    const GstH264SPS * const sps = get_sps(decoder);
+    gboolean is_anchor;
+    guint i, num_views;
+
+    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+        return FALSE;
+
+    /* Only views with a higher view order index (voc) than the current
+       picture's can still reference it within this access unit. */
+    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+    num_views = sps->extension.mvc.num_views_minus1 + 1;
+    for (i = picture->base.voc + 1; i < num_views; i++) {
+        const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
+        if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
+            return TRUE;
+    }
+    return FALSE;
+}
+
+/* H.8.2.1 - Initialization process for inter-view prediction references */
+/* Appends inter-view reference pictures, looked up by the view ids in
+   view_ids[], to ref_list[] starting at *ref_list_count_ptr, without
+   exceeding num_refs total entries. View ids that cannot be resolved
+   (find_inter_view_reference() returned NULL) are silently skipped.
+   *ref_list_count_ptr is updated to the new list length. */
+static void
+init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
+    const guint16 *view_ids, guint num_view_ids)
+{
+    guint j, n;
+
+    n = *ref_list_count_ptr;
+    for (j = 0; j < num_view_ids && n < num_refs; j++) {
+        GstVaapiPictureH264 * const pic =
+            find_inter_view_reference(decoder, view_ids[j]);
+        if (pic)
+            ref_list[n++] = pic;
+    }
+    *ref_list_count_ptr = n;
+}
+
+/* Appends the inter-view prediction references for the given list
+   (0 = RefPicList0, 1 = RefPicList1) of the current picture, per
+   H.8.2.1. The view ids come from the SPS MVC extension entry for the
+   picture's view order count (voc); the anchor or non-anchor dependency
+   lists are selected according to the picture's anchor flag. No-op for
+   streams whose SPS carries no MVC extension. */
+static inline void
+init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    const GstH264SPS * const sps = get_sps(decoder);
+    const GstH264SPSExtMVCView *view;
+
+    GST_DEBUG("initialize reference picture list for inter-view prediction");
+
+    if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+        return;
+    view = &sps->extension.mvc.view[picture->base.voc];
+
+/* Token-pastes the list index (0/1) and dependency-list name into the
+   matching priv->RefPicListN fields, slice header ref counts, and SPS
+   view arrays, then delegates to init_picture_refs_mvc_1(). */
+#define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do {          \
+        init_picture_refs_mvc_1(decoder,                                \
+            priv->RefPicList##ref_list,                                 \
+            &priv->RefPicList##ref_list##_count,                        \
+            slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1,     \
+            view->view_list##_l##ref_list,                              \
+            view->num_##view_list##s_l##ref_list);                      \
+    } while (0)
+
+    if (list == 0) {
+        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+            INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
+        else
+            INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
+    }
+    else {
+        if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+            INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
+        else
+            INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
+    }
+
+#undef INVOKE_INIT_PICTURE_REFS_MVC
+}
+
+static void
init_picture_refs_p_slice(
GstVaapiDecoderH264 *decoder,
GstVaapiPictureH264 *picture,
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 **ref_list;
guint i;
GST_DEBUG("decode reference picture list for P and SP slices");
- if (!picture->field_pic_flag) {
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
/* 8.2.4.2.1 - P and SP slices in frames */
if (priv->short_ref_count > 0) {
ref_list = priv->RefPicList0;
GstVaapiPictureH264 *long_ref[32];
guint long_ref_count = 0;
- // XXX: handle second field if current field is marked as
- // "used for short-term reference"
if (priv->short_ref_count > 0) {
for (i = 0; i < priv->short_ref_count; i++)
short_ref[i] = priv->short_ref[i];
short_ref_count = i;
}
- // XXX: handle second field if current field is marked as
- // "used for long-term reference"
if (priv->long_ref_count > 0) {
for (i = 0; i < priv->long_ref_count; i++)
long_ref[i] = priv->long_ref[i];
long_ref_count = i;
}
- // XXX: handle 8.2.4.2.5
+ init_picture_refs_fields(
+ picture,
+ priv->RefPicList0, &priv->RefPicList0_count,
+ short_ref, short_ref_count,
+ long_ref, long_ref_count
+ );
+ }
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
}
}
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 **ref_list;
guint i, n;
GST_DEBUG("decode reference picture list for B slices");
- if (!picture->field_pic_flag) {
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
/* 8.2.4.2.3 - B slices in frames */
/* RefPicList0 */
// 1. Short-term references
ref_list = priv->RefPicList0;
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc < picture->poc)
+ if (priv->short_ref[i]->base.poc < picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_dec);
ref_list = &priv->RefPicList0[priv->RefPicList0_count];
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc >= picture->poc)
+ if (priv->short_ref[i]->base.poc >= picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_inc);
// 1. Short-term references
ref_list = priv->RefPicList1;
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc > picture->poc)
+ if (priv->short_ref[i]->base.poc > picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_inc);
ref_list = &priv->RefPicList1[priv->RefPicList1_count];
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc <= picture->poc)
+ if (priv->short_ref[i]->base.poc <= picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_dec);
if (priv->short_ref_count > 0) {
ref_list = short_ref0;
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc <= picture->poc)
+ if (priv->short_ref[i]->base.poc <= picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_dec);
ref_list = &short_ref0[short_ref0_count];
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc > picture->poc)
+ if (priv->short_ref[i]->base.poc > picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_inc);
if (priv->short_ref_count > 0) {
ref_list = short_ref1;
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc > picture->poc)
+ if (priv->short_ref[i]->base.poc > picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_inc);
ref_list = &short_ref1[short_ref1_count];
for (n = 0, i = 0; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->poc <= picture->poc)
+ if (priv->short_ref[i]->base.poc <= picture->base.poc)
ref_list[n++] = priv->short_ref[i];
}
SORT_REF_LIST(ref_list, n, poc_dec);
long_ref_count = i;
}
- // XXX: handle 8.2.4.2.5
- }
+ init_picture_refs_fields(
+ picture,
+ priv->RefPicList0, &priv->RefPicList0_count,
+ short_ref0, short_ref0_count,
+ long_ref, long_ref_count
+ );
+
+ init_picture_refs_fields(
+ picture,
+ priv->RefPicList1, &priv->RefPicList1_count,
+ short_ref1, short_ref1_count,
+ long_ref, long_ref_count
+ );
+ }
/* Check whether RefPicList1 is identical to RefPicList0, then
swap if necessary */
priv->RefPicList1[0] = priv->RefPicList1[1];
priv->RefPicList1[1] = tmp;
}
-}
-#undef SORT_REF_LIST
-
-static void
-clear_references(
- GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 **pictures,
- guint *picture_count
-)
-{
- const guint num_pictures = *picture_count;
- guint i;
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
- for (i = 0; i < num_pictures; i++)
- gst_vaapi_picture_replace(&pictures[i], NULL);
- *picture_count = 0;
+ /* RefPicList1 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
+ }
}
-static gboolean
-remove_reference_at(
- GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 **pictures,
- guint *picture_count,
- guint index
-)
-{
- guint num_pictures = *picture_count;
-
- g_return_val_if_fail(index < num_pictures, FALSE);
-
- GST_VAAPI_PICTURE_FLAG_UNSET(pictures[index], GST_VAAPI_PICTURE_FLAG_REFERENCE);
- if (index != --num_pictures)
- gst_vaapi_picture_replace(&pictures[index], pictures[num_pictures]);
- gst_vaapi_picture_replace(&pictures[num_pictures], NULL);
- *picture_count = num_pictures;
- return TRUE;
-}
+#undef SORT_REF_LIST
static gint
find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i;
for (i = 0; i < priv->short_ref_count; i++) {
static gint
find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i;
for (i = 0; i < priv->long_ref_count; i++) {
guint list
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
GstH264RefPicListModification *ref_pic_list_modification;
guint num_ref_pic_list_modifications;
GstVaapiPictureH264 **ref_list;
guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
- guint i, j, n, num_refs;
+ const guint16 *view_ids = NULL;
+ guint i, j, n, num_refs, num_view_ids = 0;
gint found_ref_idx;
- gint32 MaxPicNum, CurrPicNum, picNumPred;
+ gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
GST_DEBUG("modification process of reference picture list %u", list);
ref_list = priv->RefPicList0;
ref_list_count_ptr = &priv->RefPicList0_count;
num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l0;
+ num_view_ids = view->num_anchor_refs_l0;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l0;
+ num_view_ids = view->num_non_anchor_refs_l0;
+ }
+ }
}
else {
ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
ref_list = priv->RefPicList1;
ref_list_count_ptr = &priv->RefPicList1_count;
num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l1;
+ num_view_ids = view->num_anchor_refs_l1;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l1;
+ num_view_ids = view->num_non_anchor_refs_l1;
+ }
+ }
}
ref_list_count = *ref_list_count_ptr;
- if (picture->field_pic_flag) {
+ if (!GST_VAAPI_PICTURE_IS_FRAME(picture)) {
MaxPicNum = 1 << (sps->log2_max_frame_num_minus4 + 5); // 2 * MaxFrameNum
CurrPicNum = 2 * slice_hdr->frame_num + 1; // 2 * frame_num + 1
}
}
picNumPred = CurrPicNum;
+ picViewIdxPred = -1;
for (i = 0; i < num_ref_pic_list_modifications; i++) {
GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
gint32 PicNumF;
if (!ref_list[j])
continue;
- PicNumF = ref_list[j]->is_long_term ?
- MaxPicNum : ref_list[j]->pic_num;
- if (PicNumF != picNum)
+ PicNumF =
+ GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
+ ref_list[j]->pic_num : MaxPicNum;
+ if (PicNumF != picNum ||
+ ref_list[j]->base.view_id != picture->base.view_id)
ref_list[n++] = ref_list[j];
}
}
/* 8.2.4.3.2 - Long-term reference pictures */
- else {
+ else if (l->modification_of_pic_nums_idc == 2) {
for (j = num_refs; j > ref_list_idx; j--)
ref_list[j] = ref_list[j - 1];
gint32 LongTermPicNumF;
if (!ref_list[j])
continue;
- LongTermPicNumF = ref_list[j]->is_long_term ?
+ LongTermPicNumF =
+ GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
ref_list[j]->long_term_pic_num : INT_MAX;
- if (LongTermPicNumF != l->value.long_term_pic_num)
+ if (LongTermPicNumF != l->value.long_term_pic_num ||
+ ref_list[j]->base.view_id != picture->base.view_id)
+ ref_list[n++] = ref_list[j];
+ }
+ }
+
+ /* H.8.2.2.3 - Inter-view prediction reference pictures */
+ else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
+ (l->modification_of_pic_nums_idc == 4 ||
+ l->modification_of_pic_nums_idc == 5)) {
+ gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
+ gint32 picViewIdx, targetViewId;
+
+ // (H-6)
+ if (l->modification_of_pic_nums_idc == 4) {
+ picViewIdx = picViewIdxPred - abs_diff_view_idx;
+ if (picViewIdx < 0)
+ picViewIdx += num_view_ids;
+ }
+
+ // (H-7)
+ else {
+ picViewIdx = picViewIdxPred + abs_diff_view_idx;
+ if (picViewIdx >= num_view_ids)
+ picViewIdx -= num_view_ids;
+ }
+ picViewIdxPred = picViewIdx;
+
+ // (H-8, H-9)
+ targetViewId = view_ids[picViewIdx];
+
+ // (H-10)
+ for (j = num_refs; j > ref_list_idx; j--)
+ ref_list[j] = ref_list[j - 1];
+ ref_list[ref_list_idx++] =
+ find_inter_view_reference(decoder, targetViewId);
+ n = ref_list_idx;
+ for (j = ref_list_idx; j <= num_refs; j++) {
+ if (!ref_list[j])
+ continue;
+ if (ref_list[j]->base.view_id != targetViewId ||
+ ref_list[j]->base.poc != picture->base.poc)
ref_list[n++] = ref_list[j];
}
}
exec_picture_refs_modification_1(decoder, picture, slice_hdr, 1);
}
-static gboolean
+/* Rebuilds the short-term (priv->short_ref[]) and long-term
+   (priv->long_ref[]) reference arrays from the current DPB contents,
+   restricted to pictures of the same view as the picture being decoded
+   (H.8.2, MVC).
+
+   Frame decoding: only frame stores holding a complete frame are
+   considered; each reference is re-marked with frame structure and
+   linked to the complementary field buffer.
+   Field decoding: every field buffer of every frame store is considered
+   individually, keeping its own field structure.
+   Stale tail entries beyond the new counts are cleared to NULL. */
+static void
+init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    guint i, j, short_ref_count, long_ref_count;
+
+    short_ref_count = 0;
+    long_ref_count = 0;
+    if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
+        for (i = 0; i < priv->dpb_count; i++) {
+            GstVaapiFrameStore * const fs = priv->dpb[i];
+            GstVaapiPictureH264 *pic;
+            if (!gst_vaapi_frame_store_has_frame(fs))
+                continue;
+            pic = fs->buffers[0];
+            if (pic->base.view_id != picture->base.view_id)
+                continue;
+            if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+                priv->short_ref[short_ref_count++] = pic;
+            else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+                priv->long_ref[long_ref_count++] = pic;
+            pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+            pic->other_field = fs->buffers[1];
+        }
+    }
+    else {
+        for (i = 0; i < priv->dpb_count; i++) {
+            GstVaapiFrameStore * const fs = priv->dpb[i];
+            for (j = 0; j < fs->num_buffers; j++) {
+                GstVaapiPictureH264 * const pic = fs->buffers[j];
+                if (pic->base.view_id != picture->base.view_id)
+                    continue;
+                if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+                    priv->short_ref[short_ref_count++] = pic;
+                else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+                    priv->long_ref[long_ref_count++] = pic;
+                pic->structure = pic->base.structure;
+                /* j ^ 1: the complementary field in the same frame store */
+                pic->other_field = fs->buffers[j ^ 1];
+            }
+        }
+    }
+
+    /* Clear entries left over from the previous, possibly longer lists. */
+    for (i = short_ref_count; i < priv->short_ref_count; i++)
+        priv->short_ref[i] = NULL;
+    priv->short_ref_count = short_ref_count;
+
+    for (i = long_ref_count; i < priv->long_ref_count; i++)
+        priv->long_ref[i] = NULL;
+    priv->long_ref_count = long_ref_count;
+}
+
+static void
init_picture_refs(
GstVaapiDecoderH264 *decoder,
GstVaapiPictureH264 *picture,
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPicture * const base_picture = &picture->base;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, num_refs;
+ init_picture_ref_lists(decoder, picture);
init_picture_refs_pic_num(decoder, picture, slice_hdr);
priv->RefPicList0_count = 0;
priv->RefPicList1_count = 0;
- switch (base_picture->type) {
- case GST_VAAPI_PICTURE_TYPE_P:
- case GST_VAAPI_PICTURE_TYPE_SP:
+ switch (slice_hdr->type % 5) {
+ case GST_H264_P_SLICE:
+ case GST_H264_SP_SLICE:
init_picture_refs_p_slice(decoder, picture, slice_hdr);
break;
- case GST_VAAPI_PICTURE_TYPE_B:
+ case GST_H264_B_SLICE:
init_picture_refs_b_slice(decoder, picture, slice_hdr);
break;
default:
exec_picture_refs_modification(decoder, picture, slice_hdr);
- switch (base_picture->type) {
- case GST_VAAPI_PICTURE_TYPE_B:
+ switch (slice_hdr->type % 5) {
+ case GST_H264_B_SLICE:
num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
for (i = priv->RefPicList1_count; i < num_refs; i++)
priv->RefPicList1[i] = NULL;
priv->RefPicList1_count = num_refs;
// fall-through
- case GST_VAAPI_PICTURE_TYPE_P:
- case GST_VAAPI_PICTURE_TYPE_SP:
+ case GST_H264_P_SLICE:
+ case GST_H264_SP_SLICE:
num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
for (i = priv->RefPicList0_count; i < num_refs; i++)
priv->RefPicList0[i] = NULL;
default:
break;
}
- return TRUE;
}
static gboolean
init_picture(
GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 *picture,
- GstH264SliceHdr *slice_hdr,
- GstH264NalUnit *nalu
-)
+ GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPicture * const base_picture = &picture->base;
- VAPictureH264 *pic;
- guint i;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ priv->prev_frame_num = priv->frame_num;
priv->frame_num = slice_hdr->frame_num;
picture->frame_num = priv->frame_num;
picture->frame_num_wrap = priv->frame_num;
- picture->is_idr = nalu->type == GST_H264_NAL_SLICE_IDR;
- picture->field_pic_flag = slice_hdr->field_pic_flag;
- picture->bottom_field_flag = slice_hdr->bottom_field_flag;
picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
- base_picture->pts = gst_adapter_prev_timestamp(priv->adapter, NULL);
+ base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
+ base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+ base_picture->view_id = pi->view_id;
+ base_picture->voc = pi->voc;
+
+ /* Initialize extensions */
+ switch (pi->nalu.extension_type) {
+ case GST_H264_NAL_EXTENSION_MVC: {
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
+ if (mvc->inter_view_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (mvc->anchor_pic_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_ANCHOR);
+ break;
+ }
+ }
/* Reset decoder state for IDR pictures */
- if (picture->is_idr) {
+ if (pi->nalu.idr_pic_flag) {
GST_DEBUG("<IDR>");
- clear_references(decoder, priv->short_ref, &priv->short_ref_count);
- clear_references(decoder, priv->long_ref, &priv->long_ref_count );
- priv->prev_poc_msb = 0;
- priv->prev_poc_lsb = 0;
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
+ dpb_flush(decoder, picture);
}
- /* Initialize VA picture info */
- pic = &picture->info;
- pic->picture_id = picture->base.surface_id;
- pic->frame_idx = priv->frame_num;
- if (picture->field_pic_flag) {
- if (picture->bottom_field_flag)
- pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+ /* Initialize picture structure */
+ if (!slice_hdr->field_pic_flag)
+ base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+ else {
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_INTERLACED);
+ if (!slice_hdr->bottom_field_flag)
+ base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD;
else
- pic->flags |= VA_PICTURE_H264_TOP_FIELD;
- }
-
- /* Initialize base picture */
- switch (slice_hdr->type % 5) {
- case GST_H264_P_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
- break;
- case GST_H264_B_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
- break;
- case GST_H264_I_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
- break;
- case GST_H264_SP_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
- break;
- case GST_H264_SI_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
- break;
+ base_picture->structure = GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD;
}
+ picture->structure = base_picture->structure;
- if (nalu->ref_idc) {
+ /* Initialize reference flags */
+ if (pi->nalu.ref_idc) {
GstH264DecRefPicMarking * const dec_ref_pic_marking =
&slice_hdr->dec_ref_pic_marking;
- GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_REFERENCE);
- if (picture->is_idr) {
- if (dec_ref_pic_marking->long_term_reference_flag)
- picture->is_long_term = TRUE;
- }
- else if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
- for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
- GstH264RefPicMarking * const ref_pic_marking =
- &dec_ref_pic_marking->ref_pic_marking[i];
- switch (ref_pic_marking->memory_management_control_operation) {
- case 3:
- case 6:
- picture->is_long_term = TRUE;
- pic->frame_idx = ref_pic_marking->long_term_frame_idx;
- break;
- case 5:
- picture->has_mmco_5 = TRUE;
- break;
- }
- }
- }
- if (picture->is_long_term)
- pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
+
+ if (GST_VAAPI_PICTURE_IS_IDR(picture) &&
+ dec_ref_pic_marking->long_term_reference_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE);
else
- pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE);
}
init_picture_poc(decoder, picture, slice_hdr);
- if (!init_picture_refs(decoder, picture, slice_hdr)) {
- GST_ERROR("failed to initialize references");
- return FALSE;
- }
return TRUE;
}
+/* 8.2.5.3. Sliding window decoded reference picture marking process */
static gboolean
exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SPS * const sps = priv->sps;
- guint i, max_num_ref_frames, lowest_frame_num_index;
- gint32 lowest_frame_num;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
+ GstVaapiPictureH264 *ref_picture;
+ guint i, m, max_num_ref_frames;
GST_DEBUG("reference picture marking process (sliding window)");
+ /* Invoked once per frame: nothing to do when decoding the second
+ field of a complementary field pair */
+ if (!GST_VAAPI_PICTURE_IS_FIRST_FIELD(priv->current_picture))
+ return TRUE;
+
max_num_ref_frames = sps->num_ref_frames;
if (max_num_ref_frames == 0)
max_num_ref_frames = 1;
+ /* Fields are stored as individual entries, so twice as many fit */
+ if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture))
+ max_num_ref_frames <<= 1;
if (priv->short_ref_count + priv->long_ref_count < max_num_ref_frames)
return TRUE;
if (priv->short_ref_count < 1)
return FALSE;
+ /* Find the short-term reference with the smallest FrameNumWrap */
- lowest_frame_num = priv->short_ref[0]->frame_num_wrap;
- lowest_frame_num_index = 0;
- for (i = 1; i < priv->short_ref_count; i++) {
- if (priv->short_ref[i]->frame_num_wrap < lowest_frame_num) {
- lowest_frame_num = priv->short_ref[i]->frame_num_wrap;
- lowest_frame_num_index = i;
- }
+ for (m = 0, i = 1; i < priv->short_ref_count; i++) {
+ GstVaapiPictureH264 * const picture = priv->short_ref[i];
+ if (picture->frame_num_wrap < priv->short_ref[m]->frame_num_wrap)
+ m = i;
}
- remove_reference_at(
- decoder,
- priv->short_ref, &priv->short_ref_count,
- lowest_frame_num_index
- );
+ ref_picture = priv->short_ref[m];
+ gst_vaapi_picture_h264_set_reference(ref_picture, 0, TRUE);
+ ARRAY_REMOVE_INDEX(priv->short_ref, m);
+
+ /* Both fields need to be marked as "unused for reference", so
+ remove the other field from the short_ref[] list as well */
+ if (!GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture) && ref_picture->other_field) {
+ for (i = 0; i < priv->short_ref_count; i++) {
+ if (priv->short_ref[i] == ref_picture->other_field) {
+ ARRAY_REMOVE_INDEX(priv->short_ref, i);
+ break;
+ }
+ }
+ }
return TRUE;
}
+/* Computes the picNumX value used by MMCO operations 1 and 3 to locate
+ the targeted short-term reference picture (8.2.5.4.1, 8.2.5.4.3) */
+static inline gint32
+get_picNumX(GstVaapiPictureH264 *picture, GstH264RefPicMarking *ref_pic_marking)
+{
+ gint32 pic_num;
+
+ /* PicNum is FrameNumWrap for frames, 2 * FrameNumWrap + 1 for the
+ field of the current parity (8.2.4.1) */
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture))
+ pic_num = picture->frame_num_wrap;
+ else
+ pic_num = 2 * picture->frame_num_wrap + 1;
+ pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
+ return pic_num;
+}
+
+/* 8.2.5.4.1. Mark short-term reference picture as "unused for reference" */
+static void
+exec_ref_pic_marking_adaptive_mmco_1(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ gint32 i, picNumX;
+
+ picNumX = get_picNumX(picture, ref_pic_marking);
+ i = find_short_term_reference(decoder, picNumX);
+ /* Silently ignore an MMCO targeting a picture not in short_ref[] */
+ if (i < 0)
+ return;
+
+ /* NOTE(review): third argument presumably controls whether the
+ other field is unmarked too — confirm set_reference() semantics */
+ gst_vaapi_picture_h264_set_reference(priv->short_ref[i], 0,
+ GST_VAAPI_PICTURE_IS_FRAME(picture));
+ ARRAY_REMOVE_INDEX(priv->short_ref, i);
+}
+
+/* 8.2.5.4.2. Mark long-term reference picture as "unused for reference" */
+static void
+exec_ref_pic_marking_adaptive_mmco_2(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ gint32 i;
+
+ i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
+ /* Silently ignore an MMCO targeting a picture not in long_ref[] */
+ if (i < 0)
+ return;
+
+ gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0,
+ GST_VAAPI_PICTURE_IS_FRAME(picture));
+ ARRAY_REMOVE_INDEX(priv->long_ref, i);
+}
+
+/* 8.2.5.4.3. Assign LongTermFrameIdx to a short-term reference picture */
+static void
+exec_ref_pic_marking_adaptive_mmco_3(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *ref_picture, *other_field;
+ gint32 i, picNumX;
+
+ /* If LongTermFrameIdx is already in use, first mark that picture
+ as "unused for reference" and drop it from long_ref[] */
+ for (i = 0; i < priv->long_ref_count; i++) {
+ if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
+ break;
+ }
+ if (i != priv->long_ref_count) {
+ gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
+ ARRAY_REMOVE_INDEX(priv->long_ref, i);
+ }
+
+ picNumX = get_picNumX(picture, ref_pic_marking);
+ i = find_short_term_reference(decoder, picNumX);
+ if (i < 0)
+ return;
+
+ /* Move the picture from the short-term to the long-term list */
+ ref_picture = priv->short_ref[i];
+ ARRAY_REMOVE_INDEX(priv->short_ref, i);
+ priv->long_ref[priv->long_ref_count++] = ref_picture;
+
+ ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+ gst_vaapi_picture_h264_set_reference(ref_picture,
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
+ GST_VAAPI_PICTURE_IS_COMPLETE(picture));
+
+ /* Assign LongTermFrameIdx to the other field if it was also
+ marked as "used for long-term reference" */
+ other_field = ref_picture->other_field;
+ if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
+ other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+}
+
+/* 8.2.5.4.4. Mark pictures with LongTermFrameIdx > max_long_term_frame_idx
+ * as "unused for reference" */
+static void
+exec_ref_pic_marking_adaptive_mmco_4(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ gint32 i, long_term_frame_idx;
+
+ /* MaxLongTermFrameIdx = max_long_term_frame_idx_plus1 - 1; a value
+ of -1 ("no long-term frame indices") unmarks every entry below */
+ long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
+
+ for (i = 0; i < priv->long_ref_count; i++) {
+ if (priv->long_ref[i]->long_term_frame_idx <= long_term_frame_idx)
+ continue;
+ gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, FALSE);
+ ARRAY_REMOVE_INDEX(priv->long_ref, i);
+ /* ARRAY_REMOVE_INDEX shifts the tail down: re-examine this index */
+ i--;
+ }
+}
+
+/* 8.2.5.4.5. Mark all reference pictures as "unused for reference" */
+static void
+exec_ref_pic_marking_adaptive_mmco_5(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+ /* NOTE(review): dpb_flush() is expected to output and unmark all
+ reference pictures in the DPB — confirm against its definition */
+ dpb_flush(decoder, picture);
+
+ priv->prev_pic_has_mmco5 = TRUE;
+
+ /* The picture shall be inferred to have had frame_num equal to 0 (7.4.3) */
+ priv->frame_num = 0;
+ priv->frame_num_offset = 0;
+ picture->frame_num = 0;
+
+ /* Update TopFieldOrderCnt and BottomFieldOrderCnt (8.2.1): rebase
+ the field POCs so that the current picture's POC becomes 0 */
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD)
+ picture->field_poc[TOP_FIELD] -= picture->base.poc;
+ if (picture->structure != GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD)
+ picture->field_poc[BOTTOM_FIELD] -= picture->base.poc;
+ picture->base.poc = 0;
+}
+
+/* 8.2.5.4.6. Assign a long-term frame index to the current picture */
+static void
+exec_ref_pic_marking_adaptive_mmco_6(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *other_field;
+ guint i;
+
+ /* If LongTermFrameIdx is already in use, first mark that picture
+ as "unused for reference" and drop it from long_ref[] */
+ for (i = 0; i < priv->long_ref_count; i++) {
+ if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
+ break;
+ }
+ if (i != priv->long_ref_count) {
+ gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
+ ARRAY_REMOVE_INDEX(priv->long_ref, i);
+ }
+
+ picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+ gst_vaapi_picture_h264_set_reference(picture,
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
+ GST_VAAPI_PICTURE_IS_COMPLETE(picture));
+
+ /* Assign LongTermFrameIdx to the other field if it was also
+ marked as "used for long-term reference".
+ NOTE(review): mmco_3 reaches the sibling via ->other_field while
+ this uses base.parent_picture — confirm both denote the
+ complementary field and consider unifying */
+ other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
+ if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
+ other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
+}
+
/* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
static gboolean
exec_ref_pic_marking_adaptive(
GstH264DecRefPicMarking *dec_ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- gint32 pic_num, ref_idx;
guint i;
GST_DEBUG("reference picture marking process (adaptive memory control)");
+ /* Dispatch table indexed by memory_management_control_operation.
+ Entry 0 is NULL: MMCO 0 terminates the operation list and is
+ presumably never stored by the parser — confirm; hitting it
+ here falls into the error path below */
+ typedef void (*exec_ref_pic_marking_adaptive_mmco_func)(
+ GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture,
+ GstH264RefPicMarking *ref_pic_marking
+ );
+
+ static const exec_ref_pic_marking_adaptive_mmco_func mmco_funcs[] = {
+ NULL,
+ exec_ref_pic_marking_adaptive_mmco_1,
+ exec_ref_pic_marking_adaptive_mmco_2,
+ exec_ref_pic_marking_adaptive_mmco_3,
+ exec_ref_pic_marking_adaptive_mmco_4,
+ exec_ref_pic_marking_adaptive_mmco_5,
+ exec_ref_pic_marking_adaptive_mmco_6,
+ };
+
for (i = 0; i < dec_ref_pic_marking->n_ref_pic_marking; i++) {
GstH264RefPicMarking * const ref_pic_marking =
&dec_ref_pic_marking->ref_pic_marking[i];
- switch (ref_pic_marking->memory_management_control_operation) {
- case 1:
- // Mark short-term reference picture as "unused for reference"
- if (!picture->field_pic_flag)
- pic_num = picture->frame_num_wrap;
- else
- pic_num = 2 * picture->frame_num_wrap + 1;
- pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
- ref_idx = find_short_term_reference(decoder, pic_num);
- if (ref_idx < 0)
- break;
- remove_reference_at(
- decoder,
- priv->short_ref, &priv->short_ref_count,
- ref_idx
- );
- break;
- case 2:
- // Mark long-term reference picture as "unused for reference"
- pic_num = picture->long_term_pic_num;
- ref_idx = find_long_term_reference(decoder, pic_num);
- if (ref_idx < 0)
- break;
- remove_reference_at(
- decoder,
- priv->long_ref, &priv->long_ref_count,
- ref_idx
- );
- break;
- case 3:
- // Assign LongTermFrameIdx to a short-term reference picture
- if (!picture->field_pic_flag)
- pic_num = picture->frame_num_wrap;
- else
- pic_num = 2 * picture->frame_num_wrap + 1;
- pic_num -= ref_pic_marking->difference_of_pic_nums_minus1 + 1;
- ref_idx = find_short_term_reference(decoder, pic_num);
- if (ref_idx < 0)
- break;
- break;
- case 5:
- // Mark all reference pictures as "unused for reference"
- clear_references(decoder, priv->short_ref, &priv->short_ref_count);
- clear_references(decoder, priv->long_ref, &priv->long_ref_count );
- break;
- default:
- g_assert(0 && "unhandled MMCO");
- break;
+ const guint mmco = ref_pic_marking->memory_management_control_operation;
+ if (mmco < G_N_ELEMENTS(mmco_funcs) && mmco_funcs[mmco])
+ mmco_funcs[mmco](decoder, picture, ref_pic_marking);
+ else {
+ GST_ERROR("unhandled MMCO %u", mmco);
+ return FALSE;
}
}
return TRUE;
+/* 8.2.5. Executes the decoded reference picture marking process for
+ the current picture: adaptive MMCO marking when signalled in the
+ slice header, the sliding window process otherwise */
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPictureH264 **picture_ptr;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+ priv->prev_pic_has_mmco5 = FALSE;
+ priv->prev_pic_structure = picture->structure;
+
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
+ g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
return TRUE;
- if (!picture->is_idr) {
+ if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
GstH264DecRefPicMarking * const dec_ref_pic_marking =
- get_dec_ref_pic_marking(picture);
+ &picture->last_slice_hdr->dec_ref_pic_marking;
if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
return FALSE;
- return FALSE;
}
+ /* Fix: the previous code unconditionally returned FALSE after a
+ successful adaptive marking and never ran the sliding window
+ process (8.2.5.3) for the non-adaptive case */
+ else {
+ if (!exec_ref_pic_marking_sliding_window(decoder))
+ return FALSE;
+ }
}
-
- if (picture->is_long_term)
- picture_ptr = &priv->long_ref[priv->long_ref_count++];
- else
- picture_ptr = &priv->short_ref[priv->short_ref_count++];
- gst_vaapi_picture_replace(picture_ptr, picture);
return TRUE;
}
-/* Update picture order count */
+/* Resets a VAPictureH264 entry to the canonical "invalid/unused" state */
static void
-exit_picture_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+vaapi_init_picture(VAPictureH264 *pic)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SPS * const sps = priv->sps;
-
- switch (sps->pic_order_cnt_type) {
- case 0:
- if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
- break;
- if (picture->has_mmco_5) {
- priv->prev_poc_msb = 0;
- if (!picture->field_pic_flag || !picture->bottom_field_flag)
- priv->prev_poc_lsb = picture->info.TopFieldOrderCnt;
- else
- priv->prev_poc_lsb = 0;
- }
- else {
- priv->prev_poc_msb = priv->poc_msb;
- priv->prev_poc_lsb = priv->poc_lsb;
- }
- break;
- case 1:
- case 2:
- priv->prev_frame_num = priv->frame_num;
- if (picture->has_mmco_5)
- priv->prev_frame_num_offset = 0;
- else
- priv->prev_frame_num_offset = priv->frame_num_offset;
- break;
- }
+ pic->picture_id = VA_INVALID_ID;
+ pic->frame_idx = 0;
+ pic->flags = VA_PICTURE_H264_INVALID;
+ pic->TopFieldOrderCnt = 0;
+ pic->BottomFieldOrderCnt = 0;
}
+/* Translates a GstVaapiPictureH264 into a VAPictureH264 entry.
+ picture_structure == 0 selects the picture's own structure */
-static inline gboolean
-exit_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+static void
+vaapi_fill_picture(VAPictureH264 *pic, GstVaapiPictureH264 *picture,
+ guint picture_structure)
{
- /* Update picture order count */
- exit_picture_poc(decoder, picture);
+ if (!picture_structure)
+ picture_structure = picture->structure;
- /* Decoded reference picture marking process */
- if (!exec_ref_pic_marking(decoder, picture))
- return FALSE;
- return TRUE;
+ pic->picture_id = picture->base.surface_id;
+ pic->flags = 0;
+
+ /* Long-term entries carry LongTermFrameIdx in frame_idx,
+ everything else carries frame_num */
+ if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture)) {
+ pic->flags |= VA_PICTURE_H264_LONG_TERM_REFERENCE;
+ pic->frame_idx = picture->long_term_frame_idx;
+ }
+ else {
+ if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
+ pic->flags |= VA_PICTURE_H264_SHORT_TERM_REFERENCE;
+ pic->frame_idx = picture->frame_num;
+ }
+
+ /* Only the POC of the field(s) actually covered is propagated */
+ switch (picture_structure) {
+ case GST_VAAPI_PICTURE_STRUCTURE_FRAME:
+ pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
+ pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_TOP_FIELD:
+ pic->flags |= VA_PICTURE_H264_TOP_FIELD;
+ pic->TopFieldOrderCnt = picture->field_poc[TOP_FIELD];
+ pic->BottomFieldOrderCnt = 0;
+ break;
+ case GST_VAAPI_PICTURE_STRUCTURE_BOTTOM_FIELD:
+ pic->flags |= VA_PICTURE_H264_BOTTOM_FIELD;
+ pic->BottomFieldOrderCnt = picture->field_poc[BOTTOM_FIELD];
+ pic->TopFieldOrderCnt = 0;
+ break;
+ }
}
+/* Same as vaapi_fill_picture(), but with the reference flags cleared
+ for inter-view entries, as required for RefPicListX entries */
static void
-vaapi_init_picture(VAPictureH264 *pic)
+vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
+ GstVaapiPictureH264 *picture)
{
- pic->picture_id = VA_INVALID_ID;
- pic->frame_idx = 0;
- pic->flags = VA_PICTURE_H264_INVALID;
- pic->TopFieldOrderCnt = 0;
- pic->BottomFieldOrderCnt = 0;
+ vaapi_fill_picture(pic, picture, 0);
+
+ /* H.8.4 - MVC inter prediction and inter-view prediction process */
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
+ /* The inter-view reference components and inter-view only
+ reference components that are included in the reference
+ picture lists are considered as not being marked as "used for
+ short-term reference" or "used for long-term reference" */
+ pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
+ VA_PICTURE_H264_LONG_TERM_REFERENCE);
+ }
}
+/* Fills in the VAPictureParameterBufferH264 for the current picture */
static gboolean
-fill_picture(
- GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 *picture,
- GstH264SliceHdr *slice_hdr,
- GstH264NalUnit *nalu
-)
+fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPicture * const base_picture = &picture->base;
- GstH264SPS * const sps = priv->sps;
- GstH264PPS * const pps = priv->pps;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
VAPictureParameterBufferH264 * const pic_param = base_picture->param;
guint i, n;
/* Fill in VAPictureParameterBufferH264 */
- pic_param->CurrPic = picture->info;
- for (i = 0, n = 0; i < priv->short_ref_count; i++)
- pic_param->ReferenceFrames[n++] = priv->short_ref[i]->info;
- for (i = 0; i < priv->long_ref_count; i++)
- pic_param->ReferenceFrames[n++] = priv->long_ref[i]->info;
+ vaapi_fill_picture(&pic_param->CurrPic, picture, 0);
+
+ /* Collect same-view references and applicable inter-view references
+ from the DPB; entries beyond the VA array capacity are silently
+ dropped once ReferenceFrames[] is full */
+ for (i = 0, n = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if ((gst_vaapi_frame_store_has_reference(fs) &&
+ fs->view_id == picture->base.view_id) ||
+ (gst_vaapi_frame_store_has_inter_view(fs) &&
+ is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
+ vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
+ fs->buffers[0], fs->structure);
+ if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
+ break;
+ }
for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
vaapi_init_picture(&pic_param->ReferenceFrames[n]);
#define COPY_BFM(a, s, f) \
pic_param->a.bits.f = (s)->f
- pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
- pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
- pic_param->frame_num = priv->frame_num;
+ pic_param->picture_width_in_mbs_minus1 = priv->mb_width - 1;
+ pic_param->picture_height_in_mbs_minus1 = priv->mb_height - 1;
+ pic_param->frame_num = priv->frame_num;
COPY_FIELD(sps, bit_depth_luma_minus8);
COPY_FIELD(sps, bit_depth_chroma_minus8);
COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
pic_param->pic_fields.value = 0; /* reset all bits */
- pic_param->pic_fields.bits.field_pic_flag = slice_hdr->field_pic_flag;
+ pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
return TRUE;
}
+/* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
static gboolean
-fill_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- VAIQMatrixBufferH264 * const iq_matrix = picture->base.iq_matrix->param;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264PPS * const pps = slice_hdr->pps;
+ GstH264SPS * const sps = pps->sequence;
+ GstH264SliceHdr *prev_slice_hdr;
- /* XXX: we can only support 4:2:0 or 4:2:2 since ScalingLists8x8[]
- is not large enough to hold lists for 4:4:4 */
- if (priv->sps->chroma_format_idc == 3 &&
- sizeof(iq_matrix->ScalingList8x8) != sizeof(priv->scaling_list_8x8))
- return FALSE;
+ /* The very first slice always starts a new picture */
+ if (!prev_pi)
+ return TRUE;
+ prev_slice_hdr = &prev_pi->data.slice_hdr;
- /* Fill in VAIQMatrixBufferH264 */
- memcpy(iq_matrix->ScalingList4x4, priv->scaling_list_4x4,
- sizeof(iq_matrix->ScalingList4x4));
- memcpy(iq_matrix->ScalingList8x8, priv->scaling_list_8x8,
- sizeof(iq_matrix->ScalingList8x8));
- return TRUE;
+#define CHECK_EXPR(expr, field_name) do { \
+ if (!(expr)) { \
+ GST_DEBUG(field_name " differs in value"); \
+ return TRUE; \
+ } \
+ } while (0)
+
+#define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
+ CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
+
+ /* view_id differs in value and VOIdx of current slice_hdr is less
+ than the VOIdx of the prev_slice_hdr */
+ CHECK_VALUE(pi, prev_pi, view_id);
+
+ /* frame_num differs in value, regardless of inferred values to 0 */
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
+
+ /* pic_parameter_set_id differs in value (compared as PPS pointers;
+ NOTE(review): assumes the parser returns one interned PPS object
+ per id — confirm) */
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, pps);
+
+ /* field_pic_flag differs in value */
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, field_pic_flag);
+
+ /* bottom_field_flag is present in both and differs in value */
+ if (slice_hdr->field_pic_flag && prev_slice_hdr->field_pic_flag)
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
+
+ /* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
+ CHECK_EXPR((pi->nalu.ref_idc != 0) ==
+ (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
+
+ /* POC type is 0 for both and either pic_order_cnt_lsb differs in
+ value or delta_pic_order_cnt_bottom differs in value */
+ if (sps->pic_order_cnt_type == 0) {
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, pic_order_cnt_lsb);
+ if (pps->pic_order_present_flag && !slice_hdr->field_pic_flag)
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt_bottom);
+ }
+
+ /* POC type is 1 for both and either delta_pic_order_cnt[0]
+ differs in value or delta_pic_order_cnt[1] differs in value */
+ else if (sps->pic_order_cnt_type == 1) {
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[0]);
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, delta_pic_order_cnt[1]);
+ }
+
+ /* IdrPicFlag differs in value */
+ CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
+
+ /* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
+ if (pi->nalu.idr_pic_flag)
+ CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
+
+#undef CHECK_EXPR
+#undef CHECK_VALUE
+ return FALSE;
+}
+
+/* Detection of a new access unit, assuming we are already in presence
+ of a new picture. A new AU starts on same-view pictures, or when the
+ view order index wraps back below the previous one (MVC) */
+static inline gboolean
+is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
+{
+ if (!prev_pi || prev_pi->view_id == pi->view_id)
+ return TRUE;
+ return pi->voc < prev_pi->voc;
+}
+
+/* Finds the first field picture corresponding to the supplied picture.
+ Returns NULL when the incoming slice is not a field, when no matching
+ half-filled frame store exists for this view, or when frame_num does
+ not match (i.e. the two fields do not pair up) */
+static GstVaapiPictureH264 *
+find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstVaapiFrameStore *fs;
+
+ if (!slice_hdr->field_pic_flag)
+ return NULL;
+
+ fs = priv->prev_frames[pi->voc];
+ if (!fs || gst_vaapi_frame_store_has_frame(fs))
+ return NULL;
+
+ if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
+ return fs->buffers[0];
+ return NULL;
+}
+/* Starts decoding of a new picture: allocates (or pairs up, for second
+ fields) the GstVaapiPictureH264, applies cropping and quant matrices,
+ then initializes and fills the VA picture parameters */
static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu, GstH264SliceHdr *slice_hdr)
+decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPictureH264 *picture;
- GstVaapiDecoderStatus status;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
+ GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
+ GstVaapiPictureH264 *picture, *first_field;
+ GstVaapiDecoderStatus status;
+
+ g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+ g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+
+ /* Only decode base stream for MVC */
+ switch (sps->profile_idc) {
+ case GST_H264_PROFILE_MULTIVIEW_HIGH:
+ case GST_H264_PROFILE_STEREO_HIGH:
+ /* NOTE(review): "if (0)" disables dropping of MVC substream
+ pictures — looks like a deliberate placeholder while full MVC
+ decoding is enabled; confirm intent before removing */
+ if (0) {
+ GST_DEBUG("drop picture from substream");
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+ }
+ break;
+ }
status = ensure_context(decoder, sps);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- GST_ERROR("failed to reset context");
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- }
- if (priv->current_picture && !decode_current_picture(decoder))
- return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ priv->decoder_state = 0;
- picture = gst_vaapi_picture_h264_new(decoder);
- if (!picture) {
- GST_ERROR("failed to allocate picture");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ first_field = find_first_field(decoder, pi);
+ if (first_field) {
+ /* Re-use current picture where the first field was decoded */
+ picture = gst_vaapi_picture_h264_new_field(first_field);
+ if (!picture) {
+ GST_ERROR("failed to allocate field picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
}
- priv->current_picture = picture;
-
- picture->base.iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
- if (!picture->base.iq_matrix) {
- GST_ERROR("failed to allocate IQ matrix");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ else {
+ /* Create new picture */
+ picture = gst_vaapi_picture_h264_new(decoder);
+ if (!picture) {
+ GST_ERROR("failed to allocate picture");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ }
+ }
+ /* priv->current_picture takes its own reference; drop ours */
+ gst_vaapi_picture_replace(&priv->current_picture, picture);
+ gst_vaapi_picture_unref(picture);
+
+ /* Clear inter-view references list if this is the primary coded
+ picture of the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ g_ptr_array_set_size(priv->inter_views, 0);
+
+ /* Update cropping rectangle */
+ if (sps->frame_cropping_flag) {
+ GstVaapiRectangle crop_rect;
+ crop_rect.x = sps->crop_rect_x;
+ crop_rect.y = sps->crop_rect_y;
+ crop_rect.width = sps->crop_rect_width;
+ crop_rect.height = sps->crop_rect_height;
+ gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
}
- status = ensure_quant_matrix(decoder, pps);
+ status = ensure_quant_matrix(decoder, picture);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
GST_ERROR("failed to reset quantizer matrix");
return status;
}
- priv->sps = sps;
- priv->pps = pps;
-
- if (!init_picture(decoder, picture, slice_hdr, nalu))
+ if (!init_picture(decoder, picture, pi))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- if (!fill_picture(decoder, picture, slice_hdr, nalu))
+ if (!fill_picture(decoder, picture))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
-}
-
-static gboolean
-decode_picture_end(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
-{
- if (!fill_quant_matrix(decoder, picture))
- return FALSE;
- if (!exit_picture(decoder, picture))
- return FALSE;
- if (!dpb_add(decoder, picture))
- return FALSE;
- return TRUE;
-}
-
-#ifndef HAVE_GST_H264_SLICE_HDR_EPB_COUNT
-static guint
-get_epb_count(const guint8 *buf, guint buf_size, guint header_size)
-{
- guint i, n = 0;
- if (buf_size > header_size)
- buf_size = header_size;
-
- for (i = 2; i < buf_size; i++) {
- if (!buf[i - 2] && !buf[i - 1] && buf[i] == 0x03)
- i += 2, n++;
- }
- return n;
+ priv->decoder_state = pi->state;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-#endif
+/* Returns the bit offset of slice_data() from the start of the NAL
+ unit, compensating for emulation prevention bytes in the header */
static inline guint
-get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, GstH264NalUnit *nalu)
+get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
{
guint epb_count;
-#ifdef HAVE_GST_H264_SLICE_HDR_EPB_COUNT
epb_count = slice_hdr->n_emulation_prevention_bytes;
-#else
- epb_count = get_epb_count(
- nalu->data + nalu->offset,
- nalu->size,
- slice_hdr->header_size / 8
- );
-#endif
- return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
+ return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
}
+/* Fills in the prediction weight table of the VA slice parameters */
static gboolean
-fill_pred_weight_table(GstVaapiDecoderH264 *decoder, GstVaapiSliceH264 *slice)
+fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
guint num_weight_tables = 0;
gint i, j;
else
num_weight_tables = 0;
- slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
- slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
+ /* Default the denominators to 0 so they stay well-defined when the
+ slice carries no explicit weight tables; the parsed values are
+ only copied after the early-return below */
+ slice_param->luma_log2_weight_denom = 0;
+ slice_param->chroma_log2_weight_denom = 0;
slice_param->luma_weight_l0_flag = 0;
slice_param->chroma_weight_l0_flag = 0;
slice_param->luma_weight_l1_flag = 0;
if (num_weight_tables < 1)
return TRUE;
+ slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
+ slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
+
slice_param->luma_weight_l0_flag = 1;
for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
}
+/* Fills RefPicList0/RefPicList1 of the VA slice parameters from the
+ decoder's modified reference picture lists; unused trailing entries
+ are reset to the invalid state */
static gboolean
-fill_RefPicList(GstVaapiDecoderH264 *decoder, GstVaapiSliceH264 *slice)
+fill_RefPicList(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
guint i, num_ref_lists = 0;
slice_param->num_ref_idx_l0_active_minus1 = 0;
slice_hdr->num_ref_idx_l0_active_minus1;
for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
- slice_param->RefPicList0[i] = priv->RefPicList0[i]->info;
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
+ priv->RefPicList0[i]);
for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList0[i]);
slice_hdr->num_ref_idx_l1_active_minus1;
for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
- slice_param->RefPicList1[i] = priv->RefPicList1[i]->info;
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
+ priv->RefPicList1[i]);
for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList1[i]);
return TRUE;
}
+/* Fills in the VASliceParameterBufferH264 for the supplied slice,
+ including reference picture lists and prediction weight tables */
static gboolean
-fill_slice(
- GstVaapiDecoderH264 *decoder,
- GstVaapiSliceH264 *slice,
- GstH264NalUnit *nalu
-)
+fill_slice(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
{
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
/* Fill in VASliceParameterBufferH264 */
- slice_param->slice_data_bit_offset = get_slice_data_bit_offset(slice_hdr, nalu);
+ slice_param->slice_data_bit_offset =
+ get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
slice_param->slice_type = slice_hdr->type % 5;
slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
- if (!fill_RefPicList(decoder, slice))
+ if (!fill_RefPicList(decoder, slice, slice_hdr))
return FALSE;
- if (!fill_pred_weight_table(decoder, slice))
+ if (!fill_pred_weight_table(decoder, slice, slice_hdr))
return FALSE;
return TRUE;
}
static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu)
+decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiDecoderStatus status;
- GstVaapiPictureH264 *picture;
- GstVaapiSliceH264 *slice = NULL;
- GstH264SliceHdr *slice_hdr;
- GstH264ParserResult result;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstVaapiPictureH264 * const picture = priv->current_picture;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstVaapiSlice *slice;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
- GST_DEBUG("slice (%u bytes)", nalu->size);
+ GST_DEBUG("slice (%u bytes)", pi->nalu.size);
- slice = gst_vaapi_slice_h264_new(
- decoder,
- nalu->data + nalu->offset,
- nalu->size
- );
- if (!slice) {
- GST_ERROR("failed to allocate slice");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ if (!is_valid_state(pi->state,
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
+ GST_WARNING("failed to receive enough headers to decode slice");
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
- slice_hdr = &slice->slice_hdr;
- memset(slice_hdr, 0, sizeof(*slice_hdr));
- result = gst_h264_parser_parse_slice_hdr(priv->parser, nalu, slice_hdr, TRUE, TRUE);
- if (result != GST_H264_PARSER_OK) {
- status = get_status(result);
- goto error;
+ if (!ensure_pps(decoder, slice_hdr->pps)) {
+ GST_ERROR("failed to activate PPS");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- if (slice_hdr->first_mb_in_slice == 0) {
- status = decode_picture(decoder, nalu, slice_hdr);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- goto error;
+ if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
+ GST_ERROR("failed to activate SPS");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- picture = priv->current_picture;
- priv->mb_x = slice_hdr->first_mb_in_slice % priv->mb_width;
- priv->mb_y = slice_hdr->first_mb_in_slice / priv->mb_width; // FIXME: MBAFF or field
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
- if (!fill_slice(decoder, slice, nalu)) {
- status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- goto error;
+ /* Check whether this is the first/last slice in the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+
+ slice = GST_VAAPI_SLICE_NEW(H264, decoder,
+ (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
+ gst_buffer_unmap(buffer, &map_info);
+ if (!slice) {
+ GST_ERROR("failed to allocate slice");
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
- gst_vaapi_picture_add_slice(
- GST_VAAPI_PICTURE_CAST(picture),
- GST_VAAPI_SLICE_CAST(slice)
- );
- /* Commit picture for decoding if we reached the last slice */
- if (++priv->mb_y >= priv->mb_height) {
- if (!decode_current_picture(decoder)) {
- status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- goto error;
- }
- GST_DEBUG("done");
+ init_picture_refs(decoder, picture, slice_hdr);
+ if (!fill_slice(decoder, slice, pi)) {
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
-error:
- if (slice)
- gst_mini_object_unref(GST_MINI_OBJECT(slice));
- return status;
+ gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
+ picture->last_slice_hdr = slice_hdr;
+ priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static inline gint
}
static GstVaapiDecoderStatus
-decode_buffer(GstVaapiDecoderH264 *decoder, GstBuffer *buffer)
+decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
GstVaapiDecoderStatus status;
- GstH264ParserResult result;
- GstH264NalUnit nalu;
- gboolean is_eos;
- const guchar *buf;
- guint i, buf_size, nalu_size, size;
- guint32 start_code;
- gint ofs;
-
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
- is_eos = GST_BUFFER_IS_EOS(buffer);
- if (buf && buf_size > 0)
- gst_adapter_push(priv->adapter, gst_buffer_ref(buffer));
-
- size = gst_adapter_available(priv->adapter);
- do {
- if (size == 0) {
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
- }
-
- status = gst_vaapi_decoder_check_status(GST_VAAPI_DECODER(decoder));
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- break;
-
- status = GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- if (priv->is_avc) {
- if (size < priv->nal_length_size)
- break;
- buf = gst_adapter_peek(priv->adapter, priv->nal_length_size);
-
- nalu_size = 0;
- for (i = 0; i < priv->nal_length_size; i++)
- nalu_size = (nalu_size << 8) | buf[i];
-
- buf_size = priv->nal_length_size + nalu_size;
- if (size < buf_size)
- break;
- buffer = gst_adapter_take_buffer(priv->adapter, buf_size);
- size -= buf_size;
-
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
-
- result = gst_h264_parser_identify_nalu_avc(
- priv->parser,
- buf, 0, buf_size, priv->nal_length_size,
- &nalu
- );
- }
- else {
- if (size < 4)
- break;
- ofs = scan_for_start_code(priv->adapter, 0, size, &start_code);
- if (ofs < 0)
- break;
- gst_adapter_flush(priv->adapter, ofs);
- size -= ofs;
-
- ofs = G_UNLIKELY(size < 8) ? -1 :
- scan_for_start_code(priv->adapter, 4, size - 4, NULL);
- if (ofs < 0) {
- // Assume the whole NAL unit is present if end-of-stream
- if (!is_eos)
- break;
- ofs = size;
- }
- buffer = gst_adapter_take_buffer(priv->adapter, ofs);
- size -= ofs;
-
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
-
- result = gst_h264_parser_identify_nalu_unchecked(
- priv->parser,
- buf, 0, buf_size,
- &nalu
- );
- }
- status = get_status(result);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- gst_buffer_unref(buffer);
- break;
- }
-
- switch (nalu.type) {
- case GST_H264_NAL_SLICE_IDR:
- /* fall-through. IDR specifics are handled in init_picture() */
- case GST_H264_NAL_SLICE:
- status = decode_slice(decoder, &nalu);
- break;
- case GST_H264_NAL_SPS:
- status = decode_sps(decoder, &nalu);
- break;
- case GST_H264_NAL_PPS:
- status = decode_pps(decoder, &nalu);
- break;
- case GST_H264_NAL_SEI:
- status = decode_sei(decoder, &nalu);
- break;
- case GST_H264_NAL_SEQ_END:
- status = decode_sequence_end(decoder);
- break;
- case GST_H264_NAL_AU_DELIMITER:
- /* skip all Access Unit NALs */
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
- case GST_H264_NAL_FILLER_DATA:
- /* skip all Filler Data NALs */
- status = GST_VAAPI_DECODER_STATUS_SUCCESS;
- break;
- default:
- GST_WARNING("unsupported NAL unit type %d", nalu.type);
- status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
- break;
- }
- gst_buffer_unref(buffer);
- } while (status == GST_VAAPI_DECODER_STATUS_SUCCESS);
- if (is_eos && (status == GST_VAAPI_DECODER_STATUS_SUCCESS ||
- status == GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA))
+ priv->decoder_state |= pi->state;
+ switch (pi->nalu.type) {
+ case GST_H264_NAL_SPS:
+ status = decode_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = decode_subset_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_PPS:
+ status = decode_pps(decoder, unit);
+ break;
+ case GST_H264_NAL_SLICE_EXT:
+ case GST_H264_NAL_SLICE_IDR:
+ /* fall-through. IDR specifics are handled in init_picture() */
+ case GST_H264_NAL_SLICE:
+ status = decode_slice(decoder, unit);
+ break;
+ case GST_H264_NAL_SEQ_END:
+ case GST_H264_NAL_STREAM_END:
status = decode_sequence_end(decoder);
+ break;
+ case GST_H264_NAL_SEI:
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ default:
+ GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
+ status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
+ break;
+ }
return status;
}
static GstVaapiDecoderStatus
-decode_codec_data(GstVaapiDecoderH264 *decoder, GstBuffer *buffer)
+gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
+ const guchar *buf, guint buf_size)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
- GstH264NalUnit nalu;
+ GstVaapiDecoderUnit unit;
+ GstVaapiParserInfoH264 *pi = NULL;
GstH264ParserResult result;
- guchar *buf;
- guint buf_size;
guint i, ofs, num_sps, num_pps;
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
- if (!buf || buf_size == 0)
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ unit.parsed_info = NULL;
if (buf_size < 8)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
ofs = 6;
for (i = 0; i < num_sps; i++) {
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ unit.parsed_info = pi;
+
result = gst_h264_parser_identify_nalu_avc(
priv->parser,
buf, ofs, buf_size, 2,
- &nalu
+ &pi->nalu
);
- if (result != GST_H264_PARSER_OK)
- return get_status(result);
+ if (result != GST_H264_PARSER_OK) {
+ status = get_status(result);
+ goto cleanup;
+ }
- status = decode_sps(decoder, &nalu);
+ status = parse_sps(decoder, &unit);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
- ofs = nalu.offset + nalu.size;
+ goto cleanup;
+ ofs = pi->nalu.offset + pi->nalu.size;
+
+ status = decode_sps(decoder, &unit);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ goto cleanup;
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
}
num_pps = buf[ofs];
ofs++;
for (i = 0; i < num_pps; i++) {
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ unit.parsed_info = pi;
+
result = gst_h264_parser_identify_nalu_avc(
priv->parser,
buf, ofs, buf_size, 2,
- &nalu
+ &pi->nalu
);
- if (result != GST_H264_PARSER_OK)
- return get_status(result);
+ if (result != GST_H264_PARSER_OK) {
+ status = get_status(result);
+ goto cleanup;
+ }
- status = decode_pps(decoder, &nalu);
+ status = parse_pps(decoder, &unit);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
- ofs = nalu.offset + nalu.size;
+ goto cleanup;
+ ofs = pi->nalu.offset + pi->nalu.size;
+
+ status = decode_pps(decoder, &unit);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ goto cleanup;
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
}
- priv->is_avc = TRUE;
+ priv->is_avcC = TRUE;
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+cleanup:
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
return status;
}
-GstVaapiDecoderStatus
-gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base, GstBuffer *buffer)
+static GstVaapiDecoderStatus
+ensure_decoder(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(base);
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
- GstBuffer *codec_data;
-
- g_return_val_if_fail(priv->is_constructed,
- GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
if (!priv->is_opened) {
- priv->is_opened = gst_vaapi_decoder_h264_open(decoder, buffer);
+ priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
if (!priv->is_opened)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
- codec_data = GST_VAAPI_DECODER_CODEC_DATA(decoder);
- if (codec_data) {
- status = decode_codec_data(decoder, codec_data);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
+ status = gst_vaapi_decoder_decode_codec_data(
+ GST_VAAPI_DECODER_CAST(decoder));
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+ }
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
+ GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
+ GstVaapiParserInfoH264 *pi;
+ GstVaapiDecoderStatus status;
+ GstH264ParserResult result;
+ guchar *buf;
+ guint i, size, buf_size, nalu_size, flags;
+ guint32 start_code;
+ gint ofs, ofs2;
+
+ status = ensure_decoder(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ switch (priv->stream_alignment) {
+ case GST_VAAPI_STREAM_ALIGN_H264_NALU:
+ size = gst_adapter_available_fast(adapter);
+ break;
+ default:
+ size = gst_adapter_available(adapter);
+ break;
+ }
+
+ if (priv->is_avcC) {
+ if (size < priv->nal_length_size)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ buf = (guchar *)&start_code;
+ g_assert(priv->nal_length_size <= sizeof(start_code));
+ gst_adapter_copy(adapter, buf, 0, priv->nal_length_size);
+
+ nalu_size = 0;
+ for (i = 0; i < priv->nal_length_size; i++)
+ nalu_size = (nalu_size << 8) | buf[i];
+
+ buf_size = priv->nal_length_size + nalu_size;
+ if (size < buf_size)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ else {
+ if (size < 4)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+ buf_size = size;
+ else {
+ ofs = scan_for_start_code(adapter, 0, size, NULL);
+ if (ofs < 0)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ if (ofs > 0) {
+ gst_adapter_flush(adapter, ofs);
+ size -= ofs;
+ }
+
+ ofs2 = ps->input_offset2 - ofs - 4;
+ if (ofs2 < 4)
+ ofs2 = 4;
+
+ ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
+ scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
+ if (ofs < 0) {
+ // Assume the whole NAL unit is present if end-of-stream
+ if (!at_eos) {
+ ps->input_offset2 = size;
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ ofs = size;
+ }
+ buf_size = ofs;
+ }
+ }
+ ps->input_offset2 = 0;
+
+ buf = (guchar *)gst_adapter_map(adapter, buf_size);
+ if (!buf)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+
+ unit->size = buf_size;
+
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+
+ gst_vaapi_decoder_unit_set_parsed_info(unit,
+ pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
+
+ if (priv->is_avcC)
+ result = gst_h264_parser_identify_nalu_avc(priv->parser,
+ buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
+ else
+ result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
+ buf, 0, buf_size, &pi->nalu);
+ status = get_status(result);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ switch (pi->nalu.type) {
+ case GST_H264_NAL_SPS:
+ status = parse_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = parse_subset_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_PPS:
+ status = parse_pps(decoder, unit);
+ break;
+ case GST_H264_NAL_SEI:
+ status = parse_sei(decoder, unit);
+ break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ /* fall-through */
+ case GST_H264_NAL_SLICE_IDR:
+ case GST_H264_NAL_SLICE:
+ status = parse_slice(decoder, unit);
+ break;
+ default:
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+
+ flags = 0;
+ switch (pi->nalu.type) {
+ case GST_H264_NAL_AU_DELIMITER:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ /* fall-through */
+ case GST_H264_NAL_FILLER_DATA:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ case GST_H264_NAL_STREAM_END:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_STREAM_END;
+ /* fall-through */
+ case GST_H264_NAL_SEQ_END:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+ break;
+ case GST_H264_NAL_SPS:
+ case GST_H264_NAL_SUBSET_SPS:
+ case GST_H264_NAL_PPS:
+ case GST_H264_NAL_SEI:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ }
+ /* fall-through */
+ case GST_H264_NAL_SLICE_IDR:
+ case GST_H264_NAL_SLICE:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
+ if (is_new_picture(pi, priv->prev_slice_pi)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ if (is_new_access_unit(pi, priv->prev_slice_pi))
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
}
- }
- return decode_buffer(decoder, buffer);
+ gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
+ break;
+ case GST_H264_NAL_SPS_EXT:
+ case GST_H264_NAL_SLICE_AUX:
+ /* skip SPS extension and auxiliary slice for now */
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ case GST_H264_NAL_PREFIX_UNIT:
+ /* skip Prefix NAL units for now */
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ break;
+ default:
+ if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ break;
+ }
+ if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
+ priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+ GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
+
+ pi->nalu.data = NULL;
+ pi->state = priv->parser_state;
+ pi->flags = flags;
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
-static void
-gst_vaapi_decoder_h264_finalize(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(object);
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderStatus status;
+
+ status = ensure_decoder(decoder);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
+ return decode_unit(decoder, unit);
+}
- gst_vaapi_decoder_h264_destroy(decoder);
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
- G_OBJECT_CLASS(gst_vaapi_decoder_h264_parent_class)->finalize(object);
+ return decode_picture(decoder, unit);
}
-static void
-gst_vaapi_decoder_h264_constructed(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(object);
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GObjectClass *parent_class;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+
+ return decode_current_picture(decoder);
+}
- parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_h264_parent_class);
- if (parent_class->constructed)
- parent_class->constructed(object);
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
+{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
- priv->is_constructed = gst_vaapi_decoder_h264_create(decoder);
+ dpb_flush(decoder, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static void
gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
{
- GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiMiniObjectClass * const object_class =
+ GST_VAAPI_MINI_OBJECT_CLASS(klass);
GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
- g_type_class_add_private(klass, sizeof(GstVaapiDecoderH264Private));
-
- object_class->finalize = gst_vaapi_decoder_h264_finalize;
- object_class->constructed = gst_vaapi_decoder_h264_constructed;
+ object_class->size = sizeof(GstVaapiDecoderH264);
+ object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
+ decoder_class->create = gst_vaapi_decoder_h264_create;
+ decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
+ decoder_class->parse = gst_vaapi_decoder_h264_parse;
decoder_class->decode = gst_vaapi_decoder_h264_decode;
+ decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
+ decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
+ decoder_class->flush = gst_vaapi_decoder_h264_flush;
+
+ decoder_class->decode_codec_data =
+ gst_vaapi_decoder_h264_decode_codec_data;
}
-static void
-gst_vaapi_decoder_h264_init(GstVaapiDecoderH264 *decoder)
-{
- GstVaapiDecoderH264Private *priv;
-
- priv = GST_VAAPI_DECODER_H264_GET_PRIVATE(decoder);
- decoder->priv = priv;
- priv->parser = NULL;
- priv->sps = &priv->last_sps;
- priv->pps = &priv->last_pps;
- priv->current_picture = NULL;
- priv->dpb_count = 0;
- priv->dpb_size = 0;
- priv->profile = GST_VAAPI_PROFILE_H264_HIGH;
- priv->short_ref_count = 0;
- priv->long_ref_count = 0;
- priv->RefPicList0_count = 0;
- priv->RefPicList1_count = 0;
- priv->nal_length_size = 0;
- priv->width = 0;
- priv->height = 0;
- priv->mb_x = 0;
- priv->mb_y = 0;
- priv->mb_width = 0;
- priv->mb_height = 0;
- priv->adapter = NULL;
- priv->field_poc[0] = 0;
- priv->field_poc[1] = 0;
- priv->poc_msb = 0;
- priv->poc_lsb = 0;
- priv->prev_poc_msb = 0;
- priv->prev_poc_lsb = 0;
- priv->frame_num_offset = 0;
- priv->prev_frame_num_offset = 0;
- priv->frame_num = 0;
- priv->prev_frame_num = 0;
- priv->is_constructed = FALSE;
- priv->is_opened = FALSE;
- priv->is_avc = FALSE;
- priv->has_context = FALSE;
-
- memset(priv->dpb, 0, sizeof(priv->dpb));
- memset(priv->short_ref, 0, sizeof(priv->short_ref));
- memset(priv->long_ref, 0, sizeof(priv->long_ref));
- memset(priv->RefPicList0, 0, sizeof(priv->RefPicList0));
- memset(priv->RefPicList1, 0, sizeof(priv->RefPicList1));
+static inline const GstVaapiDecoderClass *
+gst_vaapi_decoder_h264_class(void)
+{
+ static GstVaapiDecoderH264Class g_class;
+ static gsize g_class_init = FALSE;
+
+ if (g_once_init_enter(&g_class_init)) {
+ gst_vaapi_decoder_h264_class_init(&g_class);
+ g_once_init_leave(&g_class_init, TRUE);
+ }
+ return GST_VAAPI_DECODER_CLASS(&g_class);
+}
+
+/**
+ * gst_vaapi_decoder_h264_set_alignment:
+ * @decoder: a #GstVaapiDecoderH264
+ * @alignment: the #GstVaapiStreamAlignH264
+ *
+ * Specifies how stream buffers are aligned / fed, i.e. the boundaries
+ * of each buffer that is supplied to the decoder. This could be no
+ * specific alignment, NAL unit boundaries, or access unit boundaries.
+ */
+void
+gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+ GstVaapiStreamAlignH264 alignment)
+{
+ g_return_if_fail(decoder != NULL);
+
+ decoder->priv.stream_alignment = alignment;
}
/**
GstVaapiDecoder *
gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
{
- GstVaapiDecoderH264 *decoder;
-
- g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
- g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
-
- decoder = g_object_new(
- GST_VAAPI_TYPE_DECODER_H264,
- "display", display,
- "caps", caps,
- NULL
- );
- if (!decoder->priv->is_constructed) {
- g_object_unref(decoder);
- return NULL;
- }
- return GST_VAAPI_DECODER_CAST(decoder);
+ return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
}