/*
* gstvaapidecoder_h264.c - H.264 decoder
*
- * Copyright (C) 2011-2012 Intel Corporation
+ * Copyright (C) 2011-2014 Intel Corporation
+ * Author: Gwenole Beauchesne <gwenole.beauchesne@intel.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public License
#include "gstvaapidecoder_priv.h"
#include "gstvaapidisplay_priv.h"
#include "gstvaapiobject_priv.h"
+#include "gstvaapiutils_h264_priv.h"
#define DEBUG 1
#include "gstvaapidebug.h"
/* Defined to 1 if strict ordering of DPB is needed. Only useful for debug */
#define USE_STRICT_DPB_ORDERING 0
+typedef struct _GstVaapiDecoderH264Private GstVaapiDecoderH264Private;
+typedef struct _GstVaapiDecoderH264Class GstVaapiDecoderH264Class;
typedef struct _GstVaapiFrameStore GstVaapiFrameStore;
typedef struct _GstVaapiFrameStoreClass GstVaapiFrameStoreClass;
-typedef struct _GstVaapiDecoderUnitH264 GstVaapiDecoderUnitH264;
+typedef struct _GstVaapiParserInfoH264 GstVaapiParserInfoH264;
typedef struct _GstVaapiPictureH264 GstVaapiPictureH264;
-typedef struct _GstVaapiSliceH264 GstVaapiSliceH264;
// Used for field_poc[]
#define TOP_FIELD 0
#define BOTTOM_FIELD 1
/* ------------------------------------------------------------------------- */
-/* --- H.264 Decoder Units --- */
+/* --- H.264 Parser Info --- */
/* ------------------------------------------------------------------------- */
-struct _GstVaapiDecoderUnitH264 {
- GstVaapiDecoderUnit base;
+/*
+ * Extended decoder unit flags:
+ *
+ * @GST_VAAPI_DECODER_UNIT_AU_START: marks the start of an access unit.
+ * @GST_VAAPI_DECODER_UNIT_AU_END: marks the end of an access unit.
+ */
+enum {
+ /* This flag does not strictly follow the definitions (7.4.1.2.3)
+ for detecting the start of an access unit as we are only
+ interested in knowing if the current slice is the first one or
+ the last one in the current access unit */
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
+
+ GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
+};
+
+#define GST_VAAPI_PARSER_INFO_H264(obj) \
+ ((GstVaapiParserInfoH264 *)(obj))
+
+struct _GstVaapiParserInfoH264 {
+ GstVaapiMiniObject parent_instance;
GstH264NalUnit nalu;
union {
GstH264SPS sps;
GstH264PPS pps;
+ GArray *sei;
GstH264SliceHdr slice_hdr;
} data;
+ guint state;
+ guint flags; // Same as decoder unit flags (persistent)
+ guint view_id; // View ID of slice
+ guint voc; // View order index (VOIdx) of slice
};
-static GstVaapiDecoderUnitH264 *
-gst_vaapi_decoder_unit_h264_new(guint size)
+/* GstVaapiMiniObject finalizer for GstVaapiParserInfoH264: releases the
+ * per-NAL payload stored in pi->data according to the parsed NAL type. */
+static void
+gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
{
-    GstVaapiDecoderUnitH264 *unit;
+    switch (pi->nalu.type) {
+    case GST_H264_NAL_SPS:
+    case GST_H264_NAL_SUBSET_SPS:
+        gst_h264_sps_clear(&pi->data.sps);
+        break;
+    case GST_H264_NAL_PPS:
+        gst_h264_pps_clear(&pi->data.pps);
+        break;
+    case GST_H264_NAL_SEI:
+        if (pi->data.sei) {
+            g_array_unref(pi->data.sei);
+            pi->data.sei = NULL;
+        }
+        break;
+    /* NOTE(review): remaining NAL types (e.g. slice headers) are assumed
+       to carry no dynamically allocated payload -- confirm against the
+       data union above. */
+    }
+}
- static const GstVaapiMiniObjectClass GstVaapiDecoderUnitH264Class = {
- sizeof(GstVaapiDecoderUnitH264),
- (GDestroyNotify)gst_vaapi_decoder_unit_finalize
+/* Returns the singleton mini-object class descriptor (instance size and
+ * finalizer) used to allocate GstVaapiParserInfoH264 objects. */
+static inline const GstVaapiMiniObjectClass *
+gst_vaapi_parser_info_h264_class(void)
+{
+    static const GstVaapiMiniObjectClass GstVaapiParserInfoH264Class = {
+        .size = sizeof(GstVaapiParserInfoH264),
+        .finalize = (GDestroyNotify)gst_vaapi_parser_info_h264_finalize
    };
+    return &GstVaapiParserInfoH264Class;
+}
- unit = (GstVaapiDecoderUnitH264 *)
- gst_vaapi_mini_object_new0(&GstVaapiDecoderUnitH264Class);
- if (!unit)
- return NULL;
-
- unit->base.size = size;
- return unit;
+static inline GstVaapiParserInfoH264 *
+gst_vaapi_parser_info_h264_new(void)
+{
+ return (GstVaapiParserInfoH264 *)
+ gst_vaapi_mini_object_new(gst_vaapi_parser_info_h264_class());
}
-/* ------------------------------------------------------------------------- */
-/* --- H.264 Pictures --- */
-/* ------------------------------------------------------------------------- */
+#define gst_vaapi_parser_info_h264_ref(pi) \
+ gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(pi))
-#define GST_VAAPI_PICTURE_H264_CAST(obj) \
- ((GstVaapiPictureH264 *)(obj))
+#define gst_vaapi_parser_info_h264_unref(pi) \
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(pi))
-#define GST_VAAPI_PICTURE_H264(obj) \
- GST_VAAPI_PICTURE_H264_CAST(obj)
+#define gst_vaapi_parser_info_h264_replace(old_pi_ptr, new_pi) \
+ gst_vaapi_mini_object_replace((GstVaapiMiniObject **)(old_pi_ptr), \
+ (GstVaapiMiniObject *)(new_pi))
-#define GST_VAAPI_IS_PICTURE_H264(obj) \
- (GST_VAAPI_PICTURE_H264(obj) != NULL)
+/* ------------------------------------------------------------------------- */
+/* --- H.264 Pictures --- */
+/* ------------------------------------------------------------------------- */
/*
* Extended picture flags:
*
* @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
+ * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
+ * may be used for inter-view prediction
+ * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
+ * i.e. a picture that is decoded with only inter-view prediction,
+ * and not inter prediction
+ * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
+ * access unit (AU)
+ * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
+ * access unit (AU)
* @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
* "used for short-term reference"
* @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
* reference picture (short-term reference or long-term reference)
*/
enum {
- GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+ GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+ GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
+ GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
+ GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
+ GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
GST_VAAPI_PICTURE_FLAG_REFERENCE),
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
- GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
+ GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
+#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
+
+#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
+
+#define GST_VAAPI_PICTURE_H264(picture) \
+ ((GstVaapiPictureH264 *)(picture))
+
struct _GstVaapiPictureH264 {
GstVaapiPicture base;
- GstH264PPS *pps;
+ GstH264SliceHdr *last_slice_hdr;
guint structure;
gint32 field_poc[2];
gint32 frame_num; // Original frame_num from slice_header()
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new(GstVaapiDecoderH264 *decoder)
{
- GstVaapiCodecObject *object;
-
- g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);
-
- object = gst_vaapi_codec_object_new(
+ return (GstVaapiPictureH264 *)gst_vaapi_codec_object_new(
&GstVaapiPictureH264Class,
GST_VAAPI_CODEC_BASE(decoder),
NULL, sizeof(VAPictureParameterBufferH264),
NULL, 0,
- 0
- );
- if (!object)
- return NULL;
- return GST_VAAPI_PICTURE_H264_CAST(object);
+ 0);
}
static inline void
gboolean other_field
)
{
- g_return_if_fail(GST_VAAPI_IS_PICTURE_H264(picture));
-
+ if (!picture)
+ return;
GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAGS_REFERENCE);
GST_VAAPI_PICTURE_FLAG_SET(picture, reference_flags);
static inline GstVaapiPictureH264 *
gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
{
- GstVaapiPicture *base_picture;
-
- g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), NULL);
-
- base_picture = gst_vaapi_picture_new_field(&picture->base);
- if (!base_picture)
- return NULL;
- return GST_VAAPI_PICTURE_H264_CAST(base_picture);
-}
-
-static inline GstVaapiSliceH264 *
-gst_vaapi_picture_h264_get_last_slice(GstVaapiPictureH264 *picture)
-{
- g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), NULL);
-
- if (G_UNLIKELY(picture->base.slices->len < 1))
- return NULL;
- return g_ptr_array_index(picture->base.slices,
- picture->base.slices->len - 1);
-}
-
-/* ------------------------------------------------------------------------- */
-/* --- Slices --- */
-/* ------------------------------------------------------------------------- */
-
-#define GST_VAAPI_SLICE_H264_CAST(obj) \
- ((GstVaapiSliceH264 *)(obj))
-
-#define GST_VAAPI_SLICE_H264(obj) \
- GST_VAAPI_SLICE_H264(obj)
-
-#define GST_VAAPI_IS_SLICE_H264(obj) \
- (GST_VAAPI_SLICE_H264(obj) != NULL)
-
-struct _GstVaapiSliceH264 {
- GstVaapiSlice base;
- GstH264SliceHdr slice_hdr; // parsed slice_header()
-};
-
-GST_VAAPI_CODEC_DEFINE_TYPE(GstVaapiSliceH264, gst_vaapi_slice_h264);
-
-void
-gst_vaapi_slice_h264_destroy(GstVaapiSliceH264 *slice)
-{
- gst_vaapi_slice_destroy(GST_VAAPI_SLICE(slice));
-}
+ g_return_val_if_fail(picture, NULL);
-gboolean
-gst_vaapi_slice_h264_create(
- GstVaapiSliceH264 *slice,
- const GstVaapiCodecObjectConstructorArgs *args
-)
-{
- if (!gst_vaapi_slice_create(GST_VAAPI_SLICE(slice), args))
- return FALSE;
- return TRUE;
-}
-
-static inline GstVaapiSliceH264 *
-gst_vaapi_slice_h264_new(
- GstVaapiDecoderH264 *decoder,
- const guint8 *data,
- guint data_size
-)
-{
- GstVaapiCodecObject *object;
-
- g_return_val_if_fail(GST_VAAPI_IS_DECODER(decoder), NULL);
-
- object = gst_vaapi_codec_object_new(
- &GstVaapiSliceH264Class,
- GST_VAAPI_CODEC_BASE(decoder),
- NULL, sizeof(VASliceParameterBufferH264),
- data, data_size,
- 0
- );
- if (!object)
- return NULL;
- return GST_VAAPI_SLICE_H264_CAST(object);
+ return (GstVaapiPictureH264 *)gst_vaapi_picture_new_field(&picture->base);
}
/* ------------------------------------------------------------------------- */
/* --- Frame Buffers (DPB) --- */
/* ------------------------------------------------------------------------- */
-#define GST_VAAPI_FRAME_STORE_CAST(obj) \
- ((GstVaapiFrameStore *)(obj))
-
-#define GST_VAAPI_FRAME_STORE(obj) \
- GST_VAAPI_FRAME_STORE_CAST(obj)
-
-#define GST_VAAPI_IS_FRAME_STORE(obj) \
- (GST_VAAPI_MINI_OBJECT(obj) != NULL)
-
struct _GstVaapiFrameStore {
/*< private >*/
GstVaapiMiniObject parent_instance;
+ guint view_id;
guint structure;
GstVaapiPictureH264 *buffers[2];
guint num_buffers;
gst_vaapi_frame_store_finalize
};
- g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), NULL);
-
fs = (GstVaapiFrameStore *)
gst_vaapi_mini_object_new(&GstVaapiFrameStoreClass);
if (!fs)
return NULL;
+ fs->view_id = picture->base.view_id;
fs->structure = picture->structure;
fs->buffers[0] = gst_vaapi_picture_ref(picture);
fs->buffers[1] = NULL;
{
guint field;
- g_return_val_if_fail(GST_VAAPI_IS_FRAME_STORE(fs), FALSE);
g_return_val_if_fail(fs->num_buffers == 1, FALSE);
- g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), FALSE);
g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
+ g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
gst_vaapi_picture_replace(&fs->buffers[fs->num_buffers++], picture);
if (picture->output_flag) {
return FALSE;
}
+/* Checks whether any picture held in the frame store is flagged as a
+ * potential inter-view (MVC) reference. */
+static gboolean
+gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
+{
+    guint i;
+
+    for (i = 0; i < fs->num_buffers; i++) {
+        if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
+            return TRUE;
+    }
+    return FALSE;
+}
+
#define gst_vaapi_frame_store_ref(fs) \
gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
/* --- H.264 Decoder --- */
/* ------------------------------------------------------------------------- */
-G_DEFINE_TYPE(GstVaapiDecoderH264,
- gst_vaapi_decoder_h264,
- GST_VAAPI_TYPE_DECODER)
+#define GST_VAAPI_DECODER_H264_CAST(decoder) \
+ ((GstVaapiDecoderH264 *)(decoder))
+
+typedef enum {
+ GST_H264_VIDEO_STATE_GOT_SPS = 1 << 0,
+ GST_H264_VIDEO_STATE_GOT_PPS = 1 << 1,
+ GST_H264_VIDEO_STATE_GOT_SLICE = 1 << 2,
-#define GST_VAAPI_DECODER_H264_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE((obj), \
- GST_VAAPI_TYPE_DECODER_H264, \
- GstVaapiDecoderH264Private))
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS = (
+ GST_H264_VIDEO_STATE_GOT_SPS |
+ GST_H264_VIDEO_STATE_GOT_PPS),
+ GST_H264_VIDEO_STATE_VALID_PICTURE = (
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS |
+ GST_H264_VIDEO_STATE_GOT_SLICE)
+} GstH264VideoState;
struct _GstVaapiDecoderH264Private {
GstH264NalParser *parser;
+ guint parser_state;
+ guint decoder_state;
+ GstVaapiStreamAlignH264 stream_alignment;
GstVaapiPictureH264 *current_picture;
- GstVaapiFrameStore *prev_frame;
- GstVaapiFrameStore *dpb[16];
+ GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
+ GstVaapiParserInfoH264 *active_sps;
+ GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
+ GstVaapiParserInfoH264 *active_pps;
+ GstVaapiParserInfoH264 *prev_pi;
+ GstVaapiParserInfoH264 *prev_slice_pi;
+ GstVaapiFrameStore **prev_frames;
+ guint prev_frames_alloc;
+ GstVaapiFrameStore **dpb;
guint dpb_count;
guint dpb_size;
+ guint dpb_size_max;
+ guint max_views;
GstVaapiProfile profile;
GstVaapiEntrypoint entrypoint;
GstVaapiChromaType chroma_type;
+ GPtrArray *inter_views;
GstVaapiPictureH264 *short_ref[32];
guint short_ref_count;
GstVaapiPictureH264 *long_ref[32];
gint32 prev_frame_num; // prevFrameNum
gboolean prev_pic_has_mmco5; // prevMmco5Pic
gboolean prev_pic_structure; // previous picture structure
- guint is_constructed : 1;
guint is_opened : 1;
- guint is_avc : 1;
- guint got_sps : 1;
- guint got_pps : 1;
+ guint is_avcC : 1;
guint has_context : 1;
guint progressive_sequence : 1;
};
+/**
+ * GstVaapiDecoderH264:
+ *
+ * A decoder based on H264.
+ */
+struct _GstVaapiDecoderH264 {
+ /*< private >*/
+ GstVaapiDecoder parent_instance;
+ GstVaapiDecoderH264Private priv;
+};
+
+/**
+ * GstVaapiDecoderH264Class:
+ *
+ * A decoder class based on H264.
+ */
+struct _GstVaapiDecoderH264Class {
+ /*< private >*/
+ GstVaapiDecoderClass parent_class;
+};
+
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture);
+
+static inline gboolean
+is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
+ GstVaapiFrameStore *fs)
+{
+ return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
+}
+
+/* Determines if the supplied profile is one of the MVC set */
+static gboolean
+is_mvc_profile(GstH264Profile profile)
+{
+ return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
+ profile == GST_H264_PROFILE_STEREO_HIGH;
+}
+
+/* Determines the view_id from the supplied NAL unit */
+static inline guint
+get_view_id(GstH264NalUnit *nalu)
+{
+ return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
+}
+
+/* Determines the view order index (VOIdx) from the supplied view_id */
+static gint
+get_view_order_index(GstH264SPS *sps, guint16 view_id)
+{
+ GstH264SPSExtMVC *mvc;
+ gint i;
+
+ if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return 0;
+
+ mvc = &sps->extension.mvc;
+ for (i = 0; i <= mvc->num_views_minus1; i++) {
+ if (mvc->view[i].view_id == view_id)
+ return i;
+ }
+ GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
+ return -1;
+}
+
+/* Determines NumViews */
+static guint
+get_num_views(GstH264SPS *sps)
+{
+ return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
+ sps->extension.mvc.num_views_minus1 : 0);
+}
+
/* Get number of reference frames to use */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
{
- guint max_dec_frame_buffering, MaxDpbMbs, PicSizeMbs;
+ guint num_views, max_dpb_frames;
+ guint max_dec_frame_buffering, PicSizeMbs;
+ GstVaapiLevelH264 level;
+ const GstVaapiH264LevelLimits *level_limits;
/* Table A-1 - Level limits */
- switch (sps->level_idc) {
- case 10: MaxDpbMbs = 396; break;
- case 11: MaxDpbMbs = 900; break;
- case 12: MaxDpbMbs = 2376; break;
- case 13: MaxDpbMbs = 2376; break;
- case 20: MaxDpbMbs = 2376; break;
- case 21: MaxDpbMbs = 4752; break;
- case 22: MaxDpbMbs = 8100; break;
- case 30: MaxDpbMbs = 8100; break;
- case 31: MaxDpbMbs = 18000; break;
- case 32: MaxDpbMbs = 20480; break;
- case 40: MaxDpbMbs = 32768; break;
- case 41: MaxDpbMbs = 32768; break;
- case 42: MaxDpbMbs = 34816; break;
- case 50: MaxDpbMbs = 110400; break;
- case 51: MaxDpbMbs = 184320; break;
- default:
- g_assert(0 && "unhandled level");
- break;
+ if (G_UNLIKELY(sps->level_idc == 11 && sps->constraint_set3_flag))
+ level = GST_VAAPI_LEVEL_H264_L1b;
+ else
+ level = gst_vaapi_utils_h264_get_level(sps->level_idc);
+ level_limits = gst_vaapi_utils_h264_get_level_limits(level);
+ if (G_UNLIKELY(!level_limits)) {
+ GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
+ max_dec_frame_buffering = 16;
}
-
- PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
- (sps->pic_height_in_map_units_minus1 + 1) *
- (sps->frame_mbs_only_flag ? 1 : 2));
- max_dec_frame_buffering = MaxDpbMbs / PicSizeMbs;
+ else {
+ PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
+ (sps->pic_height_in_map_units_minus1 + 1) *
+ (sps->frame_mbs_only_flag ? 1 : 2));
+ max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
+ }
+ if (is_mvc_profile(sps->profile_idc))
+ max_dec_frame_buffering <<= 1;
/* VUI parameters */
if (sps->vui_parameters_present_flag) {
else {
switch (sps->profile_idc) {
case 44: // CAVLC 4:4:4 Intra profile
- case 86: // Scalable High profile
- case 100: // High profile
- case 110: // High 10 profile
- case 122: // High 4:2:2 profile
- case 244: // High 4:4:4 Predictive profile
+ case GST_H264_PROFILE_SCALABLE_HIGH:
+ case GST_H264_PROFILE_HIGH:
+ case GST_H264_PROFILE_HIGH10:
+ case GST_H264_PROFILE_HIGH_422:
+ case GST_H264_PROFILE_HIGH_444:
if (sps->constraint_set3_flag)
max_dec_frame_buffering = 0;
break;
}
}
- if (max_dec_frame_buffering > 16)
- max_dec_frame_buffering = 16;
+ num_views = get_num_views(sps);
+ max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
+ if (max_dec_frame_buffering > max_dpb_frames)
+ max_dec_frame_buffering = max_dpb_frames;
else if (max_dec_frame_buffering < sps->num_ref_frames)
max_dec_frame_buffering = sps->num_ref_frames;
return MAX(1, max_dec_frame_buffering);
static void
dpb_remove_index(GstVaapiDecoderH264 *decoder, guint index)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, num_frames = --priv->dpb_count;
if (USE_STRICT_DPB_ORDERING) {
{
picture->output_needed = FALSE;
- if (fs) {
- if (--fs->output_needed > 0)
- return TRUE;
- picture = fs->buffers[0];
- }
+ if (--fs->output_needed > 0)
+ return TRUE;
- /* XXX: update cropping rectangle */
+ if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
+ return TRUE;
return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
}
static inline void
dpb_evict(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture, guint i)
{
- GstVaapiFrameStore * const fs = decoder->priv->dpb[i];
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiFrameStore * const fs = priv->dpb[i];
if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
dpb_remove_index(decoder, i);
}
-static gboolean
-dpb_bump(GstVaapiDecoderH264 *decoder)
+/* Finds the frame store holding the supplied picture */
+/* Returns its DPB index, or -1 when the picture is not in the DPB. */
+static gint
+dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
-    GstVaapiDecoderH264Private * const priv = decoder->priv;
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    gint i, j;
+
+    for (i = 0; i < priv->dpb_count; i++) {
+        GstVaapiFrameStore * const fs = priv->dpb[i];
+        for (j = 0; j < fs->num_buffers; j++) {
+            if (fs->buffers[j] == picture)
+                return i;
+        }
+    }
+    return -1;
+}
+
+/* Finds the picture with the lowest POC that needs to be output */
+static gint
+dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 *found_picture = NULL;
guint i, j, found_index;
- gboolean success;
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
if (!fs->output_needed)
continue;
+ if (picture && picture->base.view_id != fs->view_id)
+ continue;
for (j = 0; j < fs->num_buffers; j++) {
- GstVaapiPictureH264 * const picture = fs->buffers[j];
- if (!picture->output_needed)
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed)
continue;
- if (!found_picture || found_picture->base.poc > picture->base.poc)
- found_picture = picture, found_index = i;
+ if (!found_picture || found_picture->base.poc > pic->base.poc ||
+ (found_picture->base.poc == pic->base.poc &&
+ found_picture->base.voc > pic->base.voc))
+ found_picture = pic, found_index = i;
}
}
- if (!found_picture)
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
+/* Finds the picture with the lowest VOC that needs to be output */
+static gint
+dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture = NULL;
+ guint i, j, found_index;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (!fs->output_needed || fs->view_id == picture->base.view_id)
+ continue;
+ for (j = 0; j < fs->num_buffers; j++) {
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed || pic->base.poc != picture->base.poc)
+ continue;
+ if (!found_picture || found_picture->base.voc > pic->base.voc)
+ found_picture = pic, found_index = i;
+ }
+ }
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
+/* Outputs, in VOC order, the other view components belonging to the same
+   access unit as @picture, stopping before the supplied @voc bound.
+   No-op for single-view streams. Returns FALSE on output failure. */
+static gboolean
+dpb_output_other_views(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture, guint voc)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    GstVaapiPictureH264 *found_picture;
+    gint found_index;
+    gboolean success;
+
+    if (priv->max_views == 1)
+        return TRUE;
+
+    /* Emit all other view components that were in the same access
+       unit as the picture we have just found */
+    found_picture = picture;
+    for (;;) {
+        found_index = dpb_find_lowest_voc(decoder, found_picture,
+            &found_picture);
+        if (found_index < 0 || found_picture->base.voc >= voc)
+            break;
+        success = dpb_output(decoder, priv->dpb[found_index], found_picture);
+        dpb_evict(decoder, found_picture, found_index);
+        if (!success)
+            return FALSE;
+    }
+    return TRUE;
+}
+
+/* "Bumps" one picture out of the DPB: finds the lowest-POC picture that
+   needs output (restricted to @picture's view when @picture is non-NULL),
+   outputs it -- together with, for MVC, the sibling view components of its
+   access unit -- then evicts its frame store if no longer needed.
+   Returns FALSE when no picture could be output. */
+static gboolean
+dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    GstVaapiPictureH264 *found_picture;
+    gint found_index;
+    gboolean success;
+
+    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
+    if (found_index < 0)
        return FALSE;
+    if (picture && picture->base.poc != found_picture->base.poc)
+        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
+
    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
    dpb_evict(decoder, found_picture, found_index);
+    if (priv->max_views == 1)
+        return success;
+
+    if (picture && picture->base.poc != found_picture->base.poc)
+        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
    return success;
}
+/* Drops frame stores from the DPB. With @picture == NULL the whole DPB is
+   cleared; otherwise only frame stores of the same view are dropped and
+   the remaining entries are compacted. The per-view "previous frame"
+   pointers are additionally reset on a flush-all, or when @picture starts
+   a new access unit. */
static void
-dpb_clear(GstVaapiDecoderH264 *decoder)
+dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
-    GstVaapiDecoderH264Private * const priv = decoder->priv;
-    guint i;
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    guint i, n;
-    for (i = 0; i < priv->dpb_count; i++)
+    for (i = 0; i < priv->dpb_count; i++) {
+        if (picture && picture->base.view_id != priv->dpb[i]->view_id)
+            continue;
        gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
-    priv->dpb_count = 0;
+    }
-    gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
+    /* Compact the resulting DPB, i.e. remove holes */
+    for (i = 0, n = 0; i < priv->dpb_count; i++) {
+        if (priv->dpb[i]) {
+            if (i != n) {
+                priv->dpb[n] = priv->dpb[i];
+                priv->dpb[i] = NULL;
+            }
+            n++;
+        }
+    }
+    priv->dpb_count = n;
+
+    /* Clear previous frame buffers only if this is a "flush-all" operation,
+       or if the picture is the first one in the access unit */
+    if (priv->prev_frames && (!picture ||
+        GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+        GST_VAAPI_PICTURE_FLAG_AU_START))) {
+        for (i = 0; i < priv->max_views; i++)
+            gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
+    }
}
static void
-dpb_flush(GstVaapiDecoderH264 *decoder)
+dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- while (dpb_bump(decoder))
+ while (dpb_bump(decoder, picture))
;
- dpb_clear(decoder);
+ dpb_clear(decoder, picture);
+}
+
+/* MVC: removes inter-view-only components of the current access unit that
+   are no longer needed -- not awaiting output, not marked as reference,
+   and not required for inter-view prediction of subsequent pictures. */
+static void
+dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    const gboolean is_last_picture = /* in the access unit */
+        GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+    guint i;
+
+    // Remove all unused inter-view only reference components of the current AU
+    i = 0;
+    while (i < priv->dpb_count) {
+        GstVaapiFrameStore * const fs = priv->dpb[i];
+        if (fs->view_id != picture->base.view_id &&
+            !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
+            (is_last_picture ||
+             !is_inter_view_reference_for_next_frames(decoder, fs)))
+            dpb_remove_index(decoder, i);
+        else
+            i++;
+    }
+}
static gboolean
dpb_add(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiFrameStore *fs;
- guint i, j;
+ guint i;
+
+ if (priv->max_views > 1)
+ dpb_prune_mvc(decoder, picture);
// Remove all unused pictures
if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
i = 0;
while (i < priv->dpb_count) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
+ if (fs->view_id == picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
dpb_remove_index(decoder, i);
else
i++;
}
// Check if picture is the second field and the first field is still in DPB
- fs = priv->prev_frame;
- if (fs && !gst_vaapi_frame_store_has_frame(fs)) {
- g_return_val_if_fail(fs->num_buffers == 1, FALSE);
- g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FRAME(picture), FALSE);
- g_return_val_if_fail(!GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture), FALSE);
- return gst_vaapi_frame_store_add(fs, picture);
+ if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
+ !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
+ const gint found_index = dpb_find_picture(decoder,
+ GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
+ if (found_index >= 0)
+ return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
+
+ // ... also check the previous picture that was immediately output
+ fs = priv->prev_frames[picture->base.voc];
+ if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
+ if (!gst_vaapi_frame_store_add(fs, picture))
+ return FALSE;
+ return dpb_output(decoder, fs, picture);
+ }
}
// Create new frame store, and split fields if necessary
fs = gst_vaapi_frame_store_new(picture);
if (!fs)
return FALSE;
- gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
+ gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
gst_vaapi_frame_store_unref(fs);
+ if (picture->output_flag) {
+ picture->output_needed = TRUE;
+ fs->output_needed++;
+ }
+
if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
if (!gst_vaapi_frame_store_split_fields(fs))
return FALSE;
// C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
while (priv->dpb_count == priv->dpb_size) {
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
- if (picture->output_flag) {
- picture->output_needed = TRUE;
- fs->output_needed++;
- }
}
// C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
else {
- if (!picture->output_flag)
+ const gboolean StoreInterViewOnlyRefFlag =
+ !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_AU_END) &&
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
return TRUE;
while (priv->dpb_count == priv->dpb_size) {
- gboolean found_picture = FALSE;
- for (i = 0; !found_picture && i < priv->dpb_count; i++) {
- GstVaapiFrameStore * const fs = priv->dpb[i];
- if (!fs->output_needed)
- continue;
- for (j = 0; !found_picture && j < fs->num_buffers; j++)
- found_picture = fs->buffers[j]->output_needed &&
- fs->buffers[j]->base.poc < picture->base.poc;
+ GstVaapiPictureH264 *found_picture;
+ if (!StoreInterViewOnlyRefFlag) {
+ if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
+ found_picture->base.poc > picture->base.poc)
+ return dpb_output(decoder, fs, picture);
}
- if (!found_picture)
- return dpb_output(decoder, NULL, picture);
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
- picture->output_needed = TRUE;
- fs->output_needed++;
}
+ gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
return TRUE;
}
-static inline void
-dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+/* (Re)allocates the DPB array so it can hold at least @dpb_size frame
+ * stores, zero-initializing any newly grown slots. Returns TRUE on
+ * success, FALSE on allocation failure (existing DPB left untouched). */
+static gboolean
+dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
{
-    GstVaapiDecoderH264Private * const priv = decoder->priv;
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+    if (dpb_size > priv->dpb_size_max) {
+        /* Grow through a temporary pointer: assigning the result of
+         * g_try_realloc_n() straight to priv->dpb would lose -- and thus
+         * leak -- the original array (and the frame stores it references)
+         * if the reallocation failed. */
+        GstVaapiFrameStore ** const new_dpb =
+            g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
+        if (!new_dpb)
+            return FALSE;
+        priv->dpb = new_dpb;
+        memset(&priv->dpb[priv->dpb_size_max], 0,
+            (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
+        priv->dpb_size_max = dpb_size;
+    }
+    priv->dpb_size = dpb_size;
-    priv->dpb_size = get_max_dec_frame_buffering(sps);
    GST_DEBUG("DPB size %u", priv->dpb_size);
+    return TRUE;
+}
+
+/* GDestroyNotify for priv->inter_views entries: clears the INTER_VIEW
+   flag and drops the reference held by the array. NULL-safe. */
+static void
+unref_inter_view(GstVaapiPictureH264 *picture)
+{
+    if (!picture)
+        return;
+    GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+    gst_vaapi_picture_unref(picture);
+}
+
+/* Resets MVC resources: lazily creates the inter-view reference array and
+ * resizes the per-view "previous frame" array to priv->max_views entries,
+ * releasing frame stores of views that no longer exist. Returns TRUE on
+ * success, FALSE on allocation failure (existing state left valid). */
+static gboolean
+mvc_reset(GstVaapiDecoderH264 *decoder)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    GstVaapiFrameStore **new_frames;
+    guint i;
+
+    // Resize array of inter-view references
+    if (!priv->inter_views) {
+        priv->inter_views = g_ptr_array_new_full(priv->max_views,
+            (GDestroyNotify)unref_inter_view);
+        if (!priv->inter_views)
+            return FALSE;
+    }
+
+    // Resize array of previous frame buffers
+    for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
+        gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
+
+    /* Reallocate through a temporary pointer: overwriting priv->prev_frames
+     * with the g_try_realloc_n() result would, on failure, leak the old
+     * array together with the frame-store references it still holds, while
+     * zeroing prev_frames_alloc left the bookkeeping inconsistent. With a
+     * temporary, the old array and its capacity stay valid on failure. */
+    new_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
+        sizeof(*priv->prev_frames));
+    if (!new_frames)
+        return FALSE;
+    priv->prev_frames = new_frames;
+    for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
+        priv->prev_frames[i] = NULL;
+    priv->prev_frames_alloc = priv->max_views;
+    return TRUE;
+}
static GstVaapiDecoderStatus
static void
gst_vaapi_decoder_h264_close(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gst_vaapi_picture_replace(&priv->current_picture, NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
- dpb_clear(decoder);
+ dpb_clear(decoder, NULL);
+
+ if (priv->inter_views) {
+ g_ptr_array_unref(priv->inter_views);
+ priv->inter_views = NULL;
+ }
if (priv->parser) {
gst_h264_nal_parser_free(priv->parser);
static gboolean
gst_vaapi_decoder_h264_open(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gst_vaapi_decoder_h264_close(decoder);
}
static void
-gst_vaapi_decoder_h264_destroy(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_destroy(GstVaapiDecoder *base_decoder)
{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i;
+
gst_vaapi_decoder_h264_close(decoder);
+
+ g_free(priv->dpb);
+ priv->dpb = NULL;
+ priv->dpb_size = 0;
+
+ g_free(priv->prev_frames);
+ priv->prev_frames = NULL;
+ priv->prev_frames_alloc = 0;
+
+ for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
+ gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
+
+ for (i = 0; i < G_N_ELEMENTS(priv->sps); i++)
+ gst_vaapi_parser_info_h264_replace(&priv->sps[i], NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->active_sps, NULL);
}
static gboolean
-gst_vaapi_decoder_h264_create(GstVaapiDecoderH264 *decoder)
+gst_vaapi_decoder_h264_create(GstVaapiDecoder *base_decoder)
{
- if (!GST_VAAPI_DECODER_CODEC(decoder))
- return FALSE;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+
+ priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
+ priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
+ priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
+ priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+ priv->progressive_sequence = TRUE;
return TRUE;
}
-static guint
-h264_get_profile(GstH264SPS *sps)
+/* Activates the supplied PPS */
+static GstH264PPS *
+ensure_pps(GstVaapiDecoderH264 *decoder, GstH264PPS *pps)
{
- guint profile = 0;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = priv->pps[pps->id];
- switch (sps->profile_idc) {
- case 66:
- profile = GST_VAAPI_PROFILE_H264_BASELINE;
- break;
- case 77:
- profile = GST_VAAPI_PROFILE_H264_MAIN;
+ gst_vaapi_parser_info_h264_replace(&priv->active_pps, pi);
+ return pi ? &pi->data.pps : NULL;
+}
+
+/* Returns the active PPS */
+static inline GstH264PPS *
+get_pps(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiParserInfoH264 * const pi = decoder->priv.active_pps;
+
+ return pi ? &pi->data.pps : NULL;
+}
+
+/* Activates the supplied SPS */
+static GstH264SPS *
+ensure_sps(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = priv->sps[sps->id];
+
+ gst_vaapi_parser_info_h264_replace(&priv->active_sps, pi);
+ return pi ? &pi->data.sps : NULL;
+}
+
+/* Returns the active SPS */
+static inline GstH264SPS *
+get_sps(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiParserInfoH264 * const pi = decoder->priv.active_sps;
+
+ return pi ? &pi->data.sps : NULL;
+}
+
+static void
+fill_profiles(GstVaapiProfile profiles[16], guint *n_profiles_ptr,
+ GstVaapiProfile profile)
+{
+ guint n_profiles = *n_profiles_ptr;
+
+ profiles[n_profiles++] = profile;
+ switch (profile) {
+ case GST_VAAPI_PROFILE_H264_MAIN:
+ profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
break;
- case 100:
- profile = GST_VAAPI_PROFILE_H264_HIGH;
+ default:
break;
}
- return profile;
+ *n_profiles_ptr = n_profiles;
}
-static guint
-h264_get_chroma_type(GstH264SPS *sps)
+/* Fills in compatible profiles for MVC decoding */
+static void
+fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
+ guint *n_profiles_ptr, guint dpb_size)
{
- guint chroma_type = 0;
+ const gchar * const vendor_string =
+ gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
- switch (sps->chroma_format_idc) {
- case 1:
- chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
- break;
- case 2:
- chroma_type = GST_VAAPI_CHROMA_TYPE_YUV422;
- break;
- case 3:
- if (!sps->separate_colour_plane_flag)
- chroma_type = GST_VAAPI_CHROMA_TYPE_YUV444;
- break;
+ gboolean add_high_profile = FALSE;
+ struct map {
+ const gchar *str;
+ guint str_len;
+ };
+ const struct map *m;
+
+ // Drivers that support slice level decoding
+ if (vendor_string && dpb_size <= 16) {
+ static const struct map drv_names[] = {
+ { "Intel i965 driver", 17 },
+ { NULL, 0 }
+ };
+ for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
+ if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
+ add_high_profile = TRUE;
+ }
}
- return chroma_type;
+
+ if (add_high_profile)
+ fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
}
static GstVaapiProfile
-get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
- GstVaapiProfile profile, profiles[2];
+ GstVaapiProfile profile, profiles[4];
guint i, n_profiles = 0;
- profile = h264_get_profile(sps);
+ profile = gst_vaapi_utils_h264_get_profile(sps->profile_idc);
if (!profile)
return GST_VAAPI_PROFILE_UNKNOWN;
- profiles[n_profiles++] = profile;
+ fill_profiles(profiles, &n_profiles, profile);
switch (profile) {
- case GST_VAAPI_PROFILE_H264_MAIN:
- profiles[n_profiles++] = GST_VAAPI_PROFILE_H264_HIGH;
+ case GST_VAAPI_PROFILE_H264_BASELINE:
+ if (sps->constraint_set1_flag) { // A.2.2 (main profile)
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_CONSTRAINED_BASELINE);
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MAIN);
+ }
+ break;
+ case GST_VAAPI_PROFILE_H264_EXTENDED:
+ if (sps->constraint_set1_flag) { // A.2.2 (main profile)
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MAIN);
+ }
+ break;
+ case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
+ if (priv->max_views == 2) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_STEREO_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
+ break;
+ case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
+ if (sps->frame_mbs_only_flag) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
break;
default:
break;
ensure_context(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
{
GstVaapiDecoder * const base_decoder = GST_VAAPI_DECODER_CAST(decoder);
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiContextInfo info;
GstVaapiProfile profile;
GstVaapiChromaType chroma_type;
gboolean reset_context = FALSE;
- guint mb_width, mb_height;
+ guint mb_width, mb_height, dpb_size, num_views;
+
+ num_views = get_num_views(sps);
+ if (priv->max_views < num_views) {
+ priv->max_views = num_views;
+ GST_DEBUG("maximum number of views changed to %u", num_views);
+ }
+
+ dpb_size = get_max_dec_frame_buffering(sps);
+ if (priv->dpb_size < dpb_size) {
+ GST_DEBUG("DPB size increased");
+ reset_context = TRUE;
+ }
- profile = get_profile(decoder, sps);
+ profile = get_profile(decoder, sps, dpb_size);
if (!profile) {
GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
}
- if (priv->profile != profile) {
+ if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
GST_DEBUG("profile changed");
reset_context = TRUE;
priv->profile = profile;
}
- chroma_type = h264_get_chroma_type(sps);
- if (!chroma_type || chroma_type != GST_VAAPI_CHROMA_TYPE_YUV420) {
+ chroma_type = gst_vaapi_utils_h264_get_chroma_type(sps->chroma_format_idc);
+ if (!chroma_type) {
GST_ERROR("unsupported chroma_format_idc %u", sps->chroma_format_idc);
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
}
}
priv->progressive_sequence = sps->frame_mbs_only_flag;
-#if 0
- /* XXX: we only output complete frames for now */
gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
-#endif
gst_vaapi_decoder_set_pixel_aspect_ratio(
base_decoder,
/* XXX: fix surface size when cropping is implemented */
info.profile = priv->profile;
info.entrypoint = priv->entrypoint;
+ info.chroma_type = priv->chroma_type;
info.width = sps->width;
info.height = sps->height;
- info.ref_frames = get_max_dec_frame_buffering(sps);
+ info.ref_frames = dpb_size;
if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
priv->has_context = TRUE;
/* Reset DPB */
- dpb_reset(decoder, sps);
+ if (!dpb_reset(decoder, dpb_size))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+
+ /* Reset MVC data */
+ if (!mvc_reset(decoder))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static void
-fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
+fill_iq_matrix_4x4(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
+ const GstH264SPS *sps)
{
- const guint8 (* const ScalingList4x4)[6][16] = &pps->scaling_lists_4x4;
- guint i, j;
+ guint i;
/* There are always 6 4x4 scaling lists */
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4) == 6);
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
- if (sizeof(iq_matrix->ScalingList4x4[0][0]) == 1)
- memcpy(iq_matrix->ScalingList4x4, *ScalingList4x4,
- sizeof(iq_matrix->ScalingList4x4));
- else {
- for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++) {
- for (j = 0; j < G_N_ELEMENTS(iq_matrix->ScalingList4x4[i]); j++)
- iq_matrix->ScalingList4x4[i][j] = (*ScalingList4x4)[i][j];
- }
- }
+ for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
+ gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
+ iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
}
static void
-fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps)
+fill_iq_matrix_8x8(VAIQMatrixBufferH264 *iq_matrix, const GstH264PPS *pps,
+ const GstH264SPS *sps)
{
- const guint8 (* const ScalingList8x8)[6][64] = &pps->scaling_lists_8x8;
- const GstH264SPS * const sps = pps->sequence;
- guint i, j, n;
+ guint i, n;
/* If chroma_format_idc != 3, there are up to 2 8x8 scaling lists */
if (!pps->transform_8x8_mode_flag)
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8) >= 2);
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList8x8[0]) == 64);
- if (sizeof(iq_matrix->ScalingList8x8[0][0]) == 1)
- memcpy(iq_matrix->ScalingList8x8, *ScalingList8x8,
- sizeof(iq_matrix->ScalingList8x8));
- else {
- n = (sps->chroma_format_idc != 3) ? 2 : 6;
- for (i = 0; i < n; i++) {
- for (j = 0; j < G_N_ELEMENTS(iq_matrix->ScalingList8x8[i]); j++)
- iq_matrix->ScalingList8x8[i][j] = (*ScalingList8x8)[i][j];
- }
+ n = (sps->chroma_format_idc != 3) ? 2 : 6;
+ for (i = 0; i < n; i++) {
+ gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
+ iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
}
}
ensure_quant_matrix(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
GstVaapiPicture * const base_picture = &picture->base;
- GstH264PPS * const pps = picture->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
VAIQMatrixBufferH264 *iq_matrix;
base_picture->iq_matrix = GST_VAAPI_IQ_MATRIX_NEW(H264, decoder);
if (sps->chroma_format_idc == 3)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CHROMA_FORMAT;
- fill_iq_matrix_4x4(iq_matrix, pps);
- fill_iq_matrix_8x8(iq_matrix, pps);
+ fill_iq_matrix_4x4(iq_matrix, pps, sps);
+ fill_iq_matrix_8x8(iq_matrix, pps, sps);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
+static inline gboolean
+is_valid_state(guint state, guint ref_state)
+{
+ return (state & ref_state) == ref_state;
+}
+
static GstVaapiDecoderStatus
decode_current_picture(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 * const picture = priv->current_picture;
- GstVaapiDecoderStatus status;
+
+ if (!is_valid_state(priv->decoder_state, GST_H264_VIDEO_STATE_VALID_PICTURE))
+ goto drop_frame;
+ priv->decoder_state = 0;
if (!picture)
return GST_VAAPI_DECODER_STATUS_SUCCESS;
- status = ensure_context(decoder, picture->pps->sequence);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
-
+ if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
+ goto error;
if (!exec_ref_pic_marking(decoder, picture))
goto error;
if (!dpb_add(decoder, picture))
goto error;
- if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
- goto error;
- if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
- gst_vaapi_picture_replace(&priv->current_picture, NULL);
+ gst_vaapi_picture_replace(&priv->current_picture, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
error:
/* XXX: fix for cases where first field failed to be decoded */
gst_vaapi_picture_replace(&priv->current_picture, NULL);
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+drop_frame:
+ priv->decoder_state = 0;
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
}
static GstVaapiDecoderStatus
-parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+parse_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SPS * const sps = &unit->data.sps;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
GstH264ParserResult result;
GST_DEBUG("parse SPS");
- result = gst_h264_parser_parse_sps(priv->parser, &unit->nalu, sps, TRUE);
+ priv->parser_state = 0;
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+
+ result = gst_h264_parser_parse_sps(priv->parser, &pi->nalu, sps, TRUE);
if (result != GST_H264_PARSER_OK)
return get_status(result);
- priv->got_sps = TRUE;
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = &unit->data.pps;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+ GstH264ParserResult result;
+
+ GST_DEBUG("parse subset SPS");
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+
+ result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
+ TRUE);
+ if (result != GST_H264_PARSER_OK)
+ return get_status(result);
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264PPS * const pps = &pi->data.pps;
GstH264ParserResult result;
GST_DEBUG("parse PPS");
- result = gst_h264_parser_parse_pps(priv->parser, &unit->nalu, pps);
+ priv->parser_state &= GST_H264_VIDEO_STATE_GOT_SPS;
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ pps->slice_group_map_type = 0;
+ pps->slice_group_change_rate_minus1 = 0;
+
+ result = gst_h264_parser_parse_pps(priv->parser, &pi->nalu, pps);
if (result != GST_H264_PARSER_OK)
return get_status(result);
- priv->got_pps = TRUE;
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_PPS;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+parse_sei(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SEIMessage sei;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GArray ** const sei_ptr = &pi->data.sei;
GstH264ParserResult result;
GST_DEBUG("parse SEI");
- memset(&sei, 0, sizeof(sei));
- result = gst_h264_parser_parse_sei(priv->parser, &unit->nalu, &sei);
+ result = gst_h264_parser_parse_sei(priv->parser, &pi->nalu, sei_ptr);
if (result != GST_H264_PARSER_OK) {
- GST_WARNING("failed to parse SEI, payload type:%d", sei.payloadType);
+ GST_WARNING("failed to parse SEI messages");
return get_status(result);
}
-
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
-parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+parse_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SliceHdr * const slice_hdr = &unit->data.slice_hdr;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264NalUnit * const nalu = &pi->nalu;
+ GstH264SPS *sps;
GstH264ParserResult result;
GST_DEBUG("parse slice");
- result = gst_h264_parser_parse_slice_hdr(priv->parser, &unit->nalu,
+ priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
+ GST_H264_VIDEO_STATE_GOT_PPS);
+
+ /* Propagate Prefix NAL unit info, if necessary */
+ switch (nalu->type) {
+ case GST_H264_NAL_SLICE:
+ case GST_H264_NAL_SLICE_IDR: {
+ GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
+ if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
+ /* MVC sequences shall have a Prefix NAL unit immediately
+ preceding this NAL unit */
+ pi->nalu.extension_type = prev_pi->nalu.extension_type;
+ pi->nalu.extension = prev_pi->nalu.extension;
+ }
+ else {
+ /* In the very unlikely case there is no Prefix NAL unit
+ immediately preceding this NAL unit, try to infer some
+ defaults (H.7.4.1.1) */
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+ mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
+ nalu->idr_pic_flag = !mvc->non_idr_flag;
+ mvc->priority_id = 0;
+ mvc->view_id = 0;
+ mvc->temporal_id = 0;
+ mvc->anchor_pic_flag = 0;
+ mvc->inter_view_flag = 1;
+ }
+ break;
+ }
+ }
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ slice_hdr->cabac_init_idc = 0;
+ slice_hdr->direct_spatial_mv_pred_flag = 0;
+
+ result = gst_h264_parser_parse_slice_hdr(priv->parser, &pi->nalu,
slice_hdr, TRUE, TRUE);
if (result != GST_H264_PARSER_OK)
return get_status(result);
+ sps = slice_hdr->pps->sequence;
+
+ /* Update MVC data */
+ pi->view_id = get_view_id(&pi->nalu);
+ pi->voc = get_view_order_index(sps, pi->view_id);
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+
+ GST_DEBUG("decode SPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+
+ GST_DEBUG("decode subset SPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
+decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264PPS * const pps = &pi->data.pps;
+
+ GST_DEBUG("decode PPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->pps[pps->id], pi);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderH264 *decoder)
{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
GST_DEBUG("decode sequence-end");
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- dpb_flush(decoder);
- return GST_VAAPI_DECODER_STATUS_END_OF_STREAM;
+ dpb_flush(decoder, NULL);
+
+ /* Reset defaults, should there be a new sequence available next */
+ priv->max_views = 1;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
/* 8.2.1.1 - Decoding process for picture order count type 0 */
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxPicOrderCntLsb = 1 << (sps->log2_max_pic_order_cnt_lsb_minus4 + 4);
gint32 temp_poc;
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
gint32 prev_frame_num_offset, abs_frame_num, expected_poc;
guint i;
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
gint32 prev_frame_num_offset, temp_poc;
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
switch (sps->pic_order_cnt_type) {
case 0:
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
guint i;
for (i = 0; i < priv->short_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->short_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-27)
if (pic->frame_num > priv->frame_num)
pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
for (i = 0; i < priv->long_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->long_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-29, 8-32, 8-33)
if (GST_VAAPI_PICTURE_IS_FRAME(picture))
pic->long_term_pic_num = pic->long_term_frame_idx;
*RefPicList_count = n;
}
+/* Finds the inter-view reference picture with the supplied view id */
+static GstVaapiPictureH264 *
+find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
+{
+ GPtrArray * const inter_views = decoder->priv.inter_views;
+ guint i;
+
+ for (i = 0; i < inter_views->len; i++) {
+ GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
+ if (picture->base.view_id == view_id)
+ return picture;
+ }
+
+ GST_WARNING("failed to find inter-view reference picture for view_id: %d",
+ view_id);
+ return NULL;
+}
+
+/* Checks whether the view id exists in the supplied list of view ids */
+static gboolean
+find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
+{
+ guint i;
+
+ for (i = 0; i < num_view_ids; i++) {
+ if (view_ids[i] == view_id)
+ return TRUE;
+ }
+ return FALSE;
+}
+
+static gboolean
+find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
+ gboolean is_anchor)
+{
+ if (is_anchor)
+ return (find_view_id(view_id, view->anchor_ref_l0,
+ view->num_anchor_refs_l0) ||
+ find_view_id(view_id, view->anchor_ref_l1,
+ view->num_anchor_refs_l1));
+
+ return (find_view_id(view_id, view->non_anchor_ref_l0,
+ view->num_non_anchor_refs_l0) ||
+ find_view_id(view_id, view->non_anchor_ref_l1,
+ view->num_non_anchor_refs_l1));
+}
+
+/* Checks whether the inter-view reference picture with the supplied
+ view id is used for decoding the current view component picture */
+static gboolean
+is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
+ guint16 view_id, GstVaapiPictureH264 *picture)
+{
+ const GstH264SPS * const sps = get_sps(decoder);
+ gboolean is_anchor;
+
+ if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+ sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return FALSE;
+
+ is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+ return find_view_id_in_view(view_id,
+ &sps->extension.mvc.view[picture->base.voc], is_anchor);
+}
+
+/* Checks whether the supplied inter-view reference picture is used
+ for decoding the next view component pictures */
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture)
+{
+ const GstH264SPS * const sps = get_sps(decoder);
+ gboolean is_anchor;
+ guint i, num_views;
+
+ if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+ sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return FALSE;
+
+ is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+ num_views = sps->extension.mvc.num_views_minus1 + 1;
+ for (i = picture->base.voc + 1; i < num_views; i++) {
+ const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
+ if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* H.8.2.1 - Initialization process for inter-view prediction references */
+static void
+init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
+ const guint16 *view_ids, guint num_view_ids)
+{
+ guint j, n;
+
+ n = *ref_list_count_ptr;
+ for (j = 0; j < num_view_ids && n < num_refs; j++) {
+ GstVaapiPictureH264 * const pic =
+ find_inter_view_reference(decoder, view_ids[j]);
+ if (pic)
+ ref_list[n++] = pic;
+ }
+ *ref_list_count_ptr = n;
+}
+
+static inline void
+init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ const GstH264SPS * const sps = get_sps(decoder);
+ const GstH264SPSExtMVCView *view;
+
+ GST_DEBUG("initialize reference picture list for inter-view prediction");
+
+ if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return;
+ view = &sps->extension.mvc.view[picture->base.voc];
+
+#define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
+ init_picture_refs_mvc_1(decoder, \
+ priv->RefPicList##ref_list, \
+ &priv->RefPicList##ref_list##_count, \
+ slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
+ view->view_list##_l##ref_list, \
+ view->num_##view_list##s_l##ref_list); \
+ } while (0)
+
+ if (list == 0) {
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+ INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
+ else
+ INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
+ }
+ else {
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+ INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
+ else
+ INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
+ }
+
+#undef INVOKE_INIT_PICTURE_REFS_MVC
+}
+
static void
init_picture_refs_p_slice(
GstVaapiDecoderH264 *decoder,
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 **ref_list;
guint i;
long_ref, long_ref_count
);
}
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
+ }
}
static void
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 **ref_list;
guint i, n;
priv->RefPicList1[0] = priv->RefPicList1[1];
priv->RefPicList1[1] = tmp;
}
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
+
+ /* RefPicList1 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
+ }
}
#undef SORT_REF_LIST
static gint
find_short_term_reference(GstVaapiDecoderH264 *decoder, gint32 pic_num)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i;
for (i = 0; i < priv->short_ref_count; i++) {
static gint
find_long_term_reference(GstVaapiDecoderH264 *decoder, gint32 long_term_pic_num)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i;
for (i = 0; i < priv->long_ref_count; i++) {
guint list
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
GstH264RefPicListModification *ref_pic_list_modification;
guint num_ref_pic_list_modifications;
GstVaapiPictureH264 **ref_list;
guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
- guint i, j, n, num_refs;
+ const guint16 *view_ids = NULL;
+ guint i, j, n, num_refs, num_view_ids = 0;
gint found_ref_idx;
- gint32 MaxPicNum, CurrPicNum, picNumPred;
+ gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
GST_DEBUG("modification process of reference picture list %u", list);
ref_list = priv->RefPicList0;
ref_list_count_ptr = &priv->RefPicList0_count;
num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l0;
+ num_view_ids = view->num_anchor_refs_l0;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l0;
+ num_view_ids = view->num_non_anchor_refs_l0;
+ }
+ }
}
else {
ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
ref_list = priv->RefPicList1;
ref_list_count_ptr = &priv->RefPicList1_count;
num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l1;
+ num_view_ids = view->num_anchor_refs_l1;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l1;
+ num_view_ids = view->num_non_anchor_refs_l1;
+ }
+ }
}
ref_list_count = *ref_list_count_ptr;
}
picNumPred = CurrPicNum;
+ picViewIdxPred = -1;
for (i = 0; i < num_ref_pic_list_modifications; i++) {
GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
PicNumF =
GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
ref_list[j]->pic_num : MaxPicNum;
- if (PicNumF != picNum)
+ if (PicNumF != picNum ||
+ ref_list[j]->base.view_id != picture->base.view_id)
ref_list[n++] = ref_list[j];
}
}
/* 8.2.4.3.2 - Long-term reference pictures */
- else {
+ else if (l->modification_of_pic_nums_idc == 2) {
for (j = num_refs; j > ref_list_idx; j--)
ref_list[j] = ref_list[j - 1];
LongTermPicNumF =
GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
ref_list[j]->long_term_pic_num : INT_MAX;
- if (LongTermPicNumF != l->value.long_term_pic_num)
+ if (LongTermPicNumF != l->value.long_term_pic_num ||
+ ref_list[j]->base.view_id != picture->base.view_id)
+ ref_list[n++] = ref_list[j];
+ }
+ }
+
+ /* H.8.2.2.3 - Inter-view prediction reference pictures */
+ else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
+ (l->modification_of_pic_nums_idc == 4 ||
+ l->modification_of_pic_nums_idc == 5)) {
+ gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
+ gint32 picViewIdx, targetViewId;
+
+ // (H-6)
+ if (l->modification_of_pic_nums_idc == 4) {
+ picViewIdx = picViewIdxPred - abs_diff_view_idx;
+ if (picViewIdx < 0)
+ picViewIdx += num_view_ids;
+ }
+
+ // (H-7)
+ else {
+ picViewIdx = picViewIdxPred + abs_diff_view_idx;
+ if (picViewIdx >= num_view_ids)
+ picViewIdx -= num_view_ids;
+ }
+ picViewIdxPred = picViewIdx;
+
+ // (H-8, H-9)
+ targetViewId = view_ids[picViewIdx];
+
+ // (H-10)
+ for (j = num_refs; j > ref_list_idx; j--)
+ ref_list[j] = ref_list[j - 1];
+ ref_list[ref_list_idx++] =
+ find_inter_view_reference(decoder, targetViewId);
+ n = ref_list_idx;
+ for (j = ref_list_idx; j <= num_refs; j++) {
+ if (!ref_list[j])
+ continue;
+ if (ref_list[j]->base.view_id != targetViewId ||
+ ref_list[j]->base.poc != picture->base.poc)
ref_list[n++] = ref_list[j];
}
}
}
static void
-init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
+init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, j, short_ref_count, long_ref_count;
short_ref_count = 0;
long_ref_count = 0;
- if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- GstVaapiPictureH264 *picture;
+ GstVaapiPictureH264 *pic;
if (!gst_vaapi_frame_store_has_frame(fs))
continue;
- picture = fs->buffers[0];
- if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
- priv->short_ref[short_ref_count++] = picture;
- else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
- priv->long_ref[long_ref_count++] = picture;
- picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
- picture->other_field = fs->buffers[1];
+ pic = fs->buffers[0];
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+ if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+ priv->short_ref[short_ref_count++] = pic;
+ else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+ priv->long_ref[long_ref_count++] = pic;
+ pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+ pic->other_field = fs->buffers[1];
}
}
else {
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
for (j = 0; j < fs->num_buffers; j++) {
- GstVaapiPictureH264 * const picture = fs->buffers[j];
- if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
- priv->short_ref[short_ref_count++] = picture;
- else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
- priv->long_ref[long_ref_count++] = picture;
- picture->structure = picture->base.structure;
- picture->other_field = fs->buffers[j ^ 1];
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+ if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+ priv->short_ref[short_ref_count++] = pic;
+ else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+ priv->long_ref[long_ref_count++] = pic;
+ pic->structure = pic->base.structure;
+ pic->other_field = fs->buffers[j ^ 1];
}
}
}
GstH264SliceHdr *slice_hdr
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPicture * const base_picture = &picture->base;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, num_refs;
- init_picture_ref_lists(decoder);
+ init_picture_ref_lists(decoder, picture);
init_picture_refs_pic_num(decoder, picture, slice_hdr);
priv->RefPicList0_count = 0;
priv->RefPicList1_count = 0;
- switch (base_picture->type) {
- case GST_VAAPI_PICTURE_TYPE_P:
- case GST_VAAPI_PICTURE_TYPE_SP:
+ switch (slice_hdr->type % 5) {
+ case GST_H264_P_SLICE:
+ case GST_H264_SP_SLICE:
init_picture_refs_p_slice(decoder, picture, slice_hdr);
break;
- case GST_VAAPI_PICTURE_TYPE_B:
+ case GST_H264_B_SLICE:
init_picture_refs_b_slice(decoder, picture, slice_hdr);
break;
default:
exec_picture_refs_modification(decoder, picture, slice_hdr);
- switch (base_picture->type) {
- case GST_VAAPI_PICTURE_TYPE_B:
+ switch (slice_hdr->type % 5) {
+ case GST_H264_B_SLICE:
num_refs = 1 + slice_hdr->num_ref_idx_l1_active_minus1;
for (i = priv->RefPicList1_count; i < num_refs; i++)
priv->RefPicList1[i] = NULL;
priv->RefPicList1_count = num_refs;
// fall-through
- case GST_VAAPI_PICTURE_TYPE_P:
- case GST_VAAPI_PICTURE_TYPE_SP:
+ case GST_H264_P_SLICE:
+ case GST_H264_SP_SLICE:
num_refs = 1 + slice_hdr->num_ref_idx_l0_active_minus1;
for (i = priv->RefPicList0_count; i < num_refs; i++)
priv->RefPicList0[i] = NULL;
static gboolean
init_picture(
GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 *picture,
- GstH264SliceHdr *slice_hdr,
- GstH264NalUnit *nalu
-)
+ GstVaapiPictureH264 *picture, GstVaapiParserInfoH264 *pi)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPicture * const base_picture = &picture->base;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
priv->prev_frame_num = priv->frame_num;
priv->frame_num = slice_hdr->frame_num;
picture->frame_num_wrap = priv->frame_num;
picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
+ base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+ base_picture->view_id = pi->view_id;
+ base_picture->voc = pi->voc;
+
+ /* Initialize extensions */
+ switch (pi->nalu.extension_type) {
+ case GST_H264_NAL_EXTENSION_MVC: {
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
+ if (mvc->inter_view_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (mvc->anchor_pic_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_ANCHOR);
+ break;
+ }
+ }
/* Reset decoder state for IDR pictures */
- if (nalu->type == GST_H264_NAL_SLICE_IDR) {
+ if (pi->nalu.idr_pic_flag) {
GST_DEBUG("<IDR>");
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
- dpb_flush(decoder);
- }
-
- /* Initialize slice type */
- switch (slice_hdr->type % 5) {
- case GST_H264_P_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_P;
- break;
- case GST_H264_B_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_B;
- break;
- case GST_H264_I_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_I;
- break;
- case GST_H264_SP_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_SP;
- break;
- case GST_H264_SI_SLICE:
- base_picture->type = GST_VAAPI_PICTURE_TYPE_SI;
- break;
+ dpb_flush(decoder, picture);
}
/* Initialize picture structure */
picture->structure = base_picture->structure;
/* Initialize reference flags */
- if (nalu->ref_idc) {
+ if (pi->nalu.ref_idc) {
GstH264DecRefPicMarking * const dec_ref_pic_marking =
&slice_hdr->dec_ref_pic_marking;
}
init_picture_poc(decoder, picture, slice_hdr);
- init_picture_refs(decoder, picture, slice_hdr);
return TRUE;
}
static gboolean
exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264PPS * const pps = priv->current_picture->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SPS * const sps = get_sps(decoder);
GstVaapiPictureH264 *ref_picture;
guint i, m, max_num_ref_frames;
GstH264RefPicMarking *ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gint32 i, picNumX;
picNumX = get_picNumX(picture, ref_pic_marking);
GstH264RefPicMarking *ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gint32 i;
i = find_long_term_reference(decoder, ref_pic_marking->long_term_pic_num);
GstH264RefPicMarking *ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPictureH264 *ref_picture;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *ref_picture, *other_field;
gint32 i, picNumX;
for (i = 0; i < priv->long_ref_count; i++) {
ref_picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
gst_vaapi_picture_h264_set_reference(ref_picture,
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
- GST_VAAPI_PICTURE_IS_FRAME(picture));
+ GST_VAAPI_PICTURE_IS_COMPLETE(picture));
+
+ /* Assign LongTermFrameIdx to the other field if it was also
+ marked as "used for long-term reference" */
+ other_field = ref_picture->other_field;
+ if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
+ other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
}
/* 8.2.5.4.4. Mark pictures with LongTermFramIdx > max_long_term_frame_idx
GstH264RefPicMarking *ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
gint32 i, long_term_frame_idx;
long_term_frame_idx = ref_pic_marking->max_long_term_frame_idx_plus1 - 1;
GstH264RefPicMarking *ref_pic_marking
)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
- dpb_flush(decoder);
+ dpb_flush(decoder, picture);
priv->prev_pic_has_mmco5 = TRUE;
GstH264RefPicMarking *ref_pic_marking
)
{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *other_field;
+ guint i;
+
+ for (i = 0; i < priv->long_ref_count; i++) {
+ if (priv->long_ref[i]->long_term_frame_idx == ref_pic_marking->long_term_frame_idx)
+ break;
+ }
+ if (i != priv->long_ref_count) {
+ gst_vaapi_picture_h264_set_reference(priv->long_ref[i], 0, TRUE);
+ ARRAY_REMOVE_INDEX(priv->long_ref, i);
+ }
+
picture->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
gst_vaapi_picture_h264_set_reference(picture,
- GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE, FALSE);
+ GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE,
+ GST_VAAPI_PICTURE_IS_COMPLETE(picture));
+
+ /* Assign LongTermFrameIdx to the other field if it was also
+ marked as "used for long-term reference" */
+ other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
+ if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
+ other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
}
/* 8.2.5.4. Adaptive memory control decoded reference picture marking process */
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
priv->prev_pic_has_mmco5 = FALSE;
priv->prev_pic_structure = picture->structure;
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
+ g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
+
if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
return TRUE;
if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
- GstVaapiSliceH264 * const slice =
- gst_vaapi_picture_h264_get_last_slice(picture);
GstH264DecRefPicMarking * const dec_ref_pic_marking =
- &slice->slice_hdr.dec_ref_pic_marking;
+ &picture->last_slice_hdr->dec_ref_pic_marking;
if (dec_ref_pic_marking->adaptive_ref_pic_marking_mode_flag) {
if (!exec_ref_pic_marking_adaptive(decoder, picture, dec_ref_pic_marking))
return FALSE;
}
}
+static void
+vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
+ GstVaapiPictureH264 *picture)
+{
+ vaapi_fill_picture(pic, picture, 0);
+
+ /* H.8.4 - MVC inter prediction and inter-view prediction process */
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
+ /* The inter-view reference components and inter-view only
+ reference components that are included in the reference
+ picture lists are considered as not being marked as "used for
+ short-term reference" or "used for long-term reference" */
+ pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
+ VA_PICTURE_H264_LONG_TERM_REFERENCE);
+ }
+}
+
static gboolean
-fill_picture(
- GstVaapiDecoderH264 *decoder,
- GstVaapiPictureH264 *picture,
- GstH264SliceHdr *slice_hdr,
- GstH264NalUnit *nalu
-)
+fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPicture * const base_picture = &picture->base;
- GstH264PPS * const pps = picture->pps;
- GstH264SPS * const sps = pps->sequence;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
VAPictureParameterBufferH264 * const pic_param = base_picture->param;
guint i, n;
for (i = 0, n = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- if (gst_vaapi_frame_store_has_reference(fs))
+ if ((gst_vaapi_frame_store_has_reference(fs) &&
+ fs->view_id == picture->base.view_id) ||
+ (gst_vaapi_frame_store_has_inter_view(fs) &&
+ is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
fs->buffers[0], fs->structure);
+ if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
+ break;
}
for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
vaapi_init_picture(&pic_param->ReferenceFrames[n]);
COPY_BFM(seq_fields, sps, delta_pic_order_always_zero_flag);
pic_param->pic_fields.value = 0; /* reset all bits */
- pic_param->pic_fields.bits.field_pic_flag = slice_hdr->field_pic_flag;
+ pic_param->pic_fields.bits.field_pic_flag = GST_VAAPI_PICTURE_IS_INTERLACED(picture);
pic_param->pic_fields.bits.reference_pic_flag = GST_VAAPI_PICTURE_IS_REFERENCE(picture);
COPY_BFM(pic_fields, pps, entropy_coding_mode_flag);
/* Detection of the first VCL NAL unit of a primary coded picture (7.4.1.2.4) */
static gboolean
-is_new_picture(
- GstVaapiDecoderH264 *decoder,
- GstH264NalUnit *nalu,
- GstH264SliceHdr *slice_hdr
-)
+is_new_picture(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
GstH264PPS * const pps = slice_hdr->pps;
GstH264SPS * const sps = pps->sequence;
- GstVaapiSliceH264 *slice;
GstH264SliceHdr *prev_slice_hdr;
- if (!priv->current_picture)
+ if (!prev_pi)
return TRUE;
-
- slice = gst_vaapi_picture_h264_get_last_slice(priv->current_picture);
- if (!slice)
- return FALSE;
- prev_slice_hdr = &slice->slice_hdr;
+ prev_slice_hdr = &prev_pi->data.slice_hdr;
#define CHECK_EXPR(expr, field_name) do { \
if (!(expr)) { \
#define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
+ /* view_id differs in value and VOIdx of current slice_hdr is less
+ than the VOIdx of the prev_slice_hdr */
+ CHECK_VALUE(pi, prev_pi, view_id);
+
/* frame_num differs in value, regardless of inferred values to 0 */
CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
CHECK_VALUE(slice_hdr, prev_slice_hdr, bottom_field_flag);
/* nal_ref_idc differs in value with one of the nal_ref_idc values is 0 */
- CHECK_EXPR(((GST_VAAPI_PICTURE_IS_REFERENCE(priv->current_picture) ^
- (nalu->ref_idc != 0)) == 0), "nal_ref_idc");
+ CHECK_EXPR((pi->nalu.ref_idc != 0) ==
+ (prev_pi->nalu.ref_idc != 0), "nal_ref_idc");
/* POC type is 0 for both and either pic_order_cnt_lsb differs in
value or delta_pic_order_cnt_bottom differs in value */
}
/* IdrPicFlag differs in value */
- CHECK_EXPR(((GST_VAAPI_PICTURE_IS_IDR(priv->current_picture) ^
- (nalu->type == GST_H264_NAL_SLICE_IDR)) == 0), "IdrPicFlag");
+ CHECK_VALUE(&pi->nalu, &prev_pi->nalu, idr_pic_flag);
/* IdrPicFlag is equal to 1 for both and idr_pic_id differs in value */
- if (GST_VAAPI_PICTURE_IS_IDR(priv->current_picture))
+ if (pi->nalu.idr_pic_flag)
CHECK_VALUE(slice_hdr, prev_slice_hdr, idr_pic_id);
#undef CHECK_EXPR
return FALSE;
}
+/* Detection of a new access unit, assuming we are already in presence
+ of a new picture */
+static inline gboolean
+is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
+{
+ if (!prev_pi || prev_pi->view_id == pi->view_id)
+ return TRUE;
+ return pi->voc < prev_pi->voc;
+}
+
+/* Finds the first field picture corresponding to the supplied picture */
+static GstVaapiPictureH264 *
+find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstVaapiFrameStore *fs;
+
+ if (!slice_hdr->field_pic_flag)
+ return NULL;
+
+ fs = priv->prev_frames[pi->voc];
+ if (!fs || gst_vaapi_frame_store_has_frame(fs))
+ return NULL;
+
+ if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
+ return fs->buffers[0];
+ return NULL;
+}
+
static GstVaapiDecoderStatus
-decode_picture(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu, GstH264SliceHdr *slice_hdr)
+decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstVaapiPictureH264 *picture;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
+ GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
+ GstVaapiPictureH264 *picture, *first_field;
GstVaapiDecoderStatus status;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
- status = decode_current_picture(decoder);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
+ g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+ g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+
+ /* Only decode base stream for MVC */
+ switch (sps->profile_idc) {
+ case GST_H264_PROFILE_MULTIVIEW_HIGH:
+ case GST_H264_PROFILE_STEREO_HIGH:
+ if (0) {
+ GST_DEBUG("drop picture from substream");
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+ }
+ break;
+ }
status = ensure_context(decoder, sps);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- if (priv->current_picture) {
+ priv->decoder_state = 0;
+
+ first_field = find_first_field(decoder, pi);
+ if (first_field) {
/* Re-use current picture where the first field was decoded */
- picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
+ picture = gst_vaapi_picture_h264_new_field(first_field);
if (!picture) {
GST_ERROR("failed to allocate field picture");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
gst_vaapi_picture_replace(&priv->current_picture, picture);
gst_vaapi_picture_unref(picture);
- picture->pps = pps;
+ /* Clear inter-view references list if this is the primary coded
+ picture of the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ g_ptr_array_set_size(priv->inter_views, 0);
+
+ /* Update cropping rectangle */
+ if (sps->frame_cropping_flag) {
+ GstVaapiRectangle crop_rect;
+ crop_rect.x = sps->crop_rect_x;
+ crop_rect.y = sps->crop_rect_y;
+ crop_rect.width = sps->crop_rect_width;
+ crop_rect.height = sps->crop_rect_height;
+ gst_vaapi_picture_set_crop_rect(&picture->base, &crop_rect);
+ }
status = ensure_quant_matrix(decoder, picture);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
return status;
}
- if (!init_picture(decoder, picture, slice_hdr, nalu))
+ if (!init_picture(decoder, picture, pi))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- if (!fill_picture(decoder, picture, slice_hdr, nalu))
+ if (!fill_picture(decoder, picture))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+
+ priv->decoder_state = pi->state;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static inline guint
-get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, GstH264NalUnit *nalu)
+get_slice_data_bit_offset(GstH264SliceHdr *slice_hdr, guint nal_header_bytes)
{
guint epb_count;
epb_count = slice_hdr->n_emulation_prevention_bytes;
- return 8 /* nal_unit_type */ + slice_hdr->header_size - epb_count * 8;
+ return 8 * nal_header_bytes + slice_hdr->header_size - epb_count * 8;
}
static gboolean
-fill_pred_weight_table(GstVaapiDecoderH264 *decoder, GstVaapiSliceH264 *slice)
+fill_pred_weight_table(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- GstH264PPS * const pps = slice_hdr->pps;
- GstH264SPS * const sps = pps->sequence;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
+ GstH264PPS * const pps = get_pps(decoder);
+ GstH264SPS * const sps = get_sps(decoder);
GstH264PredWeightTable * const w = &slice_hdr->pred_weight_table;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
guint num_weight_tables = 0;
gint i, j;
else
num_weight_tables = 0;
- slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
- slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
+ slice_param->luma_log2_weight_denom = 0;
+ slice_param->chroma_log2_weight_denom = 0;
slice_param->luma_weight_l0_flag = 0;
slice_param->chroma_weight_l0_flag = 0;
slice_param->luma_weight_l1_flag = 0;
if (num_weight_tables < 1)
return TRUE;
+ slice_param->luma_log2_weight_denom = w->luma_log2_weight_denom;
+ slice_param->chroma_log2_weight_denom = w->chroma_log2_weight_denom;
+
slice_param->luma_weight_l0_flag = 1;
for (i = 0; i <= slice_param->num_ref_idx_l0_active_minus1; i++) {
slice_param->luma_weight_l0[i] = w->luma_weight_l0[i];
}
static gboolean
-fill_RefPicList(GstVaapiDecoderH264 *decoder, GstVaapiSliceH264 *slice)
+fill_RefPicList(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstH264SliceHdr *slice_hdr)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
guint i, num_ref_lists = 0;
slice_param->num_ref_idx_l0_active_minus1 = 0;
slice_hdr->num_ref_idx_l0_active_minus1;
for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
+ priv->RefPicList0[i]);
for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList0[i]);
slice_hdr->num_ref_idx_l1_active_minus1;
for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
+ priv->RefPicList1[i]);
for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList1[i]);
return TRUE;
}
static gboolean
-fill_slice(
- GstVaapiDecoderH264 *decoder,
- GstVaapiSliceH264 *slice,
- GstH264NalUnit *nalu
-)
+fill_slice(GstVaapiDecoderH264 *decoder,
+ GstVaapiSlice *slice, GstVaapiParserInfoH264 *pi)
{
- GstH264SliceHdr * const slice_hdr = &slice->slice_hdr;
- VASliceParameterBufferH264 * const slice_param = slice->base.param;
+ VASliceParameterBufferH264 * const slice_param = slice->param;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
/* Fill in VASliceParameterBufferH264 */
- slice_param->slice_data_bit_offset = get_slice_data_bit_offset(slice_hdr, nalu);
+ slice_param->slice_data_bit_offset =
+ get_slice_data_bit_offset(slice_hdr, pi->nalu.header_bytes);
slice_param->first_mb_in_slice = slice_hdr->first_mb_in_slice;
slice_param->slice_type = slice_hdr->type % 5;
slice_param->direct_spatial_mv_pred_flag = slice_hdr->direct_spatial_mv_pred_flag;
slice_param->slice_alpha_c0_offset_div2 = slice_hdr->slice_alpha_c0_offset_div2;
slice_param->slice_beta_offset_div2 = slice_hdr->slice_beta_offset_div2;
- if (!fill_RefPicList(decoder, slice))
+ if (!fill_RefPicList(decoder, slice, slice_hdr))
return FALSE;
- if (!fill_pred_weight_table(decoder, slice))
+ if (!fill_pred_weight_table(decoder, slice, slice_hdr))
return FALSE;
return TRUE;
}
static GstVaapiDecoderStatus
-decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+decode_slice(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GstH264SliceHdr * const slice_hdr = &unit->data.slice_hdr;
- GstH264NalUnit * const nalu = &unit->nalu;
- GstVaapiDecoderStatus status;
- GstVaapiPictureH264 *picture;
- GstVaapiSliceH264 *slice = NULL;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstVaapiPictureH264 * const picture = priv->current_picture;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstVaapiSlice *slice;
+ GstBuffer * const buffer =
+ GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer;
+ GstMapInfo map_info;
- GST_DEBUG("slice (%u bytes)", nalu->size);
+ GST_DEBUG("slice (%u bytes)", pi->nalu.size);
- if (!priv->got_sps || !priv->got_pps)
+ if (!is_valid_state(pi->state,
+ GST_H264_VIDEO_STATE_VALID_PICTURE_HEADERS)) {
+ GST_WARNING("failed to receive enough headers to decode slice");
return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ }
- if (!priv->has_context) {
- status = ensure_context(decoder, slice_hdr->pps->sequence);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
+ if (!ensure_pps(decoder, slice_hdr->pps)) {
+ GST_ERROR("failed to activate PPS");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- unit->base.buffer = gst_buffer_create_sub(
- GST_VAAPI_DECODER_CODEC_FRAME(decoder)->input_buffer,
- unit->base.offset, unit->base.size);
- if (!unit->base.buffer) {
- GST_ERROR("failed to allocate slice data");
- return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ if (!ensure_sps(decoder, slice_hdr->pps->sequence)) {
+ GST_ERROR("failed to activate SPS");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
+ }
+
+ if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+ GST_ERROR("failed to map buffer");
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- slice = gst_vaapi_slice_h264_new(decoder,
- GST_BUFFER_DATA(unit->base.buffer) + nalu->offset, nalu->size);
+ /* Check whether this is the first/last slice in the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+
+ slice = GST_VAAPI_SLICE_NEW(H264, decoder,
+ (map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
+ gst_buffer_unmap(buffer, &map_info);
if (!slice) {
GST_ERROR("failed to allocate slice");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
}
- slice->slice_hdr = *slice_hdr;
- if (is_new_picture(decoder, nalu, slice_hdr)) {
- status = decode_picture(decoder, nalu, slice_hdr);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- goto error;
+ init_picture_refs(decoder, picture, slice_hdr);
+ if (!fill_slice(decoder, slice, pi)) {
+ gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
+ return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
- picture = priv->current_picture;
- if (!fill_slice(decoder, slice, nalu)) {
- status = GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
- goto error;
- }
- gst_vaapi_picture_add_slice(
- GST_VAAPI_PICTURE_CAST(picture),
- GST_VAAPI_SLICE_CAST(slice)
- );
+ gst_vaapi_picture_add_slice(GST_VAAPI_PICTURE_CAST(picture), slice);
+ picture->last_slice_hdr = slice_hdr;
+ priv->decoder_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
-
-error:
- if (slice)
- gst_vaapi_mini_object_unref(GST_VAAPI_MINI_OBJECT(slice));
- return status;
}
static inline gint
}
static GstVaapiDecoderStatus
-decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnitH264 *unit)
+decode_unit(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
GstVaapiDecoderStatus status;
- switch (unit->nalu.type) {
+ priv->decoder_state |= pi->state;
+ switch (pi->nalu.type) {
+ case GST_H264_NAL_SPS:
+ status = decode_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = decode_subset_sps(decoder, unit);
+ break;
+ case GST_H264_NAL_PPS:
+ status = decode_pps(decoder, unit);
+ break;
+ case GST_H264_NAL_SLICE_EXT:
case GST_H264_NAL_SLICE_IDR:
/* fall-through. IDR specifics are handled in init_picture() */
case GST_H264_NAL_SLICE:
status = decode_slice(decoder, unit);
break;
case GST_H264_NAL_SEQ_END:
+ case GST_H264_NAL_STREAM_END:
status = decode_sequence_end(decoder);
break;
case GST_H264_NAL_SEI:
status = GST_VAAPI_DECODER_STATUS_SUCCESS;
break;
default:
- GST_WARNING("unsupported NAL unit type %d", unit->nalu.type);
+ GST_WARNING("unsupported NAL unit type %d", pi->nalu.type);
status = GST_VAAPI_DECODER_STATUS_ERROR_BITSTREAM_PARSER;
break;
}
}
static GstVaapiDecoderStatus
-decode_codec_data(GstVaapiDecoderH264 *decoder, GstBuffer *buffer)
+gst_vaapi_decoder_h264_decode_codec_data(GstVaapiDecoder *base_decoder,
+ const guchar *buf, guint buf_size)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
- GstVaapiDecoderUnitH264 unit;
+ GstVaapiDecoderUnit unit;
+ GstVaapiParserInfoH264 *pi = NULL;
GstH264ParserResult result;
- guchar *buf;
- guint buf_size;
guint i, ofs, num_sps, num_pps;
- buf = GST_BUFFER_DATA(buffer);
- buf_size = GST_BUFFER_SIZE(buffer);
- if (!buf || buf_size == 0)
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ unit.parsed_info = NULL;
if (buf_size < 8)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
ofs = 6;
for (i = 0; i < num_sps; i++) {
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ unit.parsed_info = pi;
+
result = gst_h264_parser_identify_nalu_avc(
priv->parser,
buf, ofs, buf_size, 2,
- &unit.nalu
+ &pi->nalu
);
- if (result != GST_H264_PARSER_OK)
- return get_status(result);
+ if (result != GST_H264_PARSER_OK) {
+ status = get_status(result);
+ goto cleanup;
+ }
status = parse_sps(decoder, &unit);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
- ofs = unit.nalu.offset + unit.nalu.size;
+ goto cleanup;
+ ofs = pi->nalu.offset + pi->nalu.size;
+
+ status = decode_sps(decoder, &unit);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ goto cleanup;
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
}
num_pps = buf[ofs];
ofs++;
for (i = 0; i < num_pps; i++) {
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+ unit.parsed_info = pi;
+
result = gst_h264_parser_identify_nalu_avc(
priv->parser,
buf, ofs, buf_size, 2,
- &unit.nalu
+ &pi->nalu
);
- if (result != GST_H264_PARSER_OK)
- return get_status(result);
+ if (result != GST_H264_PARSER_OK) {
+ status = get_status(result);
+ goto cleanup;
+ }
status = parse_pps(decoder, &unit);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
- ofs = unit.nalu.offset + unit.nalu.size;
+ goto cleanup;
+ ofs = pi->nalu.offset + pi->nalu.size;
+
+ status = decode_pps(decoder, &unit);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ goto cleanup;
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
}
- priv->is_avc = TRUE;
- return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ priv->is_avcC = TRUE;
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+
+cleanup:
+ gst_vaapi_parser_info_h264_replace(&pi, NULL);
+ return status;
}
static GstVaapiDecoderStatus
ensure_decoder(GstVaapiDecoderH264 *decoder)
{
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
- GstBuffer *codec_data;
-
- g_return_val_if_fail(priv->is_constructed,
- GST_VAAPI_DECODER_STATUS_ERROR_INIT_FAILED);
if (!priv->is_opened) {
priv->is_opened = gst_vaapi_decoder_h264_open(decoder);
if (!priv->is_opened)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_CODEC;
- codec_data = GST_VAAPI_DECODER_CODEC_DATA(decoder);
- if (codec_data) {
- status = decode_codec_data(decoder, codec_data);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
- return status;
- }
+ status = gst_vaapi_decoder_decode_codec_data(
+ GST_VAAPI_DECODER_CAST(decoder));
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
+ return status;
}
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static GstVaapiDecoderStatus
gst_vaapi_decoder_h264_parse(GstVaapiDecoder *base_decoder,
- GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit **unit_ptr)
+ GstAdapter *adapter, gboolean at_eos, GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(base_decoder);
- GstVaapiDecoderH264Private * const priv = decoder->priv;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiParserState * const ps = GST_VAAPI_PARSER_STATE(base_decoder);
- GstVaapiDecoderUnitH264 *unit;
+ GstVaapiParserInfoH264 *pi;
GstVaapiDecoderStatus status;
GstH264ParserResult result;
guchar *buf;
guint i, size, buf_size, nalu_size, flags;
guint32 start_code;
gint ofs, ofs2;
+ gboolean at_au_end = FALSE;
status = ensure_decoder(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- size = gst_adapter_available(adapter);
+ switch (priv->stream_alignment) {
+ case GST_VAAPI_STREAM_ALIGN_H264_NALU:
+ case GST_VAAPI_STREAM_ALIGN_H264_AU:
+ size = gst_adapter_available_fast(adapter);
+ break;
+ default:
+ size = gst_adapter_available(adapter);
+ break;
+ }
- if (priv->is_avc) {
+ if (priv->is_avcC) {
if (size < priv->nal_length_size)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
buf_size = priv->nal_length_size + nalu_size;
if (size < buf_size)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+ at_au_end = (buf_size == size);
}
else {
if (size < 4)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs = scan_for_start_code(adapter, 0, size, NULL);
- if (ofs < 0)
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
- if (ofs > 0) {
- gst_adapter_flush(adapter, ofs);
- size -= ofs;
- }
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+ buf_size = size;
+ else {
+ ofs = scan_for_start_code(adapter, 0, size, NULL);
+ if (ofs < 0)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs2 = ps->input_offset2 - ofs - 4;
- if (ofs2 < 4)
- ofs2 = 4;
+ if (ofs > 0) {
+ gst_adapter_flush(adapter, ofs);
+ size -= ofs;
+ }
- ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
- scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
- if (ofs < 0) {
- // Assume the whole NAL unit is present if end-of-stream
- if (!at_eos) {
- ps->input_offset2 = size;
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ ofs2 = ps->input_offset2 - ofs - 4;
+ if (ofs2 < 4)
+ ofs2 = 4;
+
+ ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
+ scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
+ if (ofs < 0) {
+ // Assume the whole NAL unit is present if end-of-stream
+ // or stream buffers aligned on access unit boundaries
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+ at_au_end = TRUE;
+ else if (!at_eos) {
+ ps->input_offset2 = size;
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ ofs = size;
}
- ofs = size;
+ buf_size = ofs;
}
- buf_size = ofs;
}
ps->input_offset2 = 0;
- buf = (guchar *)gst_adapter_peek(adapter, buf_size);
+ buf = (guchar *)gst_adapter_map(adapter, buf_size);
if (!buf)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- unit = gst_vaapi_decoder_unit_h264_new(buf_size);
- if (!unit)
+ unit->size = buf_size;
+
+ pi = gst_vaapi_parser_info_h264_new();
+ if (!pi)
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
- if (priv->is_avc)
+ gst_vaapi_decoder_unit_set_parsed_info(unit,
+ pi, (GDestroyNotify)gst_vaapi_mini_object_unref);
+
+ if (priv->is_avcC)
result = gst_h264_parser_identify_nalu_avc(priv->parser,
- buf, 0, buf_size, priv->nal_length_size, &unit->nalu);
+ buf, 0, buf_size, priv->nal_length_size, &pi->nalu);
else
result = gst_h264_parser_identify_nalu_unchecked(priv->parser,
- buf, 0, buf_size, &unit->nalu);
+ buf, 0, buf_size, &pi->nalu);
status = get_status(result);
- if (status != GST_VAAPI_DECODER_STATUS_SUCCESS) {
- gst_vaapi_decoder_unit_unref(unit);
+ if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- }
- switch (unit->nalu.type) {
+ switch (pi->nalu.type) {
case GST_H264_NAL_SPS:
status = parse_sps(decoder, unit);
break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = parse_subset_sps(decoder, unit);
+ break;
case GST_H264_NAL_PPS:
status = parse_pps(decoder, unit);
break;
case GST_H264_NAL_SEI:
status = parse_sei(decoder, unit);
break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ /* fall-through */
case GST_H264_NAL_SLICE_IDR:
case GST_H264_NAL_SLICE:
status = parse_slice(decoder, unit);
return status;
flags = 0;
- switch (unit->nalu.type) {
+ if (at_au_end) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+ }
+ switch (pi->nalu.type) {
case GST_H264_NAL_AU_DELIMITER:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* fall-through */
case GST_H264_NAL_FILLER_DATA:
/* fall-through */
case GST_H264_NAL_SEQ_END:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
break;
case GST_H264_NAL_SPS:
+ case GST_H264_NAL_SUBSET_SPS:
case GST_H264_NAL_PPS:
- flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
- /* fall-through */
case GST_H264_NAL_SEI:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ }
+ /* fall-through */
case GST_H264_NAL_SLICE_IDR:
case GST_H264_NAL_SLICE:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
- /* XXX: assume we got a new frame if first_mb_in_slice is set
- to zero, thus ignoring Arbitrary Slice Order (ASO) feature
- from Baseline profile */
- if (unit->nalu.offset + 2 <= unit->nalu.size &&
- (unit->nalu.data[unit->nalu.offset + 1] & 0x80))
+ if (priv->prev_pi &&
+ (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ }
+ else if (is_new_picture(pi, priv->prev_slice_pi)) {
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ if (is_new_access_unit(pi, priv->prev_slice_pi))
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+ }
+ gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
+ break;
+ case GST_H264_NAL_SPS_EXT:
+ case GST_H264_NAL_SLICE_AUX:
+ /* skip SPS extension and auxiliary slice for now */
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ case GST_H264_NAL_PREFIX_UNIT:
+ /* skip Prefix NAL units for now */
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
default:
- if (unit->nalu.type >= 14 && unit->nalu.type <= 18)
- flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
}
+ if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
+ priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
- unit->nalu.data = NULL;
- *unit_ptr = &unit->base;
+ pi->nalu.data = NULL;
+ pi->state = priv->parser_state;
+ pi->flags = flags;
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
gst_vaapi_decoder_h264_decode(GstVaapiDecoder *base_decoder,
GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(base_decoder);
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
GstVaapiDecoderStatus status;
status = ensure_decoder(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- return decode_unit(decoder, (GstVaapiDecoderUnitH264 *)unit);
+ return decode_unit(decoder, unit);
}
-static void
-gst_vaapi_decoder_h264_finalize(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_start_frame(GstVaapiDecoder *base_decoder,
+ GstVaapiDecoderUnit *unit)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(object);
-
- gst_vaapi_decoder_h264_destroy(decoder);
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
- G_OBJECT_CLASS(gst_vaapi_decoder_h264_parent_class)->finalize(object);
+ return decode_picture(decoder, unit);
}
-static void
-gst_vaapi_decoder_h264_constructed(GObject *object)
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_end_frame(GstVaapiDecoder *base_decoder)
{
- GstVaapiDecoderH264 * const decoder = GST_VAAPI_DECODER_H264(object);
- GstVaapiDecoderH264Private * const priv = decoder->priv;
- GObjectClass *parent_class;
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
+
+ return decode_current_picture(decoder);
+}
- parent_class = G_OBJECT_CLASS(gst_vaapi_decoder_h264_parent_class);
- if (parent_class->constructed)
- parent_class->constructed(object);
+static GstVaapiDecoderStatus
+gst_vaapi_decoder_h264_flush(GstVaapiDecoder *base_decoder)
+{
+ GstVaapiDecoderH264 * const decoder =
+ GST_VAAPI_DECODER_H264_CAST(base_decoder);
- priv->is_constructed = gst_vaapi_decoder_h264_create(decoder);
+ dpb_flush(decoder, NULL);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
static void
gst_vaapi_decoder_h264_class_init(GstVaapiDecoderH264Class *klass)
{
- GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiMiniObjectClass * const object_class =
+ GST_VAAPI_MINI_OBJECT_CLASS(klass);
GstVaapiDecoderClass * const decoder_class = GST_VAAPI_DECODER_CLASS(klass);
- g_type_class_add_private(klass, sizeof(GstVaapiDecoderH264Private));
-
- object_class->finalize = gst_vaapi_decoder_h264_finalize;
- object_class->constructed = gst_vaapi_decoder_h264_constructed;
+ object_class->size = sizeof(GstVaapiDecoderH264);
+ object_class->finalize = (GDestroyNotify)gst_vaapi_decoder_finalize;
+ decoder_class->create = gst_vaapi_decoder_h264_create;
+ decoder_class->destroy = gst_vaapi_decoder_h264_destroy;
decoder_class->parse = gst_vaapi_decoder_h264_parse;
decoder_class->decode = gst_vaapi_decoder_h264_decode;
+ decoder_class->start_frame = gst_vaapi_decoder_h264_start_frame;
+ decoder_class->end_frame = gst_vaapi_decoder_h264_end_frame;
+ decoder_class->flush = gst_vaapi_decoder_h264_flush;
+
+ decoder_class->decode_codec_data =
+ gst_vaapi_decoder_h264_decode_codec_data;
}
-static void
-gst_vaapi_decoder_h264_init(GstVaapiDecoderH264 *decoder)
+static inline const GstVaapiDecoderClass *
+gst_vaapi_decoder_h264_class(void)
{
- GstVaapiDecoderH264Private *priv;
+ static GstVaapiDecoderH264Class g_class;
+ static gsize g_class_init = FALSE;
- priv = GST_VAAPI_DECODER_H264_GET_PRIVATE(decoder);
- decoder->priv = priv;
- priv->parser = NULL;
- priv->current_picture = NULL;
- priv->dpb_count = 0;
- priv->dpb_size = 0;
- priv->profile = GST_VAAPI_PROFILE_UNKNOWN;
- priv->entrypoint = GST_VAAPI_ENTRYPOINT_VLD;
- priv->chroma_type = GST_VAAPI_CHROMA_TYPE_YUV420;
- priv->short_ref_count = 0;
- priv->long_ref_count = 0;
- priv->RefPicList0_count = 0;
- priv->RefPicList1_count = 0;
- priv->nal_length_size = 0;
- priv->field_poc[0] = 0;
- priv->field_poc[1] = 0;
- priv->poc_msb = 0;
- priv->poc_lsb = 0;
- priv->prev_poc_msb = 0;
- priv->prev_poc_lsb = 0;
- priv->frame_num_offset = 0;
- priv->frame_num = 0;
- priv->prev_frame_num = 0;
- priv->prev_pic_has_mmco5 = FALSE;
- priv->prev_pic_structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
- priv->is_constructed = FALSE;
- priv->is_opened = FALSE;
- priv->is_avc = FALSE;
- priv->has_context = FALSE;
- priv->progressive_sequence = TRUE;
+ if (g_once_init_enter(&g_class_init)) {
+ gst_vaapi_decoder_h264_class_init(&g_class);
+ g_once_init_leave(&g_class_init, TRUE);
+ }
+ return GST_VAAPI_DECODER_CLASS(&g_class);
+}
+
+/**
+ * gst_vaapi_decoder_h264_set_alignment:
+ * @decoder: a #GstVaapiDecoderH264
+ * @alignment: the #GstVaapiStreamAlignH264
+ *
+ * Specifies how stream buffers are aligned / fed, i.e. the boundaries
+ * of each buffer that is supplied to the decoder. This could be no
+ * specific alignment, NAL unit boundaries, or access unit boundaries.
+ */
+void
+gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+ GstVaapiStreamAlignH264 alignment)
+{
+ g_return_if_fail(decoder != NULL);
- memset(priv->dpb, 0, sizeof(priv->dpb));
- memset(priv->short_ref, 0, sizeof(priv->short_ref));
- memset(priv->long_ref, 0, sizeof(priv->long_ref));
- memset(priv->RefPicList0, 0, sizeof(priv->RefPicList0));
- memset(priv->RefPicList1, 0, sizeof(priv->RefPicList1));
+ decoder->priv.stream_alignment = alignment;
}
/**
GstVaapiDecoder *
gst_vaapi_decoder_h264_new(GstVaapiDisplay *display, GstCaps *caps)
{
- GstVaapiDecoderH264 *decoder;
-
- g_return_val_if_fail(GST_VAAPI_IS_DISPLAY(display), NULL);
- g_return_val_if_fail(GST_IS_CAPS(caps), NULL);
-
- decoder = g_object_new(
- GST_VAAPI_TYPE_DECODER_H264,
- "display", display,
- "caps", caps,
- NULL
- );
- if (!decoder->priv->is_constructed) {
- g_object_unref(decoder);
- return NULL;
- }
- return GST_VAAPI_DECODER_CAST(decoder);
+ return gst_vaapi_decoder_new(gst_vaapi_decoder_h264_class(), display, caps);
}