/* --- H.264 Parser Info --- */
/* ------------------------------------------------------------------------- */
+/*
+ * Extended decoder unit flags:
+ *
+ * @GST_VAAPI_DECODER_UNIT_FLAG_AU_START: marks the start of an access unit.
+ * @GST_VAAPI_DECODER_UNIT_FLAG_AU_END: marks the end of an access unit.
+ * @GST_VAAPI_DECODER_UNIT_FLAGS_AU: mask of both AU boundary flags.
+ *
+ * These occupy the first free bits above GST_VAAPI_DECODER_UNIT_FLAG_LAST.
+ */
+enum {
+ /* This flag does not strictly follow the definitions (7.4.1.2.3)
+ for detecting the start of an access unit as we are only
+ interested in knowing if the current slice is the first one or
+ the last one in the current access unit */
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 0),
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END = (
+ GST_VAAPI_DECODER_UNIT_FLAG_LAST << 1),
+
+ GST_VAAPI_DECODER_UNIT_FLAGS_AU = (
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END),
+};
+
#define GST_VAAPI_PARSER_INFO_H264(obj) \
((GstVaapiParserInfoH264 *)(obj))
GstH264SliceHdr slice_hdr;
} data;
guint state;
+ guint flags; // Same as decoder unit flags (persistent)
+ guint view_id; // View ID of slice
+ guint voc; // View order index (VOIdx) of slice
};
static void
gst_vaapi_parser_info_h264_finalize(GstVaapiParserInfoH264 *pi)
{
switch (pi->nalu.type) {
+ case GST_H264_NAL_SPS:
+ case GST_H264_NAL_SUBSET_SPS:
+ gst_h264_sps_clear(&pi->data.sps);
+ break;
+ case GST_H264_NAL_PPS:
+ gst_h264_pps_clear(&pi->data.pps);
+ break;
case GST_H264_NAL_SEI:
if (pi->data.sei) {
g_array_unref(pi->data.sei);
* Extended picture flags:
*
* @GST_VAAPI_PICTURE_FLAG_IDR: flag that specifies an IDR picture
+ * @GST_VAAPI_PICTURE_FLAG_INTER_VIEW: flag that indicates the picture
+ * may be used for inter-view prediction
+ * @GST_VAAPI_PICTURE_FLAG_ANCHOR: flag that specifies an anchor picture,
+ * i.e. a picture that is decoded with only inter-view prediction,
+ * and not inter prediction
+ * @GST_VAAPI_PICTURE_FLAG_AU_START: flag that marks the start of an
+ * access unit (AU)
+ * @GST_VAAPI_PICTURE_FLAG_AU_END: flag that marks the end of an
+ * access unit (AU)
* @GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE: flag that specifies
* "used for short-term reference"
* @GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE: flag that specifies
* reference picture (short-term reference or long-term reference)
*/
enum {
- GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+ GST_VAAPI_PICTURE_FLAG_IDR = (GST_VAAPI_PICTURE_FLAG_LAST << 0),
+ GST_VAAPI_PICTURE_FLAG_REFERENCE2 = (GST_VAAPI_PICTURE_FLAG_LAST << 1),
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW = (GST_VAAPI_PICTURE_FLAG_LAST << 2),
+ GST_VAAPI_PICTURE_FLAG_ANCHOR = (GST_VAAPI_PICTURE_FLAG_LAST << 3),
+ GST_VAAPI_PICTURE_FLAG_AU_START = (GST_VAAPI_PICTURE_FLAG_LAST << 4),
+ GST_VAAPI_PICTURE_FLAG_AU_END = (GST_VAAPI_PICTURE_FLAG_LAST << 5),
GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE = (
GST_VAAPI_PICTURE_FLAG_REFERENCE),
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE = (
- GST_VAAPI_PICTURE_FLAG_REFERENCE | (GST_VAAPI_PICTURE_FLAG_LAST << 1)),
+ GST_VAAPI_PICTURE_FLAG_REFERENCE | GST_VAAPI_PICTURE_FLAG_REFERENCE2),
GST_VAAPI_PICTURE_FLAGS_REFERENCE = (
GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE |
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE),
GST_VAAPI_PICTURE_FLAGS_REFERENCE) == \
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
+#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
+
+#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
+
+#define GST_VAAPI_PICTURE_H264(picture) \
+ ((GstVaapiPictureH264 *)(picture))
+
struct _GstVaapiPictureH264 {
GstVaapiPicture base;
GstH264SliceHdr *last_slice_hdr;
/*< private >*/
GstVaapiMiniObject parent_instance;
+ guint view_id;
guint structure;
GstVaapiPictureH264 *buffers[2];
guint num_buffers;
if (!fs)
return NULL;
+ fs->view_id = picture->base.view_id;
fs->structure = picture->structure;
fs->buffers[0] = gst_vaapi_picture_ref(picture);
fs->buffers[1] = NULL;
return FALSE;
}
+/* Returns TRUE if any picture held in the frame store is flagged
+   GST_VAAPI_PICTURE_FLAG_INTER_VIEW, i.e. may be used for inter-view
+   prediction (MVC) */
+static gboolean
+gst_vaapi_frame_store_has_inter_view(GstVaapiFrameStore *fs)
+{
+ guint i;
+
+ for (i = 0; i < fs->num_buffers; i++) {
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(fs->buffers[i]))
+ return TRUE;
+ }
+ return FALSE;
+}
+
#define gst_vaapi_frame_store_ref(fs) \
gst_vaapi_mini_object_ref(GST_VAAPI_MINI_OBJECT(fs))
GstH264NalParser *parser;
guint parser_state;
guint decoder_state;
+ GstVaapiStreamAlignH264 stream_alignment;
GstVaapiPictureH264 *current_picture;
GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
GstVaapiParserInfoH264 *active_sps;
GstVaapiParserInfoH264 *pps[GST_H264_MAX_PPS_COUNT];
GstVaapiParserInfoH264 *active_pps;
+ GstVaapiParserInfoH264 *prev_pi;
GstVaapiParserInfoH264 *prev_slice_pi;
- GstVaapiFrameStore *prev_frame;
- GstVaapiFrameStore *dpb[16];
+ GstVaapiFrameStore **prev_frames;
+ guint prev_frames_alloc;
+ GstVaapiFrameStore **dpb;
guint dpb_count;
guint dpb_size;
+ guint dpb_size_max;
+ guint max_views;
GstVaapiProfile profile;
GstVaapiEntrypoint entrypoint;
GstVaapiChromaType chroma_type;
+ GPtrArray *inter_views;
GstVaapiPictureH264 *short_ref[32];
guint short_ref_count;
GstVaapiPictureH264 *long_ref[32];
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture);
+
+/* Frame-store level wrapper around
+   is_inter_view_reference_for_next_pictures().
+   NOTE(review): only buffers[0] is consulted — presumably sufficient since
+   both fields of a frame share the same view_id/VOIdx; confirm for
+   field-coded MVC streams. */
+static inline gboolean
+is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
+ GstVaapiFrameStore *fs)
+{
+ return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
+}
+
+/* Determines if the supplied profile is one of the MVC set
+   (Multiview High or Stereo High) */
+static gboolean
+is_mvc_profile(GstH264Profile profile)
+{
+ return profile == GST_H264_PROFILE_MULTIVIEW_HIGH ||
+ profile == GST_H264_PROFILE_STEREO_HIGH;
+}
+
+/* Determines the view_id from the supplied NAL unit.
+   Non-MVC NAL units belong to the base view, i.e. view_id 0 */
+static inline guint
+get_view_id(GstH264NalUnit *nalu)
+{
+ return GST_H264_IS_MVC_NALU(nalu) ? nalu->extension.mvc.view_id : 0;
+}
+
+/* Determines the view order index (VOIdx) from the supplied view_id
+   by scanning the SPS MVC extension view list.
+   Returns 0 for non-MVC streams, or -1 if the view_id is not listed
+   (callers must treat a negative return as an error). */
+static gint
+get_view_order_index(GstH264SPS *sps, guint16 view_id)
+{
+ GstH264SPSExtMVC *mvc;
+ gint i;
+
+ if (!sps || sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return 0;
+
+ mvc = &sps->extension.mvc;
+ for (i = 0; i <= mvc->num_views_minus1; i++) {
+ if (mvc->view[i].view_id == view_id)
+ return i;
+ }
+ GST_ERROR("failed to find VOIdx from view_id (%d)", view_id);
+ return -1;
+}
+
+/* Determines NumViews: num_views_minus1 + 1 for an MVC subset SPS,
+   otherwise 1 (single-view stream) */
+static guint
+get_num_views(GstH264SPS *sps)
+{
+ return 1 + (sps->extension_type == GST_H264_NAL_EXTENSION_MVC ?
+ sps->extension.mvc.num_views_minus1 : 0);
+}
+
/* Get number of reference frames to use */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
{
+ guint num_views, max_dpb_frames;
guint max_dec_frame_buffering, PicSizeMbs;
GstVaapiLevelH264 level;
const GstVaapiH264LevelLimits *level_limits;
else
level = gst_vaapi_utils_h264_get_level(sps->level_idc);
level_limits = gst_vaapi_utils_h264_get_level_limits(level);
- if (!level_limits)
- return 16;
-
- PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
- (sps->pic_height_in_map_units_minus1 + 1) *
- (sps->frame_mbs_only_flag ? 1 : 2));
- max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
+ if (G_UNLIKELY(!level_limits)) {
+ GST_FIXME("unsupported level_idc value (%d)", sps->level_idc);
+ max_dec_frame_buffering = 16;
+ }
+ else {
+ PicSizeMbs = ((sps->pic_width_in_mbs_minus1 + 1) *
+ (sps->pic_height_in_map_units_minus1 + 1) *
+ (sps->frame_mbs_only_flag ? 1 : 2));
+ max_dec_frame_buffering = level_limits->MaxDpbMbs / PicSizeMbs;
+ }
+ if (is_mvc_profile(sps->profile_idc))
+ max_dec_frame_buffering <<= 1;
/* VUI parameters */
if (sps->vui_parameters_present_flag) {
}
}
- if (max_dec_frame_buffering > 16)
- max_dec_frame_buffering = 16;
+ num_views = get_num_views(sps);
+ max_dpb_frames = 16 * (num_views > 1 ? g_bit_storage(num_views - 1) : 1);
+ if (max_dec_frame_buffering > max_dpb_frames)
+ max_dec_frame_buffering = max_dpb_frames;
else if (max_dec_frame_buffering < sps->num_ref_frames)
max_dec_frame_buffering = sps->num_ref_frames;
return MAX(1, max_dec_frame_buffering);
{
picture->output_needed = FALSE;
- if (fs) {
- if (--fs->output_needed > 0)
- return TRUE;
- picture = fs->buffers[0];
- }
+ if (--fs->output_needed > 0)
+ return TRUE;
+
+ if (!GST_VAAPI_PICTURE_IS_COMPLETE(picture))
+ return TRUE;
return gst_vaapi_picture_output(GST_VAAPI_PICTURE_CAST(picture));
}
dpb_remove_index(decoder, i);
}
-static gboolean
-dpb_bump(GstVaapiDecoderH264 *decoder)
+/* Finds the frame store holding the supplied picture.
+   Returns the DPB index of that frame store, or -1 if the picture is
+   not present in any buffer of any DPB entry. */
+static gint
+dpb_find_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ gint i, j;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ for (j = 0; j < fs->num_buffers; j++) {
+ if (fs->buffers[j] == picture)
+ return i;
+ }
+ }
+ return -1;
+}
+
+/* Finds the picture with the lowest POC that needs to be output */
+static gint
+dpb_find_lowest_poc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiPictureH264 *found_picture = NULL;
guint i, j, found_index;
- gboolean success;
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
if (!fs->output_needed)
continue;
+ if (picture && picture->base.view_id != fs->view_id)
+ continue;
for (j = 0; j < fs->num_buffers; j++) {
- GstVaapiPictureH264 * const picture = fs->buffers[j];
- if (!picture->output_needed)
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed)
continue;
- if (!found_picture || found_picture->base.poc > picture->base.poc)
- found_picture = picture, found_index = i;
+ if (!found_picture || found_picture->base.poc > pic->base.poc ||
+ (found_picture->base.poc == pic->base.poc &&
+ found_picture->base.voc > pic->base.voc))
+ found_picture = pic, found_index = i;
}
}
- if (!found_picture)
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
+/* Finds the picture with the lowest VOC that needs to be output,
+   restricted to pictures from *other* views that share the same POC
+   as the supplied picture (i.e. same access unit).
+   Returns the DPB index of the match, or -1 if none; the matched
+   picture is stored through found_picture_ptr when non-NULL. */
+static gint
+dpb_find_lowest_voc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture,
+ GstVaapiPictureH264 **found_picture_ptr)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture = NULL;
+ guint i, j, found_index;
+
+ for (i = 0; i < priv->dpb_count; i++) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ /* skip entries with nothing to output, and the picture's own view */
+ if (!fs->output_needed || fs->view_id == picture->base.view_id)
+ continue;
+ for (j = 0; j < fs->num_buffers; j++) {
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (!pic->output_needed || pic->base.poc != picture->base.poc)
+ continue;
+ if (!found_picture || found_picture->base.voc > pic->base.voc)
+ found_picture = pic, found_index = i;
+ }
+ }
+
+ if (found_picture_ptr)
+ *found_picture_ptr = found_picture;
+ return found_picture ? found_index : -1;
+}
+
+/* Outputs, in VOC order, all pending view components from other views
+   that belong to the same access unit as @picture, stopping at @voc
+   (exclusive). No-op for single-view streams. Returns FALSE if any
+   output operation fails. */
+static gboolean
+dpb_output_other_views(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture, guint voc)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture;
+ gint found_index;
+ gboolean success;
+
+ if (priv->max_views == 1)
+ return TRUE;
+
+ /* Emit all other view components that were in the same access
+ unit as the picture we have just found */
+ found_picture = picture;
+ for (;;) {
+ found_index = dpb_find_lowest_voc(decoder, found_picture,
+ &found_picture);
+ if (found_index < 0 || found_picture->base.voc >= voc)
+ break;
+ success = dpb_output(decoder, priv->dpb[found_index], found_picture);
+ dpb_evict(decoder, found_picture, found_index);
+ if (!success)
+ return FALSE;
+ }
+ return TRUE;
+}
+
+static gboolean
+dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiPictureH264 *found_picture;
+ gint found_index;
+ gboolean success;
+
+ found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
+ if (found_index < 0)
return FALSE;
+ if (picture && picture->base.poc != found_picture->base.poc)
+ dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
+
success = dpb_output(decoder, priv->dpb[found_index], found_picture);
dpb_evict(decoder, found_picture, found_index);
+ if (priv->max_views == 1)
+ return success;
+
+ if (picture && picture->base.poc != found_picture->base.poc)
+ dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
return success;
}
static void
-dpb_clear(GstVaapiDecoderH264 *decoder)
+dpb_clear(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
- guint i;
+ guint i, n;
- for (i = 0; i < priv->dpb_count; i++)
+ for (i = 0; i < priv->dpb_count; i++) {
+ if (picture && picture->base.view_id != priv->dpb[i]->view_id)
+ continue;
gst_vaapi_frame_store_replace(&priv->dpb[i], NULL);
- priv->dpb_count = 0;
+ }
- gst_vaapi_frame_store_replace(&priv->prev_frame, NULL);
+ /* Compact the resulting DPB, i.e. remove holes */
+ for (i = 0, n = 0; i < priv->dpb_count; i++) {
+ if (priv->dpb[i]) {
+ if (i != n) {
+ priv->dpb[n] = priv->dpb[i];
+ priv->dpb[i] = NULL;
+ }
+ n++;
+ }
+ }
+ priv->dpb_count = n;
+
+ /* Clear previous frame buffers only if this is a "flush-all" operation,
+ or if the picture is the first one in the access unit */
+ if (priv->prev_frames && (!picture ||
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_AU_START))) {
+ for (i = 0; i < priv->max_views; i++)
+ gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
+ }
}
static void
-dpb_flush(GstVaapiDecoderH264 *decoder)
+dpb_flush(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
- while (dpb_bump(decoder))
+ while (dpb_bump(decoder, picture))
;
- dpb_clear(decoder);
+ dpb_clear(decoder, picture);
+}
+
+/* H.264 (H.8.5.5-like) MVC DPB pruning: drops frame stores from other
+   views that are no longer needed — not awaiting output, not used as
+   (intra-view) reference, and not an inter-view reference for pictures
+   still to come in the current access unit. */
+static void
+dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ const gboolean is_last_picture = /* in the access unit */
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+ guint i;
+
+ // Remove all unused inter-view only reference components of the current AU
+ i = 0;
+ while (i < priv->dpb_count) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (fs->view_id != picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
+ (is_last_picture ||
+ !is_inter_view_reference_for_next_frames(decoder, fs)))
+ dpb_remove_index(decoder, i);
+ else
+ i++;
+ }
}
static gboolean
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiFrameStore *fs;
- guint i, j;
+ guint i;
+
+ if (priv->max_views > 1)
+ dpb_prune_mvc(decoder, picture);
// Remove all unused pictures
if (!GST_VAAPI_PICTURE_IS_IDR(picture)) {
i = 0;
while (i < priv->dpb_count) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- if (!fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
+ if (fs->view_id == picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
dpb_remove_index(decoder, i);
else
i++;
}
// Check if picture is the second field and the first field is still in DPB
- fs = priv->prev_frame;
- if (fs && !gst_vaapi_frame_store_has_frame(fs))
- return gst_vaapi_frame_store_add(fs, picture);
+ if (GST_VAAPI_PICTURE_IS_INTERLACED(picture) &&
+ !GST_VAAPI_PICTURE_IS_FIRST_FIELD(picture)) {
+ const gint found_index = dpb_find_picture(decoder,
+ GST_VAAPI_PICTURE_H264(picture->base.parent_picture));
+ if (found_index >= 0)
+ return gst_vaapi_frame_store_add(priv->dpb[found_index], picture);
+
+ // ... also check the previous picture that was immediately output
+ fs = priv->prev_frames[picture->base.voc];
+ if (fs && &fs->buffers[0]->base == picture->base.parent_picture) {
+ if (!gst_vaapi_frame_store_add(fs, picture))
+ return FALSE;
+ return dpb_output(decoder, fs, picture);
+ }
+ }
// Create new frame store, and split fields if necessary
fs = gst_vaapi_frame_store_new(picture);
if (!fs)
return FALSE;
- gst_vaapi_frame_store_replace(&priv->prev_frame, fs);
+ gst_vaapi_frame_store_replace(&priv->prev_frames[picture->base.voc], fs);
gst_vaapi_frame_store_unref(fs);
+ if (picture->output_flag) {
+ picture->output_needed = TRUE;
+ fs->output_needed++;
+ }
+
if (!priv->progressive_sequence && gst_vaapi_frame_store_has_frame(fs)) {
if (!gst_vaapi_frame_store_split_fields(fs))
return FALSE;
// C.4.5.1 - Storage and marking of a reference decoded picture into the DPB
if (GST_VAAPI_PICTURE_IS_REFERENCE(picture)) {
while (priv->dpb_count == priv->dpb_size) {
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
- if (picture->output_flag) {
- picture->output_needed = TRUE;
- fs->output_needed++;
- }
}
// C.4.5.2 - Storage and marking of a non-reference decoded picture into the DPB
else {
- if (!picture->output_flag)
+ const gboolean StoreInterViewOnlyRefFlag =
+ !GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_AU_END) &&
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (!picture->output_flag && !StoreInterViewOnlyRefFlag)
return TRUE;
while (priv->dpb_count == priv->dpb_size) {
- gboolean found_picture = FALSE;
- for (i = 0; !found_picture && i < priv->dpb_count; i++) {
- GstVaapiFrameStore * const fs = priv->dpb[i];
- if (!fs->output_needed)
- continue;
- for (j = 0; !found_picture && j < fs->num_buffers; j++)
- found_picture = fs->buffers[j]->output_needed &&
- fs->buffers[j]->base.poc < picture->base.poc;
+ GstVaapiPictureH264 *found_picture;
+ if (!StoreInterViewOnlyRefFlag) {
+ if (dpb_find_lowest_poc(decoder, picture, &found_picture) < 0 ||
+ found_picture->base.poc > picture->base.poc)
+ return dpb_output(decoder, fs, picture);
}
- if (!found_picture)
- return dpb_output(decoder, NULL, picture);
- if (!dpb_bump(decoder))
+ if (!dpb_bump(decoder, picture))
return FALSE;
}
- gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
- picture->output_needed = TRUE;
- fs->output_needed++;
}
+ gst_vaapi_frame_store_replace(&priv->dpb[priv->dpb_count++], fs);
return TRUE;
}
-static inline void
-dpb_reset(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+static gboolean
+dpb_reset(GstVaapiDecoderH264 *decoder, guint dpb_size)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
- priv->dpb_size = get_max_dec_frame_buffering(sps);
+ if (dpb_size > priv->dpb_size_max) {
+ priv->dpb = g_try_realloc_n(priv->dpb, dpb_size, sizeof(*priv->dpb));
+ if (!priv->dpb)
+ return FALSE;
+ memset(&priv->dpb[priv->dpb_size_max], 0,
+ (dpb_size - priv->dpb_size_max) * sizeof(*priv->dpb));
+ priv->dpb_size_max = dpb_size;
+ }
+ priv->dpb_size = dpb_size;
+
GST_DEBUG("DPB size %u", priv->dpb_size);
+ return TRUE;
+}
+
+/* GDestroyNotify for the inter_views array: clears the INTER_VIEW flag
+   and drops the array's reference on the picture. NULL-safe. */
+static void
+unref_inter_view(GstVaapiPictureH264 *picture)
+{
+ if (!picture)
+ return;
+ GST_VAAPI_PICTURE_FLAG_UNSET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ gst_vaapi_picture_unref(picture);
+}
+
+/* Resets MVC resources: (lazily) creates the inter-view reference
+   array and (re)sizes the per-view prev_frames array to max_views,
+   releasing frame stores for views that no longer exist.
+   Returns FALSE on allocation failure. */
+static gboolean
+mvc_reset(GstVaapiDecoderH264 *decoder)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ guint i;
+
+ // Resize array of inter-view references
+ if (!priv->inter_views) {
+ priv->inter_views = g_ptr_array_new_full(priv->max_views,
+ (GDestroyNotify)unref_inter_view);
+ if (!priv->inter_views)
+ return FALSE;
+ }
+
+ // Resize array of previous frame buffers
+ /* drop entries beyond the new view count before shrinking */
+ for (i = priv->max_views; i < priv->prev_frames_alloc; i++)
+ gst_vaapi_frame_store_replace(&priv->prev_frames[i], NULL);
+
+ priv->prev_frames = g_try_realloc_n(priv->prev_frames, priv->max_views,
+ sizeof(*priv->prev_frames));
+ if (!priv->prev_frames) {
+ priv->prev_frames_alloc = 0;
+ return FALSE;
+ }
+ /* zero-initialize any newly grown tail entries */
+ for (i = priv->prev_frames_alloc; i < priv->max_views; i++)
+ priv->prev_frames[i] = NULL;
+ priv->prev_frames_alloc = priv->max_views;
+ return TRUE;
}
static GstVaapiDecoderStatus
gst_vaapi_picture_replace(&priv->current_picture, NULL);
gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, NULL);
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, NULL);
+
+ dpb_clear(decoder, NULL);
- dpb_clear(decoder);
+ if (priv->inter_views) {
+ g_ptr_array_unref(priv->inter_views);
+ priv->inter_views = NULL;
+ }
if (priv->parser) {
gst_h264_nal_parser_free(priv->parser);
gst_vaapi_decoder_h264_close(decoder);
+ g_free(priv->dpb);
+ priv->dpb = NULL;
+ priv->dpb_size = 0;
+
+ g_free(priv->prev_frames);
+ priv->prev_frames = NULL;
+ priv->prev_frames_alloc = 0;
+
for (i = 0; i < G_N_ELEMENTS(priv->pps); i++)
gst_vaapi_parser_info_h264_replace(&priv->pps[i], NULL);
gst_vaapi_parser_info_h264_replace(&priv->active_pps, NULL);
*n_profiles_ptr = n_profiles;
}
+/* Fills in compatible profiles for MVC decoding.
+   Adds the plain High profile as a fallback for drivers known to do
+   slice-level decoding (vendor-string allow-list), but only when the
+   DPB fits the non-MVC limit of 16 frames. */
+static void
+fill_profiles_mvc(GstVaapiDecoderH264 *decoder, GstVaapiProfile profiles[16],
+ guint *n_profiles_ptr, guint dpb_size)
+{
+ const gchar * const vendor_string =
+ gst_vaapi_display_get_vendor_string(GST_VAAPI_DECODER_DISPLAY(decoder));
+
+ gboolean add_high_profile = FALSE;
+ struct map {
+ const gchar *str;
+ guint str_len;
+ };
+ const struct map *m;
+
+ // Drivers that support slice level decoding
+ if (vendor_string && dpb_size <= 16) {
+ static const struct map drv_names[] = {
+ { "Intel i965 driver", 17 },
+ { NULL, 0 }
+ };
+ for (m = drv_names; m->str != NULL && !add_high_profile; m++) {
+ if (g_ascii_strncasecmp(vendor_string, m->str, m->str_len) == 0)
+ add_high_profile = TRUE;
+ }
+ }
+
+ if (add_high_profile)
+ fill_profiles(profiles, n_profiles_ptr, GST_VAAPI_PROFILE_H264_HIGH);
+}
+
static GstVaapiProfile
-get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps)
+get_profile(GstVaapiDecoderH264 *decoder, GstH264SPS *sps, guint dpb_size)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDisplay * const display = GST_VAAPI_DECODER_DISPLAY(decoder);
GST_VAAPI_PROFILE_H264_MAIN);
}
break;
+ case GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH:
+ if (priv->max_views == 2) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_STEREO_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
+ break;
+ case GST_VAAPI_PROFILE_H264_STEREO_HIGH:
+ if (sps->frame_mbs_only_flag) {
+ fill_profiles(profiles, &n_profiles,
+ GST_VAAPI_PROFILE_H264_MULTIVIEW_HIGH);
+ }
+ fill_profiles_mvc(decoder, profiles, &n_profiles, dpb_size);
+ break;
default:
break;
}
GstVaapiProfile profile;
GstVaapiChromaType chroma_type;
gboolean reset_context = FALSE;
- guint mb_width, mb_height;
+ guint mb_width, mb_height, dpb_size, num_views;
+
+ num_views = get_num_views(sps);
+ if (priv->max_views < num_views) {
+ priv->max_views = num_views;
+ GST_DEBUG("maximum number of views changed to %u", num_views);
+ }
- profile = get_profile(decoder, sps);
+ dpb_size = get_max_dec_frame_buffering(sps);
+ if (priv->dpb_size < dpb_size) {
+ GST_DEBUG("DPB size increased");
+ reset_context = TRUE;
+ }
+
+ profile = get_profile(decoder, sps, dpb_size);
if (!profile) {
GST_ERROR("unsupported profile_idc %u", sps->profile_idc);
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
}
- if (priv->profile != profile) {
+ if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
GST_DEBUG("profile changed");
reset_context = TRUE;
priv->profile = profile;
}
priv->progressive_sequence = sps->frame_mbs_only_flag;
-#if 0
- /* XXX: we only output complete frames for now */
gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
-#endif
gst_vaapi_decoder_set_pixel_aspect_ratio(
base_decoder,
info.chroma_type = priv->chroma_type;
info.width = sps->width;
info.height = sps->height;
- info.ref_frames = get_max_dec_frame_buffering(sps);
+ info.ref_frames = dpb_size;
if (!gst_vaapi_decoder_ensure_context(GST_VAAPI_DECODER(decoder), &info))
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
priv->has_context = TRUE;
/* Reset DPB */
- dpb_reset(decoder, sps);
+ if (!dpb_reset(decoder, dpb_size))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
+
+ /* Reset MVC data */
+ if (!mvc_reset(decoder))
+ return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
- gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
+ gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
}
n = (sps->chroma_format_idc != 3) ? 2 : 6;
for (i = 0; i < n; i++) {
- gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
+ gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
}
}
if (!picture)
return GST_VAAPI_DECODER_STATUS_SUCCESS;
+ if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
+ goto error;
if (!exec_ref_pic_marking(decoder, picture))
goto error;
if (!dpb_add(decoder, picture))
goto error;
- if (!gst_vaapi_picture_decode(GST_VAAPI_PICTURE_CAST(picture)))
- goto error;
- if (priv->prev_frame && gst_vaapi_frame_store_has_frame(priv->prev_frame))
- gst_vaapi_picture_replace(&priv->current_picture, NULL);
+ gst_vaapi_picture_replace(&priv->current_picture, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
error:
}
static GstVaapiDecoderStatus
+/* Parses a subset SPS NAL unit (MVC) from the decoder unit's parsed
+   info; stores the result in pi->data.sps and records GOT_SPS parser
+   state on success. */
+parse_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+ GstH264ParserResult result;
+
+ GST_DEBUG("parse subset SPS");
+
+ /* Variables that don't have inferred values per the H.264
+ standard but that should get a default value anyway */
+ sps->log2_max_pic_order_cnt_lsb_minus4 = 0;
+
+ result = gst_h264_parser_parse_subset_sps(priv->parser, &pi->nalu, sps,
+ TRUE);
+ if (result != GST_H264_PARSER_OK)
+ return get_status(result);
+
+ priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SPS;
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
parse_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiParserInfoH264 * const pi = unit->parsed_info;
GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+ GstH264NalUnit * const nalu = &pi->nalu;
+ GstH264SPS *sps;
GstH264ParserResult result;
GST_DEBUG("parse slice");
priv->parser_state &= (GST_H264_VIDEO_STATE_GOT_SPS|
GST_H264_VIDEO_STATE_GOT_PPS);
+ /* Propagate Prefix NAL unit info, if necessary */
+ switch (nalu->type) {
+ case GST_H264_NAL_SLICE:
+ case GST_H264_NAL_SLICE_IDR: {
+ GstVaapiParserInfoH264 * const prev_pi = priv->prev_pi;
+ if (prev_pi && prev_pi->nalu.type == GST_H264_NAL_PREFIX_UNIT) {
+ /* MVC sequences shall have a Prefix NAL unit immediately
+ preceding this NAL unit */
+ pi->nalu.extension_type = prev_pi->nalu.extension_type;
+ pi->nalu.extension = prev_pi->nalu.extension;
+ }
+ else {
+ /* In the very unlikely case there is no Prefix NAL unit
+ immediately preceding this NAL unit, try to infer some
+ defaults (H.7.4.1.1) */
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+ mvc->non_idr_flag = !(nalu->type == GST_H264_NAL_SLICE_IDR);
+ nalu->idr_pic_flag = !mvc->non_idr_flag;
+ mvc->priority_id = 0;
+ mvc->view_id = 0;
+ mvc->temporal_id = 0;
+ mvc->anchor_pic_flag = 0;
+ mvc->inter_view_flag = 1;
+ }
+ break;
+ }
+ }
+
/* Variables that don't have inferred values per the H.264
standard but that should get a default value anyway */
slice_hdr->cabac_init_idc = 0;
if (result != GST_H264_PARSER_OK)
return get_status(result);
+ sps = slice_hdr->pps->sequence;
+
+ /* Update MVC data */
+ pi->view_id = get_view_id(&pi->nalu);
+ pi->voc = get_view_order_index(sps, pi->view_id);
+
priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
}
static GstVaapiDecoderStatus
+/* Activates a parsed subset SPS: stores its parser info into the SPS
+   table slot indexed by sps->id (shared with regular SPS entries). */
+decode_subset_sps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstVaapiParserInfoH264 * const pi = unit->parsed_info;
+ GstH264SPS * const sps = &pi->data.sps;
+
+ GST_DEBUG("decode subset SPS");
+
+ gst_vaapi_parser_info_h264_replace(&priv->sps[sps->id], pi);
+ return GST_VAAPI_DECODER_STATUS_SUCCESS;
+}
+
+static GstVaapiDecoderStatus
decode_pps(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
static GstVaapiDecoderStatus
decode_sequence_end(GstVaapiDecoderH264 *decoder)
{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
GstVaapiDecoderStatus status;
GST_DEBUG("decode sequence-end");
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- dpb_flush(decoder);
+ dpb_flush(decoder, NULL);
+
+ /* Reset defaults, should there be a new sequence available next */
+ priv->max_views = 1;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
for (i = 0; i < priv->short_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->short_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-27)
if (pic->frame_num > priv->frame_num)
pic->frame_num_wrap = pic->frame_num - MaxFrameNum;
for (i = 0; i < priv->long_ref_count; i++) {
GstVaapiPictureH264 * const pic = priv->long_ref[i];
+ // (H.8.2)
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+
// (8-29, 8-32, 8-33)
if (GST_VAAPI_PICTURE_IS_FRAME(picture))
pic->long_term_pic_num = pic->long_term_frame_idx;
*RefPicList_count = n;
}
+/* Finds the inter-view reference picture with the supplied view id
+   in the current access unit's inter_views array.
+   Returns a borrowed pointer (no new reference), or NULL with a
+   warning if no such view component exists. */
+static GstVaapiPictureH264 *
+find_inter_view_reference(GstVaapiDecoderH264 *decoder, guint16 view_id)
+{
+ GPtrArray * const inter_views = decoder->priv.inter_views;
+ guint i;
+
+ for (i = 0; i < inter_views->len; i++) {
+ GstVaapiPictureH264 * const picture = g_ptr_array_index(inter_views, i);
+ if (picture->base.view_id == view_id)
+ return picture;
+ }
+
+ GST_WARNING("failed to find inter-view reference picture for view_id: %d",
+ view_id);
+ return NULL;
+}
+
+/* Checks whether the view id exists in the supplied list of view ids */
+static gboolean
+find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
+{
+ guint i;
+
+ for (i = 0; i < num_view_ids; i++) {
+ if (view_ids[i] == view_id)
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* Checks whether @view_id appears in either reference list (L0 or L1)
+   of the supplied SPS MVC view entry, selecting the anchor or
+   non-anchor lists according to @is_anchor. */
+static gboolean
+find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
+ gboolean is_anchor)
+{
+ if (is_anchor)
+ return (find_view_id(view_id, view->anchor_ref_l0,
+ view->num_anchor_refs_l0) ||
+ find_view_id(view_id, view->anchor_ref_l1,
+ view->num_anchor_refs_l1));
+
+ return (find_view_id(view_id, view->non_anchor_ref_l0,
+ view->num_non_anchor_refs_l0) ||
+ find_view_id(view_id, view->non_anchor_ref_l1,
+ view->num_non_anchor_refs_l1));
+}
+
+/* Checks whether the inter-view reference picture with the supplied
+   view id is used for decoding the current view component picture,
+   per the active SPS MVC view dependency lists. Always FALSE for
+   non-MVC pictures or non-MVC SPS. */
+static gboolean
+is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
+ guint16 view_id, GstVaapiPictureH264 *picture)
+{
+ const GstH264SPS * const sps = get_sps(decoder);
+ gboolean is_anchor;
+
+ if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+ sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return FALSE;
+
+ is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+ return find_view_id_in_view(view_id,
+ &sps->extension.mvc.view[picture->base.voc], is_anchor);
+}
+
+/* Checks whether the supplied inter-view reference picture is used
+   for decoding the next view component pictures, i.e. whether any
+   view with a higher VOIdx lists this picture's view_id among its
+   (anchor or non-anchor) reference views. */
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture)
+{
+ const GstH264SPS * const sps = get_sps(decoder);
+ gboolean is_anchor;
+ guint i, num_views;
+
+ if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+ sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return FALSE;
+
+ is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+ num_views = sps->extension.mvc.num_views_minus1 + 1;
+ /* only views decoded after this one (higher VOIdx) matter */
+ for (i = picture->base.voc + 1; i < num_views; i++) {
+ const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
+ if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
+ return TRUE;
+ }
+ return FALSE;
+}
+
+/* H.8.2.1 - Initialization process for inter-view prediction references.
+   Appends up to @num_refs inter-view reference pictures (resolved from
+   @view_ids) to @ref_list, updating *ref_list_count_ptr in place.
+   Unresolvable view ids are skipped (find_inter_view_reference warns). */
+static void
+init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 **ref_list, guint *ref_list_count_ptr, guint num_refs,
+ const guint16 *view_ids, guint num_view_ids)
+{
+ guint j, n;
+
+ n = *ref_list_count_ptr;
+ for (j = 0; j < num_view_ids && n < num_refs; j++) {
+ GstVaapiPictureH264 * const pic =
+ find_inter_view_reference(decoder, view_ids[j]);
+ if (pic)
+ ref_list[n++] = pic;
+ }
+ *ref_list_count_ptr = n;
+}
+
+/* Appends the inter-view prediction references for @picture to
+   RefPicList0 or RefPicList1 (selected by @list), using the anchor or
+   non-anchor view dependency lists from the active SPS MVC extension.
+   No-op if the SPS carries no MVC extension. */
+static inline void
+init_picture_refs_mvc(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture, GstH264SliceHdr *slice_hdr, guint list)
+{
+ GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ const GstH264SPS * const sps = get_sps(decoder);
+ const GstH264SPSExtMVCView *view;
+
+ GST_DEBUG("initialize reference picture list for inter-view prediction");
+
+ if (sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+ return;
+ view = &sps->extension.mvc.view[picture->base.voc];
+
+/* Token-pastes the list index (0/1) and anchor/non_anchor selector into
+   the matching priv->RefPicListX fields and SPS view arrays. */
+#define INVOKE_INIT_PICTURE_REFS_MVC(ref_list, view_list) do { \
+ init_picture_refs_mvc_1(decoder, \
+ priv->RefPicList##ref_list, \
+ &priv->RefPicList##ref_list##_count, \
+ slice_hdr->num_ref_idx_l##ref_list##_active_minus1 + 1, \
+ view->view_list##_l##ref_list, \
+ view->num_##view_list##s_l##ref_list); \
+ } while (0)
+
+ if (list == 0) {
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+ INVOKE_INIT_PICTURE_REFS_MVC(0, anchor_ref);
+ else
+ INVOKE_INIT_PICTURE_REFS_MVC(0, non_anchor_ref);
+ }
+ else {
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture))
+ INVOKE_INIT_PICTURE_REFS_MVC(1, anchor_ref);
+ else
+ INVOKE_INIT_PICTURE_REFS_MVC(1, non_anchor_ref);
+ }
+
+#undef INVOKE_INIT_PICTURE_REFS_MVC
+}
+
static void
init_picture_refs_p_slice(
GstVaapiDecoderH264 *decoder,
long_ref, long_ref_count
);
}
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
+ }
}
static void
priv->RefPicList1[0] = priv->RefPicList1[1];
priv->RefPicList1[1] = tmp;
}
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture)) {
+ /* RefPicList0 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 0);
+
+ /* RefPicList1 */
+ init_picture_refs_mvc(decoder, picture, slice_hdr, 1);
+ }
}
#undef SORT_REF_LIST
guint num_ref_pic_list_modifications;
GstVaapiPictureH264 **ref_list;
guint *ref_list_count_ptr, ref_list_count, ref_list_idx = 0;
- guint i, j, n, num_refs;
+ const guint16 *view_ids = NULL;
+ guint i, j, n, num_refs, num_view_ids = 0;
gint found_ref_idx;
- gint32 MaxPicNum, CurrPicNum, picNumPred;
+ gint32 MaxPicNum, CurrPicNum, picNumPred, picViewIdxPred;
GST_DEBUG("modification process of reference picture list %u", list);
ref_list = priv->RefPicList0;
ref_list_count_ptr = &priv->RefPicList0_count;
num_refs = slice_hdr->num_ref_idx_l0_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l0;
+ num_view_ids = view->num_anchor_refs_l0;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l0;
+ num_view_ids = view->num_non_anchor_refs_l0;
+ }
+ }
}
else {
ref_pic_list_modification = slice_hdr->ref_pic_list_modification_l1;
ref_list = priv->RefPicList1;
ref_list_count_ptr = &priv->RefPicList1_count;
num_refs = slice_hdr->num_ref_idx_l1_active_minus1 + 1;
+
+ if (GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) {
+ const GstH264SPSExtMVCView * const view =
+ &sps->extension.mvc.view[picture->base.voc];
+ if (GST_VAAPI_PICTURE_IS_ANCHOR(picture)) {
+ view_ids = view->anchor_ref_l1;
+ num_view_ids = view->num_anchor_refs_l1;
+ }
+ else {
+ view_ids = view->non_anchor_ref_l1;
+ num_view_ids = view->num_non_anchor_refs_l1;
+ }
+ }
}
ref_list_count = *ref_list_count_ptr;
}
picNumPred = CurrPicNum;
+ picViewIdxPred = -1;
for (i = 0; i < num_ref_pic_list_modifications; i++) {
GstH264RefPicListModification * const l = &ref_pic_list_modification[i];
PicNumF =
GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(ref_list[j]) ?
ref_list[j]->pic_num : MaxPicNum;
- if (PicNumF != picNum)
+ if (PicNumF != picNum ||
+ ref_list[j]->base.view_id != picture->base.view_id)
ref_list[n++] = ref_list[j];
}
}
/* 8.2.4.3.2 - Long-term reference pictures */
- else {
+ else if (l->modification_of_pic_nums_idc == 2) {
for (j = num_refs; j > ref_list_idx; j--)
ref_list[j] = ref_list[j - 1];
LongTermPicNumF =
GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(ref_list[j]) ?
ref_list[j]->long_term_pic_num : INT_MAX;
- if (LongTermPicNumF != l->value.long_term_pic_num)
+ if (LongTermPicNumF != l->value.long_term_pic_num ||
+ ref_list[j]->base.view_id != picture->base.view_id)
+ ref_list[n++] = ref_list[j];
+ }
+ }
+
+ /* H.8.2.2.3 - Inter-view prediction reference pictures */
+ else if ((GST_VAAPI_PICTURE_IS_MVC(picture) &&
+ sps->extension_type == GST_H264_NAL_EXTENSION_MVC) &&
+ (l->modification_of_pic_nums_idc == 4 ||
+ l->modification_of_pic_nums_idc == 5)) {
+ gint32 abs_diff_view_idx = l->value.abs_diff_view_idx_minus1 + 1;
+ gint32 picViewIdx, targetViewId;
+
+ // (H-6)
+ if (l->modification_of_pic_nums_idc == 4) {
+ picViewIdx = picViewIdxPred - abs_diff_view_idx;
+ if (picViewIdx < 0)
+ picViewIdx += num_view_ids;
+ }
+
+ // (H-7)
+ else {
+ picViewIdx = picViewIdxPred + abs_diff_view_idx;
+ if (picViewIdx >= num_view_ids)
+ picViewIdx -= num_view_ids;
+ }
+ picViewIdxPred = picViewIdx;
+
+ // (H-8, H-9)
+ targetViewId = view_ids[picViewIdx];
+
+ // (H-10)
+ for (j = num_refs; j > ref_list_idx; j--)
+ ref_list[j] = ref_list[j - 1];
+ ref_list[ref_list_idx++] =
+ find_inter_view_reference(decoder, targetViewId);
+ n = ref_list_idx;
+ for (j = ref_list_idx; j <= num_refs; j++) {
+ if (!ref_list[j])
+ continue;
+ if (ref_list[j]->base.view_id != targetViewId ||
+ ref_list[j]->base.poc != picture->base.poc)
ref_list[n++] = ref_list[j];
}
}
}
static void
-init_picture_ref_lists(GstVaapiDecoderH264 *decoder)
+init_picture_ref_lists(GstVaapiDecoderH264 *decoder,
+ GstVaapiPictureH264 *picture)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, j, short_ref_count, long_ref_count;
short_ref_count = 0;
long_ref_count = 0;
- if (GST_VAAPI_PICTURE_IS_FRAME(priv->current_picture)) {
+ if (GST_VAAPI_PICTURE_IS_FRAME(picture)) {
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- GstVaapiPictureH264 *picture;
+ GstVaapiPictureH264 *pic;
if (!gst_vaapi_frame_store_has_frame(fs))
continue;
- picture = fs->buffers[0];
- if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
- priv->short_ref[short_ref_count++] = picture;
- else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
- priv->long_ref[long_ref_count++] = picture;
- picture->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
- picture->other_field = fs->buffers[1];
+ pic = fs->buffers[0];
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+ if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+ priv->short_ref[short_ref_count++] = pic;
+ else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+ priv->long_ref[long_ref_count++] = pic;
+ pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+ pic->other_field = fs->buffers[1];
}
}
else {
for (i = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
for (j = 0; j < fs->num_buffers; j++) {
- GstVaapiPictureH264 * const picture = fs->buffers[j];
- if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(picture))
- priv->short_ref[short_ref_count++] = picture;
- else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(picture))
- priv->long_ref[long_ref_count++] = picture;
- picture->structure = picture->base.structure;
- picture->other_field = fs->buffers[j ^ 1];
+ GstVaapiPictureH264 * const pic = fs->buffers[j];
+ if (pic->base.view_id != picture->base.view_id)
+ continue;
+ if (GST_VAAPI_PICTURE_IS_SHORT_TERM_REFERENCE(pic))
+ priv->short_ref[short_ref_count++] = pic;
+ else if (GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(pic))
+ priv->long_ref[long_ref_count++] = pic;
+ pic->structure = pic->base.structure;
+ pic->other_field = fs->buffers[j ^ 1];
}
}
}
GstVaapiDecoderH264Private * const priv = &decoder->priv;
guint i, num_refs;
- init_picture_ref_lists(decoder);
+ init_picture_ref_lists(decoder, picture);
init_picture_refs_pic_num(decoder, picture, slice_hdr);
priv->RefPicList0_count = 0;
picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+ base_picture->view_id = pi->view_id;
+ base_picture->voc = pi->voc;
+
+ /* Initialize extensions */
+ switch (pi->nalu.extension_type) {
+ case GST_H264_NAL_EXTENSION_MVC: {
+ GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
+
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
+ if (mvc->inter_view_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_INTER_VIEW);
+ if (mvc->anchor_pic_flag)
+ GST_VAAPI_PICTURE_FLAG_SET(picture,
+ GST_VAAPI_PICTURE_FLAG_ANCHOR);
+ break;
+ }
+ }
/* Reset decoder state for IDR pictures */
- if (pi->nalu.type == GST_H264_NAL_SLICE_IDR) {
+ if (pi->nalu.idr_pic_flag) {
GST_DEBUG("<IDR>");
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
- dpb_flush(decoder);
+ dpb_flush(decoder, picture);
}
/* Initialize picture structure */
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
- dpb_flush(decoder);
+ dpb_flush(decoder, picture);
priv->prev_pic_has_mmco5 = TRUE;
/* Assign LongTermFrameIdx to the other field if it was also
marked as "used for long-term reference */
- other_field = (GstVaapiPictureH264 *)picture->base.parent_picture;
+ other_field = GST_VAAPI_PICTURE_H264(picture->base.parent_picture);
if (other_field && GST_VAAPI_PICTURE_IS_LONG_TERM_REFERENCE(other_field))
other_field->long_term_frame_idx = ref_pic_marking->long_term_frame_idx;
}
priv->prev_pic_has_mmco5 = FALSE;
priv->prev_pic_structure = picture->structure;
+ if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture))
+ g_ptr_array_add(priv->inter_views, gst_vaapi_picture_ref(picture));
+
if (!GST_VAAPI_PICTURE_IS_REFERENCE(picture))
return TRUE;
}
}
+/* Fills in VA picture @pic from @picture for use as a RefPicListX
+   entry, clearing the reference flags for inter-view references as
+   required by H.8.4 */
+static void
+vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
+    GstVaapiPictureH264 *picture)
+{
+    vaapi_fill_picture(pic, picture, 0);
+
+    /* H.8.4 - MVC inter prediction and inter-view prediction process */
+    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
+        /* The inter-view reference components and inter-view only
+           reference components that are included in the reference
+           picture lists are considered as not being marked as "used for
+           short-term reference" or "used for long-term reference" */
+        pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
+            VA_PICTURE_H264_LONG_TERM_REFERENCE);
+    }
+}
+
static gboolean
fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
for (i = 0, n = 0; i < priv->dpb_count; i++) {
GstVaapiFrameStore * const fs = priv->dpb[i];
- if (gst_vaapi_frame_store_has_reference(fs))
+ if ((gst_vaapi_frame_store_has_reference(fs) &&
+ fs->view_id == picture->base.view_id) ||
+ (gst_vaapi_frame_store_has_inter_view(fs) &&
+ is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
fs->buffers[0], fs->structure);
+ if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
+ break;
}
for (; n < G_N_ELEMENTS(pic_param->ReferenceFrames); n++)
vaapi_init_picture(&pic_param->ReferenceFrames[n]);
#define CHECK_VALUE(new_slice_hdr, old_slice_hdr, field) \
CHECK_EXPR(((new_slice_hdr)->field == (old_slice_hdr)->field), #field)
+ /* view_id differs in value and VOIdx of current slice_hdr is less
+ than the VOIdx of the prev_slice_hdr */
+ CHECK_VALUE(pi, prev_pi, view_id);
+
/* frame_num differs in value, regardless of inferred values to 0 */
CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
return FALSE;
}
+/* Detection of a new access unit, assuming we are already in presence
+   of a new picture */
+static inline gboolean
+is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
+{
+    /* Same view as the previous slice: since this is already known to
+       be a new picture, it necessarily starts a new access unit */
+    if (!prev_pi || prev_pi->view_id == pi->view_id)
+        return TRUE;
+    /* Different view: a new access unit starts when the view order
+       index (VOIdx) decreases relative to the previous slice */
+    return pi->voc < prev_pi->voc;
+}
+
+/* Finds the first field picture corresponding to the supplied picture,
+   i.e. the previously decoded field of the same frame, or NULL if the
+   supplied slice starts a new frame */
+static GstVaapiPictureH264 *
+find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
+    GstVaapiFrameStore *fs;
+
+    /* A frame picture can never be the second field of anything */
+    if (!slice_hdr->field_pic_flag)
+        return NULL;
+
+    /* Look up the previous frame store for this view, indexed by VOIdx;
+       a complete frame there means this field starts a new picture */
+    fs = priv->prev_frames[pi->voc];
+    if (!fs || gst_vaapi_frame_store_has_frame(fs))
+        return NULL;
+
+    /* Fields pair up only when they share the same frame_num */
+    if (fs->buffers[0]->frame_num == slice_hdr->frame_num)
+        return fs->buffers[0];
+    return NULL;
+}
+
static GstVaapiDecoderStatus
decode_picture(GstVaapiDecoderH264 *decoder, GstVaapiDecoderUnit *unit)
{
GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
GstH264PPS * const pps = ensure_pps(decoder, slice_hdr->pps);
GstH264SPS * const sps = ensure_sps(decoder, slice_hdr->pps->sequence);
- GstVaapiPictureH264 *picture;
+ GstVaapiPictureH264 *picture, *first_field;
GstVaapiDecoderStatus status;
g_return_val_if_fail(pps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
g_return_val_if_fail(sps != NULL, GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN);
+ /* Only decode base stream for MVC */
+ switch (sps->profile_idc) {
+ case GST_H264_PROFILE_MULTIVIEW_HIGH:
+ case GST_H264_PROFILE_STEREO_HIGH:
+ if (0) {
+ GST_DEBUG("drop picture from substream");
+ return GST_VAAPI_DECODER_STATUS_DROP_FRAME;
+ }
+ break;
+ }
+
status = ensure_context(decoder, sps);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
priv->decoder_state = 0;
- if (priv->current_picture) {
+ first_field = find_first_field(decoder, pi);
+ if (first_field) {
/* Re-use current picture where the first field was decoded */
- picture = gst_vaapi_picture_h264_new_field(priv->current_picture);
+ picture = gst_vaapi_picture_h264_new_field(first_field);
if (!picture) {
GST_ERROR("failed to allocate field picture");
return GST_VAAPI_DECODER_STATUS_ERROR_ALLOCATION_FAILED;
gst_vaapi_picture_replace(&priv->current_picture, picture);
gst_vaapi_picture_unref(picture);
+ /* Clear inter-view references list if this is the primary coded
+ picture of the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ g_ptr_array_set_size(priv->inter_views, 0);
+
/* Update cropping rectangle */
if (sps->frame_cropping_flag) {
GstVaapiRectangle crop_rect;
slice_hdr->num_ref_idx_l0_active_minus1;
for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
+ priv->RefPicList0[i]);
for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList0[i]);
slice_hdr->num_ref_idx_l1_active_minus1;
for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
+ priv->RefPicList1[i]);
for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList1[i]);
return TRUE;
return GST_VAAPI_DECODER_STATUS_ERROR_UNKNOWN;
}
+    /* Check whether this is the first/last slice in the current access unit */
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_START)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_START);
+ if (pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)
+ GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
+
slice = GST_VAAPI_SLICE_NEW(H264, decoder,
(map_info.data + unit->offset + pi->nalu.offset), pi->nalu.size);
gst_buffer_unmap(buffer, &map_info);
case GST_H264_NAL_SPS:
status = decode_sps(decoder, unit);
break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = decode_subset_sps(decoder, unit);
+ break;
case GST_H264_NAL_PPS:
status = decode_pps(decoder, unit);
break;
+ case GST_H264_NAL_SLICE_EXT:
case GST_H264_NAL_SLICE_IDR:
/* fall-through. IDR specifics are handled in init_picture() */
case GST_H264_NAL_SLICE:
guint i, size, buf_size, nalu_size, flags;
guint32 start_code;
gint ofs, ofs2;
+ gboolean at_au_end = FALSE;
status = ensure_decoder(decoder);
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- size = gst_adapter_available(adapter);
+ switch (priv->stream_alignment) {
+ case GST_VAAPI_STREAM_ALIGN_H264_NALU:
+ case GST_VAAPI_STREAM_ALIGN_H264_AU:
+ size = gst_adapter_available_fast(adapter);
+ break;
+ default:
+ size = gst_adapter_available(adapter);
+ break;
+ }
if (priv->is_avcC) {
if (size < priv->nal_length_size)
buf_size = priv->nal_length_size + nalu_size;
if (size < buf_size)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ else if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+ at_au_end = (buf_size == size);
}
else {
if (size < 4)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs = scan_for_start_code(adapter, 0, size, NULL);
- if (ofs < 0)
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
- if (ofs > 0) {
- gst_adapter_flush(adapter, ofs);
- size -= ofs;
- }
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+ buf_size = size;
+ else {
+ ofs = scan_for_start_code(adapter, 0, size, NULL);
+ if (ofs < 0)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs2 = ps->input_offset2 - ofs - 4;
- if (ofs2 < 4)
- ofs2 = 4;
+ if (ofs > 0) {
+ gst_adapter_flush(adapter, ofs);
+ size -= ofs;
+ }
- ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
- scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
- if (ofs < 0) {
- // Assume the whole NAL unit is present if end-of-stream
- if (!at_eos) {
- ps->input_offset2 = size;
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ ofs2 = ps->input_offset2 - ofs - 4;
+ if (ofs2 < 4)
+ ofs2 = 4;
+
+ ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
+ scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
+ if (ofs < 0) {
+ // Assume the whole NAL unit is present if end-of-stream
+ // or stream buffers aligned on access unit boundaries
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_AU)
+ at_au_end = TRUE;
+ else if (!at_eos) {
+ ps->input_offset2 = size;
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ ofs = size;
}
- ofs = size;
+ buf_size = ofs;
}
- buf_size = ofs;
}
ps->input_offset2 = 0;
case GST_H264_NAL_SPS:
status = parse_sps(decoder, unit);
break;
+ case GST_H264_NAL_SUBSET_SPS:
+ status = parse_subset_sps(decoder, unit);
+ break;
case GST_H264_NAL_PPS:
status = parse_pps(decoder, unit);
break;
case GST_H264_NAL_SEI:
status = parse_sei(decoder, unit);
break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ status = GST_VAAPI_DECODER_STATUS_SUCCESS;
+ break;
+ }
+ /* fall-through */
case GST_H264_NAL_SLICE_IDR:
case GST_H264_NAL_SLICE:
status = parse_slice(decoder, unit);
return status;
flags = 0;
+ if (at_au_end) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
+ }
switch (pi->nalu.type) {
case GST_H264_NAL_AU_DELIMITER:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
/* fall-through */
case GST_H264_NAL_FILLER_DATA:
/* fall-through */
case GST_H264_NAL_SEQ_END:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_END;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
break;
case GST_H264_NAL_SPS:
+ case GST_H264_NAL_SUBSET_SPS:
case GST_H264_NAL_PPS:
case GST_H264_NAL_SEI:
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
+ case GST_H264_NAL_SLICE_EXT:
+ if (!GST_H264_IS_MVC_NALU(&pi->nalu)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
+ break;
+ }
+ /* fall-through */
case GST_H264_NAL_SLICE_IDR:
case GST_H264_NAL_SLICE:
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SLICE;
- if (is_new_picture(pi, priv->prev_slice_pi))
+ if (priv->prev_pi &&
+ (priv->prev_pi->flags & GST_VAAPI_DECODER_UNIT_FLAG_AU_END)) {
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ }
+ else if (is_new_picture(pi, priv->prev_slice_pi)) {
flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ if (is_new_access_unit(pi, priv->prev_slice_pi))
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START;
+ }
gst_vaapi_parser_info_h264_replace(&priv->prev_slice_pi, pi);
break;
case GST_H264_NAL_SPS_EXT:
/* skip SPS extension and auxiliary slice for now */
flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP;
break;
+ case GST_H264_NAL_PREFIX_UNIT:
+ /* skip Prefix NAL units for now */
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_SKIP |
+ GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ break;
default:
if (pi->nalu.type >= 14 && pi->nalu.type <= 18)
- flags |= GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
+ flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_START |
+ GST_VAAPI_DECODER_UNIT_FLAG_FRAME_START;
break;
}
+ if ((flags & GST_VAAPI_DECODER_UNIT_FLAGS_AU) && priv->prev_slice_pi)
+ priv->prev_slice_pi->flags |= GST_VAAPI_DECODER_UNIT_FLAG_AU_END;
GST_VAAPI_DECODER_UNIT_FLAG_SET(unit, flags);
pi->nalu.data = NULL;
pi->state = priv->parser_state;
+ pi->flags = flags;
+ gst_vaapi_parser_info_h264_replace(&priv->prev_pi, pi);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
GstVaapiDecoderH264 * const decoder =
GST_VAAPI_DECODER_H264_CAST(base_decoder);
- dpb_flush(decoder);
+ dpb_flush(decoder, NULL);
return GST_VAAPI_DECODER_STATUS_SUCCESS;
}
}
/**
+ * gst_vaapi_decoder_h264_set_alignment:
+ * @decoder: a #GstVaapiDecoderH264
+ * @alignment: the #GstVaapiStreamAlignH264
+ *
+ * Specifies how stream buffers are aligned / fed, i.e. the boundaries
+ * of each buffer that is supplied to the decoder. This could be no
+ * specific alignment, NAL unit boundaries, or access unit boundaries.
+ */
+void
+gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+    GstVaapiStreamAlignH264 alignment)
+{
+    g_return_if_fail(decoder != NULL);
+
+    /* The alignment hint is consumed at parse time to decide how much
+       data may be pulled from the adapter for the next decode unit */
+    decoder->priv.stream_alignment = alignment;
+}
+
+/**
* gst_vaapi_decoder_h264_new:
* @display: a #GstVaapiDisplay
* @caps: a #GstCaps holding codec information