GstH264SliceHdr slice_hdr;
} data;
guint state;
- guint flags; // Same as decoder unit flags (persistent)
+ guint flags; // Same as decoder unit flags (persistent)
+ guint view_id; // View ID of slice
+ guint voc; // View order index (VOIdx) of slice
};
static void
GST_VAAPI_PICTURE_FLAG_LONG_TERM_REFERENCE)
#define GST_VAAPI_PICTURE_IS_INTER_VIEW(picture) \
- (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_INTER_VIEW)
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_INTER_VIEW))
#define GST_VAAPI_PICTURE_IS_ANCHOR(picture) \
- (GST_VAAPI_PICTURE_FLAGS(picture) & GST_VAAPI_PICTURE_FLAG_ANCHOR)
+ (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_ANCHOR))
#define GST_VAAPI_PICTURE_H264(picture) \
((GstVaapiPictureH264 *)(picture))
GstH264NalParser *parser;
guint parser_state;
guint decoder_state;
+ GstVaapiStreamAlignH264 stream_alignment;
GstVaapiPictureH264 *current_picture;
GstVaapiParserInfoH264 *sps[GST_H264_MAX_SPS_COUNT];
GstVaapiParserInfoH264 *active_sps;
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture);
+
+/* Frame-store level wrapper: checks whether the supplied frame store
+   still serves as an inter-view reference for subsequent view
+   components. Only buffers[0] is examined -- assumes both fields of a
+   frame store share the same view parameters (TODO: confirm) */
+static inline gboolean
+is_inter_view_reference_for_next_frames(GstVaapiDecoderH264 *decoder,
+    GstVaapiFrameStore *fs)
+{
+    return is_inter_view_reference_for_next_pictures(decoder, fs->buffers[0]);
+}
+
/* Determines if the supplied profile is one of the MVC set */
static gboolean
is_mvc_profile(GstH264Profile profile)
}
+/* Outputs all other view components of the same access unit as
+   @picture whose view order index (VOIdx) is strictly lower than
+   @voc, in increasing VOIdx order. Returns TRUE if every output
+   succeeded, or trivially TRUE when only a single view is decoded */
static gboolean
-dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+dpb_output_other_views(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture, guint voc)
{
    GstVaapiDecoderH264Private * const priv = &decoder->priv;
    GstVaapiPictureH264 *found_picture;
    gint found_index;
    gboolean success;
-    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
-    if (found_index < 0)
-        return FALSE;
-
-    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
-    dpb_evict(decoder, found_picture, found_index);
    if (priv->max_views == 1)
-        return success;
+        return TRUE;
    /* Emit all other view components that were in the same access
       unit than the picture we have just found */
+    found_picture = picture;
    for (;;) {
        found_index = dpb_find_lowest_voc(decoder, found_picture,
            &found_picture);
-        if (found_index < 0)
+        /* Stop once every view component below the requested VOIdx
+           bound was emitted */
+        if (found_index < 0 || found_picture->base.voc >= voc)
            break;
-        dpb_output(decoder, priv->dpb[found_index], found_picture);
+        success = dpb_output(decoder, priv->dpb[found_index], found_picture);
        dpb_evict(decoder, found_picture, found_index);
+        if (!success)
+            return FALSE;
    }
+    return TRUE;
+}
+
+/* "Bumping" process: outputs the picture with the lowest POC that
+   still needs to be output and evicts it from the DPB once done.
+   For MVC streams, the sibling view components of the same access
+   unit are output too, in VOIdx order */
+static gboolean
+dpb_bump(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
+{
+    GstVaapiDecoderH264Private * const priv = &decoder->priv;
+    GstVaapiPictureH264 *found_picture;
+    gint found_index;
+    gboolean success;
+
+    found_index = dpb_find_lowest_poc(decoder, picture, &found_picture);
+    if (found_index < 0)
+        return FALSE;
+
+    /* Only emit sibling views when the bumped picture belongs to a
+       different access unit than the picture currently being decoded
+       (POC differs) -- NOTE(review): presumably to avoid outputting
+       components of the AU still under construction; confirm */
+    if (picture && picture->base.poc != found_picture->base.poc)
+        dpb_output_other_views(decoder, found_picture, found_picture->base.voc);
+
+    success = dpb_output(decoder, priv->dpb[found_index], found_picture);
+    dpb_evict(decoder, found_picture, found_index);
+    if (priv->max_views == 1)
+        return success;
+
+    /* Emit the remaining view components (higher VOIdx) of that AU */
+    if (picture && picture->base.poc != found_picture->base.poc)
+        dpb_output_other_views(decoder, found_picture, G_MAXUINT32);
    return success;
}
dpb_prune_mvc(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ const gboolean is_last_picture = /* in the access unit */
+ GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END);
guint i;
- // Remove all unused inter-view pictures
- if (GST_VAAPI_PICTURE_FLAG_IS_SET(picture, GST_VAAPI_PICTURE_FLAG_AU_END)) {
- i = 0;
- while (i < priv->dpb_count) {
- GstVaapiFrameStore * const fs = priv->dpb[i];
- if (fs->view_id != picture->base.view_id &&
- !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs))
- dpb_remove_index(decoder, i);
- else
- i++;
- }
+ // Remove all unused inter-view only reference components of the current AU
+ i = 0;
+ while (i < priv->dpb_count) {
+ GstVaapiFrameStore * const fs = priv->dpb[i];
+ if (fs->view_id != picture->base.view_id &&
+ !fs->output_needed && !gst_vaapi_frame_store_has_reference(fs) &&
+ (is_last_picture ||
+ !is_inter_view_reference_for_next_frames(decoder, fs)))
+ dpb_remove_index(decoder, i);
+ else
+ i++;
}
}
guint i;
// Resize array of inter-view references
- if (priv->inter_views)
- g_ptr_array_set_size(priv->inter_views, priv->max_views);
- else {
+ if (!priv->inter_views) {
priv->inter_views = g_ptr_array_new_full(priv->max_views,
(GDestroyNotify)unref_inter_view);
if (!priv->inter_views)
return GST_VAAPI_DECODER_STATUS_ERROR_UNSUPPORTED_PROFILE;
}
- if (priv->profile != profile) {
+ if (!priv->profile || (priv->profile != profile && priv->max_views == 1)) {
GST_DEBUG("profile changed");
reset_context = TRUE;
priv->profile = profile;
}
priv->progressive_sequence = sps->frame_mbs_only_flag;
-#if 0
- /* XXX: we only output complete frames for now */
gst_vaapi_decoder_set_interlaced(base_decoder, !priv->progressive_sequence);
-#endif
gst_vaapi_decoder_set_pixel_aspect_ratio(
base_decoder,
g_assert(G_N_ELEMENTS(iq_matrix->ScalingList4x4[0]) == 16);
for (i = 0; i < G_N_ELEMENTS(iq_matrix->ScalingList4x4); i++)
- gst_h264_video_quant_matrix_4x4_get_raster_from_zigzag(
+ gst_h264_quant_matrix_4x4_get_raster_from_zigzag(
iq_matrix->ScalingList4x4[i], pps->scaling_lists_4x4[i]);
}
n = (sps->chroma_format_idc != 3) ? 2 : 6;
for (i = 0; i < n; i++) {
- gst_h264_video_quant_matrix_8x8_get_raster_from_zigzag(
+ gst_h264_quant_matrix_8x8_get_raster_from_zigzag(
iq_matrix->ScalingList8x8[i], pps->scaling_lists_8x8[i]);
}
}
GstVaapiParserInfoH264 * const pi = unit->parsed_info;
GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
GstH264NalUnit * const nalu = &pi->nalu;
+ GstH264SPS *sps;
GstH264ParserResult result;
guint num_views;
if (result != GST_H264_PARSER_OK)
return get_status(result);
- num_views = get_num_views(slice_hdr->pps->sequence);
+ sps = slice_hdr->pps->sequence;
+
+ /* Update MVC data */
+ num_views = get_num_views(sps);
if (priv->max_views < num_views) {
priv->max_views = num_views;
GST_DEBUG("maximum number of views changed to %u", num_views);
}
+ pi->view_id = get_view_id(&pi->nalu);
+ pi->voc = get_view_order_index(sps, pi->view_id);
priv->parser_state |= GST_H264_VIDEO_STATE_GOT_SLICE;
return GST_VAAPI_DECODER_STATUS_SUCCESS;
return NULL;
}
+/* Checks whether the view id exists in the supplied list of view ids.
+   Plain linear scan over the @num_view_ids entries of @view_ids */
+static gboolean
+find_view_id(guint16 view_id, const guint16 *view_ids, guint num_view_ids)
+{
+    guint i;
+
+    for (i = 0; i < num_view_ids; i++) {
+        if (view_ids[i] == view_id)
+            return TRUE;
+    }
+    return FALSE;
+}
+
+/* Checks whether @view_id is listed as an inter-view reference of the
+   supplied SPS MVC view: the anchor reference lists (L0/L1) are
+   searched when @is_anchor, the non-anchor lists otherwise */
+static gboolean
+find_view_id_in_view(guint16 view_id, const GstH264SPSExtMVCView *view,
+    gboolean is_anchor)
+{
+    if (is_anchor)
+        return (find_view_id(view_id, view->anchor_ref_l0,
+                    view->num_anchor_refs_l0) ||
+                find_view_id(view_id, view->anchor_ref_l1,
+                    view->num_anchor_refs_l1));
+
+    return (find_view_id(view_id, view->non_anchor_ref_l0,
+                view->num_non_anchor_refs_l0) ||
+            find_view_id(view_id, view->non_anchor_ref_l1,
+                view->num_non_anchor_refs_l1));
+}
+
+/* Checks whether the inter-view reference picture with the supplied
+   view id is used for decoding the current view component picture */
+static gboolean
+is_inter_view_reference_for_picture(GstVaapiDecoderH264 *decoder,
+    guint16 view_id, GstVaapiPictureH264 *picture)
+{
+    const GstH264SPS * const sps = get_sps(decoder);
+    gboolean is_anchor;
+
+    /* Inter-view prediction only exists for MVC pictures described by
+       an SPS carrying the MVC extension */
+    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+        return FALSE;
+
+    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+    /* view[] is indexed by view order index (VOIdx) */
+    return find_view_id_in_view(view_id,
+        &sps->extension.mvc.view[picture->base.voc], is_anchor);
+}
+
+/* Checks whether the supplied inter-view reference picture is used
+   for decoding the next view component pictures */
+static gboolean
+is_inter_view_reference_for_next_pictures(GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture)
+{
+    const GstH264SPS * const sps = get_sps(decoder);
+    gboolean is_anchor;
+    guint i, num_views;
+
+    if (!GST_VAAPI_PICTURE_IS_MVC(picture) ||
+        sps->extension_type != GST_H264_NAL_EXTENSION_MVC)
+        return FALSE;
+
+    is_anchor = GST_VAAPI_PICTURE_IS_ANCHOR(picture);
+    num_views = sps->extension.mvc.num_views_minus1 + 1;
+    /* Scan the reference lists of every view with a higher VOIdx,
+       i.e. the views decoded after this picture's view in the AU */
+    for (i = picture->base.voc + 1; i < num_views; i++) {
+        const GstH264SPSExtMVCView * const view = &sps->extension.mvc.view[i];
+        if (find_view_id_in_view(picture->base.view_id, view, is_anchor))
+            return TRUE;
+    }
+    return FALSE;
+}
+
/* H.8.2.1 - Initialization process for inter-view prediction references */
static void
init_picture_refs_mvc_1(GstVaapiDecoderH264 *decoder,
picture->output_flag = TRUE; /* XXX: conformant to Annex A only */
base_picture->pts = GST_VAAPI_DECODER_CODEC_FRAME(decoder)->pts;
base_picture->type = GST_VAAPI_PICTURE_TYPE_NONE;
+ base_picture->view_id = pi->view_id;
+ base_picture->voc = pi->voc;
/* Initialize extensions */
switch (pi->nalu.extension_type) {
case GST_H264_NAL_EXTENSION_MVC: {
GstH264NalUnitExtensionMVC * const mvc = &pi->nalu.extension.mvc;
- base_picture->view_id = mvc->view_id;
- base_picture->voc = get_view_order_index(get_sps(decoder),
- base_picture->view_id);
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_MVC);
if (mvc->inter_view_flag)
}
}
+/* Same as vaapi_fill_picture(), for pictures inserted into a
+   RefPicListX reference picture list: inter-view entries get their
+   short-term/long-term reference flags cleared */
+static void
+vaapi_fill_picture_for_RefPicListX(VAPictureH264 *pic,
+    GstVaapiPictureH264 *picture)
+{
+    vaapi_fill_picture(pic, picture, 0);
+
+    /* H.8.4 - MVC inter prediction and inter-view prediction process */
+    if (GST_VAAPI_PICTURE_IS_INTER_VIEW(picture)) {
+        /* The inter-view reference components and inter-view only
+           reference components that are included in the reference
+           picture lists are considered as not being marked as "used for
+           short-term reference" or "used for long-term reference" */
+        pic->flags &= ~(VA_PICTURE_H264_SHORT_TERM_REFERENCE|
+            VA_PICTURE_H264_LONG_TERM_REFERENCE);
+    }
+}
+
static gboolean
fill_picture(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture)
{
GstVaapiFrameStore * const fs = priv->dpb[i];
if ((gst_vaapi_frame_store_has_reference(fs) &&
fs->view_id == picture->base.view_id) ||
- gst_vaapi_frame_store_has_inter_view(fs))
+ (gst_vaapi_frame_store_has_inter_view(fs) &&
+ is_inter_view_reference_for_picture(decoder, fs->view_id, picture)))
vaapi_fill_picture(&pic_param->ReferenceFrames[n++],
fs->buffers[0], fs->structure);
if (n >= G_N_ELEMENTS(pic_param->ReferenceFrames))
/* view_id differs in value and VOIdx of current slice_hdr is less
than the VOIdx of the prev_slice_hdr */
- CHECK_VALUE(&pi->nalu.extension.mvc, &prev_pi->nalu.extension.mvc, view_id);
+ CHECK_VALUE(pi, prev_pi, view_id);
/* frame_num differs in value, regardless of inferred values to 0 */
CHECK_VALUE(slice_hdr, prev_slice_hdr, frame_num);
/* Detection of a new access unit, assuming we are already in presence
of a new picture */
-static gboolean
+static inline gboolean
is_new_access_unit(GstVaapiParserInfoH264 *pi, GstVaapiParserInfoH264 *prev_pi)
{
-    GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
-    GstH264SliceHdr *prev_slice_hdr;
-    GstH264NalUnitExtensionMVC *mvc, *prev_mvc;
-    gint voc, prev_voc;
-
-    g_return_val_if_fail(is_new_picture(pi, prev_pi), FALSE);
-
-    if (!prev_pi)
-        return TRUE;
-    prev_slice_hdr = &prev_pi->data.slice_hdr;
-
-    mvc = &pi->nalu.extension.mvc;
-    prev_mvc = &prev_pi->nalu.extension.mvc;
-    if (mvc->view_id == prev_mvc->view_id)
+    /* No previous slice, or same view id again: a new access unit
+       begins. Otherwise a new AU starts iff the view order index
+       decreased, since view components within one AU appear in
+       increasing VOIdx order. Uses pi->view_id / pi->voc cached at
+       parse time instead of re-deriving them from the SPS */
+    if (!prev_pi || prev_pi->view_id == pi->view_id)
        return TRUE;
-
-    voc = get_view_order_index(slice_hdr->pps->sequence, mvc->view_id);
-    prev_voc = get_view_order_index(prev_slice_hdr->pps->sequence,
-        prev_mvc->view_id);
-    return voc < prev_voc;
+    return pi->voc < prev_pi->voc;
}
/* Finds the first field picture corresponding to the supplied picture */
static GstVaapiPictureH264 *
-find_first_field(GstVaapiDecoderH264 *decoder, GstH264NalUnit *nalu,
- GstH264SliceHdr *slice_hdr)
+find_first_field(GstVaapiDecoderH264 *decoder, GstVaapiParserInfoH264 *pi)
{
GstVaapiDecoderH264Private * const priv = &decoder->priv;
+ GstH264SliceHdr * const slice_hdr = &pi->data.slice_hdr;
GstVaapiFrameStore *fs;
- gint voc;
if (!slice_hdr->field_pic_flag)
return NULL;
- voc = get_view_order_index(get_sps(decoder), get_view_id(nalu));
- if (voc < 0)
- return NULL;
-
- fs = priv->prev_frames[voc];
+ fs = priv->prev_frames[pi->voc];
if (!fs || gst_vaapi_frame_store_has_frame(fs))
return NULL;
priv->decoder_state = 0;
- first_field = find_first_field(decoder, &pi->nalu, slice_hdr);
+ first_field = find_first_field(decoder, pi);
if (first_field) {
/* Re-use current picture where the first field was decoded */
picture = gst_vaapi_picture_h264_new_field(first_field);
slice_hdr->num_ref_idx_l0_active_minus1;
for (i = 0; i < priv->RefPicList0_count && priv->RefPicList0[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList0[i], priv->RefPicList0[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList0[i],
+ priv->RefPicList0[i]);
for (; i <= slice_param->num_ref_idx_l0_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList0[i]);
slice_hdr->num_ref_idx_l1_active_minus1;
for (i = 0; i < priv->RefPicList1_count && priv->RefPicList1[i]; i++)
- vaapi_fill_picture(&slice_param->RefPicList1[i], priv->RefPicList1[i], 0);
+ vaapi_fill_picture_for_RefPicListX(&slice_param->RefPicList1[i],
+ priv->RefPicList1[i]);
for (; i <= slice_param->num_ref_idx_l1_active_minus1; i++)
vaapi_init_picture(&slice_param->RefPicList1[i]);
return TRUE;
if (status != GST_VAAPI_DECODER_STATUS_SUCCESS)
return status;
- size = gst_adapter_available(adapter);
+ switch (priv->stream_alignment) {
+ case GST_VAAPI_STREAM_ALIGN_H264_NALU:
+ size = gst_adapter_available_fast(adapter);
+ break;
+ default:
+ size = gst_adapter_available(adapter);
+ break;
+ }
if (priv->is_avcC) {
if (size < priv->nal_length_size)
if (size < 4)
return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs = scan_for_start_code(adapter, 0, size, NULL);
- if (ofs < 0)
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
-
- if (ofs > 0) {
- gst_adapter_flush(adapter, ofs);
- size -= ofs;
- }
+ if (priv->stream_alignment == GST_VAAPI_STREAM_ALIGN_H264_NALU)
+ buf_size = size;
+ else {
+ ofs = scan_for_start_code(adapter, 0, size, NULL);
+ if (ofs < 0)
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
- ofs2 = ps->input_offset2 - ofs - 4;
- if (ofs2 < 4)
- ofs2 = 4;
+ if (ofs > 0) {
+ gst_adapter_flush(adapter, ofs);
+ size -= ofs;
+ }
- ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
- scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
- if (ofs < 0) {
- // Assume the whole NAL unit is present if end-of-stream
- if (!at_eos) {
- ps->input_offset2 = size;
- return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ ofs2 = ps->input_offset2 - ofs - 4;
+ if (ofs2 < 4)
+ ofs2 = 4;
+
+ ofs = G_UNLIKELY(size < ofs2 + 4) ? -1 :
+ scan_for_start_code(adapter, ofs2, size - ofs2, NULL);
+ if (ofs < 0) {
+ // Assume the whole NAL unit is present if end-of-stream
+ if (!at_eos) {
+ ps->input_offset2 = size;
+ return GST_VAAPI_DECODER_STATUS_ERROR_NO_DATA;
+ }
+ ofs = size;
}
- ofs = size;
+ buf_size = ofs;
}
- buf_size = ofs;
}
ps->input_offset2 = 0;
}
/**
+ * gst_vaapi_decoder_h264_set_alignment:
+ * @decoder: a #GstVaapiDecoderH264
+ * @alignment: the #GstVaapiStreamAlignH264
+ *
+ * Specifies how stream buffers are aligned / fed, i.e. the boundaries
+ * of each buffer that is supplied to the decoder. This could be no
+ * specific alignment, NAL unit boundaries, or access unit boundaries.
+ */
+void
+gst_vaapi_decoder_h264_set_alignment(GstVaapiDecoderH264 *decoder,
+    GstVaapiStreamAlignH264 alignment)
+{
+    g_return_if_fail(decoder != NULL);
+
+    /* NOTE(review): plain unsynchronized store -- presumably meant to
+       be called before decoding starts; confirm with callers */
+    decoder->priv.stream_alignment = alignment;
+}
+
+/**
* gst_vaapi_decoder_h264_new:
* @display: a #GstVaapiDisplay
* @caps: a #GstCaps holding codec information