return GST_VAAPI_PICTURE_H264_CAST(base_picture);
}
+/* Creates a "dummy" picture used to fill gaps in frame_num (H.264 spec,
+ * clause 8.2.5.2): a clone of @picture flagged as skipped (never output)
+ * and as a short-term reference, with an out-of-band POC of -1.
+ *
+ * Returns: the new dummy picture (caller owns the reference), or NULL if
+ * cloning failed.  Callers MUST check for NULL. */
+static inline GstVaapiPictureH264 *
+gst_vaapi_picture_h264_new_dummy(GstVaapiPictureH264 *picture)
+{
+ GstVaapiPicture *base_picture;
+
+ g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), NULL);
+
+ base_picture = gst_vaapi_picture_new_clone(&picture->base);
+ if (!base_picture)
+ return NULL;
+ /* SKIPPED: never rendered/output; SHORT_TERM_REFERENCE: the dummy
+ * participates in reference picture lists like a decoded frame */
+ GST_VAAPI_PICTURE_FLAG_SET(
+ base_picture,
+ (GST_VAAPI_PICTURE_FLAG_SKIPPED |
+ GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE));
+ base_picture->poc = -1;
+ return GST_VAAPI_PICTURE_H264_CAST(base_picture);
+}
+
static inline GstVaapiSliceH264 *
gst_vaapi_picture_h264_get_last_slice(GstVaapiPictureH264 *picture)
{
static gboolean
exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
+static gboolean
+exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder);
+
/* Get number of reference frames to use */
static guint
get_max_dec_frame_buffering(GstH264SPS *sps)
}
}
+/* Synthesizes dummy reference pictures for gaps in frame_num, as required
+ * when sps->gaps_in_frame_num_value_allowed_flag is set (H.264 spec,
+ * clause 8.2.5.2).  For every frame_num missing between the previously
+ * decoded picture and the current one (modulo MaxFrameNum), a skipped
+ * short-term-reference clone of @picture is created, inserted into the
+ * short-term reference list, and added to the DPB.
+ *
+ * @decoder:   the decoder instance
+ * @picture:   the current picture being initialized
+ * @slice_hdr: slice header carrying the active PPS/SPS
+ * @nalu:      the current NAL unit (reserved for future use)
+ */
+static void
+process_for_gaps_in_frame_num(
+    GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture,
+    GstH264SliceHdr     *slice_hdr,
+    GstH264NalUnit      *nalu
+)
+{
+    GstVaapiDecoderH264Private * const priv = decoder->priv;
+    GstH264SPS * const sps = slice_hdr->pps->sequence;
+    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+    GstVaapiPictureH264 *dummy_pic;
+    gint32 final_frame_num;
+
+    /* No gap: current frame_num equals, or directly follows, the previous
+     * one (frame_num wraps at MaxFrameNum, hence the modulo) */
+    if (priv->frame_num == priv->prev_frame_num ||
+        priv->frame_num == (priv->prev_frame_num + 1) % MaxFrameNum)
+        return;
+
+    final_frame_num = priv->frame_num;
+    priv->frame_num = (priv->prev_frame_num + 1) % MaxFrameNum;
+    init_picture_ref_lists(decoder);
+
+    while (final_frame_num != priv->frame_num) {
+        dummy_pic = gst_vaapi_picture_h264_new_dummy(picture);
+        if (!dummy_pic) {
+            /* Clone failed (e.g. out of memory): stop filling the gap.
+             * priv->frame_num and priv->current_picture are restored
+             * below so the current picture still decodes */
+            GST_DEBUG("failed to create dummy picture for frame_num gap");
+            break;
+        }
+        dummy_pic->frame_num = priv->frame_num;
+        dummy_pic->frame_num_wrap = priv->frame_num;
+        dummy_pic->pps = picture->pps;
+        dummy_pic->output_needed = FALSE;
+        dummy_pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+        priv->current_picture = dummy_pic;
+
+        init_picture_refs_pic_num(decoder, dummy_pic, slice_hdr);
+        /* Sliding-window marking keeps the short-term list bounded before
+         * the dummy is appended */
+        exec_ref_pic_marking_sliding_window(decoder);
+        remove_short_reference(decoder, dummy_pic->frame_num);
+        /* add to short reference */
+        priv->short_ref[priv->short_ref_count++] = dummy_pic;
+        dpb_add(decoder, dummy_pic);
+        gst_vaapi_picture_unref(dummy_pic);
+        priv->prev_frame_num = priv->frame_num;
+        priv->frame_num = (priv->prev_frame_num + 1) % MaxFrameNum;
+    }
+    /* Restore decoder state for the real current picture */
+    priv->frame_num = final_frame_num;
+    priv->current_picture = picture;
+}
+
static gboolean
init_picture(
GstVaapiDecoderH264 *decoder,
{
GstVaapiDecoderH264Private * const priv = decoder->priv;
GstVaapiPicture * const base_picture = &picture->base;
+ GstH264SPS * const sps = slice_hdr->pps->sequence;
priv->prev_frame_num = priv->frame_num;
priv->frame_num = slice_hdr->frame_num;
GST_DEBUG("<IDR>");
GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
dpb_flush(decoder);
- }
+ } else if (sps->gaps_in_frame_num_value_allowed_flag)
+ process_for_gaps_in_frame_num(decoder, picture, slice_hdr, nalu);
/* Initialize slice type */
switch (slice_hdr->type % 5) {
return NULL;
}
+/* Creates a clone of @picture through the codec-object construction
+ * machinery, passing @picture itself as construction data together with
+ * the CLONE flag so the subclass constructor can copy the relevant state.
+ * NOTE(review): args.param is NULL while args.param_size is taken from
+ * @picture — presumably the CLONE path allocates its own parameter buffer
+ * of that size; confirm against gst_vaapi_codec_object_construct().
+ *
+ * Returns: the new picture (caller owns the reference), or NULL on
+ * construction failure. */
+GstVaapiPicture *
+gst_vaapi_picture_new_clone(GstVaapiPicture *picture)
+{
+ GType type;
+ GstMiniObject *obj;
+ GstVaapiCodecObject *va_obj;
+ GstVaapiCodecObjectConstructorArgs args;
+
+ g_return_val_if_fail(GST_VAAPI_IS_PICTURE(picture), NULL);
+
+ /* Instantiate the same concrete subclass as the source picture */
+ type = G_TYPE_FROM_CLASS(GST_VAAPI_PICTURE_GET_CLASS(picture));
+ obj = gst_mini_object_new(type);
+ if (!obj)
+ return NULL;
+
+ va_obj = GST_VAAPI_CODEC_OBJECT(obj);
+ args.codec = GST_VAAPI_CODEC_BASE(GET_DECODER(picture));
+ args.param = NULL;
+ args.param_size = picture->param_size;
+ args.data = picture;
+ args.data_size = 0;
+ args.flags = (GST_VAAPI_CREATE_PICTURE_FLAG_CLONE);
+ if (gst_vaapi_codec_object_construct(va_obj, &args))
+ return GST_VAAPI_PICTURE_CAST(va_obj);
+
+ /* Construction failed: release the half-built object */
+ gst_mini_object_unref(obj);
+ return NULL;
+}
+
void
gst_vaapi_picture_add_slice(GstVaapiPicture *picture, GstVaapiSlice *slice)
{