h264: support processing of gaps in frame_num
author Wind Yuan <feng.yuan@intel.com>
Thu, 28 Feb 2013 07:26:36 +0000 (15:26 +0800)
committer zhongcong <congx.zhong@intel.com>
Sat, 2 Mar 2013 07:36:45 +0000 (15:36 +0800)
Follow 8.2.5.2: when frame_num is not equal to PrevRefFrameNum and is
not equal to (PrevRefFrameNum + 1) % MaxFrameNum, "non-existing"
pictures marked as "used for short-term reference" are inferred and
inserted into the DPB.

gst-libs/gst/vaapi/gstvaapidecoder_h264.c
gst-libs/gst/vaapi/gstvaapidecoder_objects.c
gst-libs/gst/vaapi/gstvaapidecoder_objects.h

index 6fed7ef..0601731 100755 (executable)
@@ -214,6 +214,24 @@ gst_vaapi_picture_h264_new_field(GstVaapiPictureH264 *picture)
     return GST_VAAPI_PICTURE_H264_CAST(base_picture);
 }
 
+static inline GstVaapiPictureH264 *
+gst_vaapi_picture_h264_new_dummy(GstVaapiPictureH264 *picture)
+{
+    GstVaapiPicture *base_picture;
+
+    g_return_val_if_fail(GST_VAAPI_IS_PICTURE_H264(picture), NULL);
+
+    base_picture = gst_vaapi_picture_new_clone(&picture->base);
+    if (!base_picture)
+        return NULL;
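+    /* Mark the clone as a skipped, "non-existing" picture used for
+     * short-term reference (8.2.5.2) */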
+    GST_VAAPI_PICTURE_FLAG_SET(
+        base_picture,
+        (GST_VAAPI_PICTURE_FLAG_SKIPPED |
+         GST_VAAPI_PICTURE_FLAG_SHORT_TERM_REFERENCE));
+    base_picture->poc = -1;
+    return GST_VAAPI_PICTURE_H264_CAST(base_picture);
+}
+
 static inline GstVaapiSliceH264 *
 gst_vaapi_picture_h264_get_last_slice(GstVaapiPictureH264 *picture)
 {
@@ -560,6 +578,9 @@ struct _GstVaapiDecoderH264Private {
 static gboolean
 exec_ref_pic_marking(GstVaapiDecoderH264 *decoder, GstVaapiPictureH264 *picture);
 
+static gboolean
+exec_ref_pic_marking_sliding_window(GstVaapiDecoderH264 *decoder);
+
 /* Get number of reference frames to use */
 static guint
 get_max_dec_frame_buffering(GstH264SPS *sps)
@@ -2146,6 +2167,51 @@ init_picture_refs(
     }
 }
 
+static void
+process_for_gaps_in_frame_num(
+    GstVaapiDecoderH264 *decoder,
+    GstVaapiPictureH264 *picture,
+    GstH264SliceHdr     *slice_hdr,
+    GstH264NalUnit      *nalu
+)
+{
+    GstVaapiDecoderH264Private * const priv = decoder->priv;
+    GstH264SPS * const sps = slice_hdr->pps->sequence;
+    const gint32 MaxFrameNum = 1 << (sps->log2_max_frame_num_minus4 + 4);
+    GstVaapiPictureH264 *dummy_pic;
+    gint32 final_frame_num;
+
+    if (priv->frame_num == priv->prev_frame_num ||
+        priv->frame_num == (priv->prev_frame_num + 1) % MaxFrameNum)
+        return;
+
+    final_frame_num = priv->frame_num;
+    priv->frame_num = (priv->prev_frame_num + 1) % MaxFrameNum;
+    init_picture_ref_lists(decoder);
+
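+    /* Insert a "non-existing" frame for each frame_num in the gap (8.2.5.2) */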
+    while (final_frame_num != priv->frame_num) {
+        dummy_pic = gst_vaapi_picture_h264_new_dummy(picture);
+        if (!dummy_pic)
+            break;
+        dummy_pic->frame_num = priv->frame_num;
+        dummy_pic->frame_num_wrap = priv->frame_num;
+        dummy_pic->pps = picture->pps;
+        dummy_pic->output_needed = FALSE;
+        dummy_pic->structure = GST_VAAPI_PICTURE_STRUCTURE_FRAME;
+        priv->current_picture = dummy_pic;
+
+        init_picture_refs_pic_num(decoder, dummy_pic, slice_hdr);
+        exec_ref_pic_marking_sliding_window(decoder);
+        remove_short_reference(decoder, dummy_pic->frame_num);
+        /* add to short reference */
+        priv->short_ref[priv->short_ref_count++] = dummy_pic;
+        dpb_add(decoder, dummy_pic);
+        gst_vaapi_picture_unref(dummy_pic);
+        priv->prev_frame_num = priv->frame_num;
+        priv->frame_num = (priv->prev_frame_num + 1) % MaxFrameNum;
+    }
+    priv->frame_num = final_frame_num;
+    priv->current_picture = picture;
+}
+
 static gboolean
 init_picture(
     GstVaapiDecoderH264 *decoder,
@@ -2156,6 +2222,7 @@ init_picture(
 {
     GstVaapiDecoderH264Private * const priv = decoder->priv;
     GstVaapiPicture * const base_picture = &picture->base;
+    GstH264SPS * const sps = slice_hdr->pps->sequence;
 
     priv->prev_frame_num        = priv->frame_num;
     priv->frame_num             = slice_hdr->frame_num;
@@ -2169,7 +2236,8 @@ init_picture(
         GST_DEBUG("<IDR>");
         GST_VAAPI_PICTURE_FLAG_SET(picture, GST_VAAPI_PICTURE_FLAG_IDR);
         dpb_flush(decoder);
-    }
+    } else if (sps->gaps_in_frame_num_value_allowed_flag)
+        process_for_gaps_in_frame_num(decoder, picture, slice_hdr, nalu);
 
     /* Initialize slice type */
     switch (slice_hdr->type % 5) {
index 06cdc9a..ce7dd98 100644 (file)
@@ -242,6 +242,35 @@ gst_vaapi_picture_new_field(GstVaapiPicture *picture)
     return NULL;
 }
 
+GstVaapiPicture *
+gst_vaapi_picture_new_clone(GstVaapiPicture *picture)
+{
+    GType type;
+    GstMiniObject *obj;
+    GstVaapiCodecObject *va_obj;
+    GstVaapiCodecObjectConstructorArgs args;
+
+    g_return_val_if_fail(GST_VAAPI_IS_PICTURE(picture), NULL);
+
+    type = G_TYPE_FROM_CLASS(GST_VAAPI_PICTURE_GET_CLASS(picture));
+    obj  = gst_mini_object_new(type);
+    if (!obj)
+        return NULL;
+
+    va_obj = GST_VAAPI_CODEC_OBJECT(obj);
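+    /* Construct the new object as a clone: the source picture is passed
+     * as construction data together with the CLONE flag */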
+    args.codec      = GST_VAAPI_CODEC_BASE(GET_DECODER(picture));
+    args.param      = NULL;
+    args.param_size = picture->param_size;
+    args.data       = picture;
+    args.data_size  = 0;
+    args.flags      = (GST_VAAPI_CREATE_PICTURE_FLAG_CLONE);
+    if (gst_vaapi_codec_object_construct(va_obj, &args))
+        return GST_VAAPI_PICTURE_CAST(va_obj);
+
+    gst_mini_object_unref(obj);
+    return NULL;
+}
+
 void
 gst_vaapi_picture_add_slice(GstVaapiPicture *picture, GstVaapiSlice *slice)
 {
index b95f4e0..a32ff15 100644 (file)
@@ -179,6 +179,10 @@ GstVaapiPicture *
 gst_vaapi_picture_new_field(GstVaapiPicture *picture);
 
 G_GNUC_INTERNAL
+GstVaapiPicture *
+gst_vaapi_picture_new_clone(GstVaapiPicture *picture);
+
+G_GNUC_INTERNAL
 void
 gst_vaapi_picture_add_slice(GstVaapiPicture *picture, GstVaapiSlice *slice);