Add context API <gst_vaapi_context_get_surface_pool>
author: Wind Yuan <feng.yuan@intel.com>
Wed, 23 Nov 2011 07:01:30 +0000 (02:01 -0500)
committer: Zhong Cong <congx.zhong@intel.com>
Tue, 5 Feb 2013 07:37:09 +0000 (15:37 +0800)
15 files changed:
gst-libs/gst/vaapi/gstvaapicontext.c
gst-libs/gst/vaapi/gstvaapicontext.h
gst/vaapiencode/gstvaapibaseencoder.c
gst/vaapiencode/gstvaapibaseencoder.h
gst/vaapiencode/gstvaapiencode.c
gst/vaapiencode/gstvaapiencode.h
gst/vaapiencode/gstvaapiencoder.c
gst/vaapiencode/gstvaapiencoder.h
gst/vaapiencode/gstvaapih263encode.c
gst/vaapiencode/gstvaapih263encoder.c
gst/vaapiencode/gstvaapih264encode.c
gst/vaapiencode/gstvaapih264encoder.c
gst/vaapiencode/gstvaapih264encoder.h
gst/vaapiencode/gstvaapimpeg4encode.c
gst/vaapiencode/gstvaapimpeg4encoder.c

index b94f647..70199bd 100644 (file)
@@ -291,7 +291,8 @@ gst_vaapi_context_create(GstVaapiContext *context)
     GstVaapiContextPrivate * const priv = context->priv;
     VAProfile va_profile;
     VAEntrypoint va_entrypoint;
-    VAConfigAttrib attrib;
+    VAConfigAttrib attribs[2];
+    guint attribs_num;
     VAContextID context_id;
     VASurfaceID surface_id;
     VAStatus status;
@@ -326,17 +327,27 @@ gst_vaapi_context_create(GstVaapiContext *context)
     va_entrypoint = gst_vaapi_entrypoint_get_va_entrypoint(priv->entrypoint);
 
     GST_VAAPI_DISPLAY_LOCK(display);
-    attrib.type = VAConfigAttribRTFormat;
+    attribs[0].type = VAConfigAttribRTFormat;
+    attribs[1].type = VAConfigAttribRateControl;
+    if (VAEntrypointEncSlice == va_entrypoint)
+      attribs_num = 2;
+    else
+      attribs_num = 1;
+
     status = vaGetConfigAttributes(
         GST_VAAPI_DISPLAY_VADISPLAY(display),
         va_profile,
         va_entrypoint,
-        &attrib, 1
+        attribs, attribs_num
     );
     GST_VAAPI_DISPLAY_UNLOCK(display);
     if (!vaapi_check_status(status, "vaGetConfigAttributes()"))
         goto end;
-    if (!(attrib.value & VA_RT_FORMAT_YUV420))
+    if (!(attribs[0].value & VA_RT_FORMAT_YUV420))
+        goto end;
+    if (attribs_num > 1 && !(attribs[1].value & VA_RC_NONE)
+                        && !(attribs[1].value & VA_RC_CBR)
+                        && !(attribs[1].value & VA_RC_VBR))
         goto end;
 
     GST_VAAPI_DISPLAY_LOCK(display);
@@ -344,7 +355,7 @@ gst_vaapi_context_create(GstVaapiContext *context)
         GST_VAAPI_DISPLAY_VADISPLAY(display),
         va_profile,
         va_entrypoint,
-        &attrib, 1,
+        attribs, attribs_num,
         &priv->config_id
     );
     GST_VAAPI_DISPLAY_UNLOCK(display);
@@ -803,6 +814,26 @@ gst_vaapi_context_get_surface(GstVaapiContext *context)
 }
 
 /**
+ * gst_vaapi_context_get_surface_pool:
+ * @context: a #GstVaapiContext
+ *
+ * Reference the surface pool. The returned surface pool should be released
+ * with g_object_unref(). This function returns %NULL if
+ * the surface pool is empty. The surface pool is
+ * created during context creation.
+ *
+ * Return value: surface pool, or %NULL if it is not created.
+ */
+
+GstVaapiSurfacePool *
+gst_vaapi_context_get_surface_pool(GstVaapiContext *context)
+{
+  g_return_val_if_fail(GST_VAAPI_IS_CONTEXT(context), NULL);
+  return (GstVaapiSurfacePool*)g_object_ref(context->priv->surfaces_pool);
+}
+
+
+/**
  * gst_vaapi_context_get_surface_count:
  * @context: a #GstVaapiContext
  *
index cffbd7e..0c34036 100644 (file)
@@ -28,6 +28,7 @@
 #include <gst/vaapi/gstvaapidisplay.h>
 #include <gst/vaapi/gstvaapisurface.h>
 #include <gst/video/video-overlay-composition.h>
+#include <gst/vaapi/gstvaapisurfacepool.h>
 
 G_BEGIN_DECLS
 
@@ -146,6 +147,10 @@ gst_vaapi_context_get_size(
 GstVaapiSurface *
 gst_vaapi_context_get_surface(GstVaapiContext *context);
 
+GstVaapiSurfacePool *
+gst_vaapi_context_get_surface_pool(GstVaapiContext *context);
+
+
 guint
 gst_vaapi_context_get_surface_count(GstVaapiContext *context);
 
index dbfb406..f04a084 100644 (file)
@@ -50,6 +50,10 @@ struct _GstVaapiBaseEncoderPrivate {
   GMutex           *code_buffer_lock;
   GCond            *code_buffer_cond;
   GQueue           *available_code_buffers;
+
+  GstVaapiSurfacePool *surfaces_pool;
+
+  gboolean          need_flush;
 };
 
 G_DEFINE_TYPE(GstVaapiBaseEncoder, gst_vaapi_base_encoder, GST_TYPE_VAAPI_ENCODER);
@@ -83,9 +87,8 @@ static EncoderStatus base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_en
 
 static EncoderStatus base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
                          GstVaapiDisplay *display, GstVaapiSurface *buffer_surface,
-                         gboolean is_key, GstClockTime timestamp,
-                         GstClockTime duration, VABufferID *coded_buf,
-                         GList **coded_pics);
+                         gboolean is_key, GstVaapiVideoBuffer *surface_buffer,
+                         VABufferID *coded_buf, GList **coded_pics);
 
 static VABufferID   *pop_available_coded_buffer(GstVaapiBaseEncoderPrivate *base_prv);
 static gboolean      push_available_coded_buffer(
@@ -137,9 +140,11 @@ gst_vaapi_base_encoder_class_init(GstVaapiBaseEncoderClass *klass)
   klass->validate_attributes = NULL;
   klass->pre_alloc_resource = NULL;
   klass->release_resource = NULL;
-  klass->prepare_frame = NULL;
+  klass->prepare_next_input_buffer = NULL;
+  klass->render_frame = NULL;
   klass->notify_frame = NULL;
   klass->copy_coded_frame = NULL;
+  klass->encode_frame_failed = NULL;
 
   /*
   object_class->set_property = gst_vaapi_base_encoder_set_property;
@@ -232,7 +237,6 @@ static void
 gst_vaapi_base_encoder_init(GstVaapiBaseEncoder *encoder)
 {
   GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
-  GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
   ENCODER_ASSERT(base_prv);
 
   /* init private values*/
@@ -246,6 +250,9 @@ gst_vaapi_base_encoder_init(GstVaapiBaseEncoder *encoder)
   base_prv->code_buffer_lock = g_mutex_new();
   base_prv->code_buffer_cond = g_cond_new();
   base_prv->available_code_buffers = g_queue_new();
+
+  base_prv->surfaces_pool = NULL;
+  base_prv->need_flush = FALSE;
 }
 
 void
@@ -286,7 +293,6 @@ gst_vaapi_base_encoder_uninitialize_default(GstVaapiEncoder* encoder, GstVaapiDi
 gboolean
 default_validate_encoder_parameters(GstVaapiBaseEncoder *encoder)
 {
-  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
   if (!ENCODER_WIDTH(encoder) || !ENCODER_HEIGHT(encoder) || !ENCODER_FPS(encoder)) {
     return FALSE;
   }
@@ -302,10 +308,8 @@ gst_vaapi_base_encoder_open_default(GstVaapiEncoder* encoder, GstVaapiDisplay *d
 
   GstVaapiSurfacePool *surfaces_pool = private_data;
   GstVaapiContext *out_context = NULL;
-  VAProfile va_profile = -1;
 
   EncoderStatus ret = ENCODER_NO_ERROR;
-  VAStatus va_status = VA_STATUS_SUCCESS;
   gboolean check_attri_ret = TRUE;
   /*check and set default values*/
   if (base_class->validate_attributes) {
@@ -354,6 +358,9 @@ gst_vaapi_base_encoder_open_default(GstVaapiEncoder* encoder, GstVaapiDisplay *d
     "encoder <base_encoder_alloc_coded_buffers> failed.\n"
   );
   *context = out_context;
+
+  base_prv->surfaces_pool = gst_vaapi_context_get_surface_pool(out_context);
+  ENCODER_ASSERT(base_prv->surfaces_pool);
   return ENCODER_NO_ERROR;
 
 end:
@@ -376,12 +383,19 @@ gst_vaapi_base_encoder_close_default(GstVaapiEncoder* encoder, GstVaapiDisplay *
   EncoderStatus ret = ENCODER_NO_ERROR;
 
   /* release buffers first */
+  base_prv->need_flush = FALSE;
+
   if (base_class->release_resource) {
     base_class->release_resource(base_encoder, display, context);
   }
   base_encoder_release_coded_buffers(base_encoder, display, context);
   base_prv->frame_count = 0;
 
+  if (base_prv->surfaces_pool) {
+    g_object_unref(base_prv->surfaces_pool);
+    base_prv->surfaces_pool = NULL;
+  }
+
   return ret;
 }
 
@@ -438,6 +452,7 @@ base_encoder_release_coded_buffers(GstVaapiBaseEncoder *base_encoder, GstVaapiDi
 
   ENCODER_ASSERT(display);
   ENCODER_ASSERT(context);
+  VAAPI_UNUSED_ARG(va_status);
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
 
   /* wait clear all available coded buffers*/
@@ -483,25 +498,55 @@ gst_vaapi_base_encoder_encode_default(GstVaapiEncoder* encoder, GstVaapiDisplay
   VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
   GstVaapiSurface *new_surface = NULL;
 
-  /* load picture to surface */
-  if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
-    buffer_surface = gst_vaapi_video_buffer_get_surface(GST_VAAPI_VIDEO_BUFFER(raw_pic));
-  } else {
-    new_surface = gst_vaapi_context_get_surface(context);
-    buffer_surface = new_surface;
-    ENCODER_CHECK_STATUS(buffer_surface, ENCODER_SURFACE_ERR, "base_pop_free_surface failed.\n");
+  /* Video Buffer */
+  GstVaapiVideoBuffer *video_buffer = NULL;
+
+  ENCODER_CHECK_STATUS(raw_pic || base_class->prepare_next_input_buffer,
+                       ENCODER_DATA_ERR, "Need a picture to encode");
+  if (raw_pic) {
+    /* load picture to surface */
+    if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
+      video_buffer = GST_VAAPI_VIDEO_BUFFER(raw_pic);
+      gst_buffer_ref(GST_BUFFER_CAST(video_buffer));
+    } else {
+      ENCODER_CHECK_STATUS(base_prv->surfaces_pool, ENCODER_SURFACE_ERR, "surface pool could not be found in context");
+      video_buffer = (GstVaapiVideoBuffer*)gst_vaapi_video_buffer_new_from_pool((GstVaapiVideoPool*)base_prv->surfaces_pool);
+      new_surface = gst_vaapi_video_buffer_get_surface(video_buffer); //gst_vaapi_context_get_surface(context);
+      ENCODER_CHECK_STATUS(new_surface, ENCODER_SURFACE_ERR, "base_pop_free_surface failed.\n");
+
+      /* put picture to new surface */
+      va_status = base_put_raw_buffer_to_surface(base_encoder, display, raw_pic, new_surface);
+      ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "va put buffer to surface failed.\n");
+
+      GST_BUFFER_TIMESTAMP(video_buffer) = GST_BUFFER_TIMESTAMP(raw_pic);
+      GST_BUFFER_DURATION(video_buffer) = GST_BUFFER_DURATION(raw_pic);
+    }
+  }else {
+    base_prv->need_flush = TRUE;
+  }
 
-    /* put picture to new surface */
-    va_status = base_put_raw_buffer_to_surface(base_encoder, display, raw_pic, buffer_surface);
-    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "va put buffer to surface failed.\n");
+again:
+  if (base_class->prepare_next_input_buffer) {
+    GstVaapiVideoBuffer* tmp_buf = NULL;
+    ret = base_class->prepare_next_input_buffer(base_encoder, video_buffer, base_prv->need_flush, &tmp_buf);
+    base_prv->need_flush = FALSE;
+    if (video_buffer) {
+      gst_buffer_unref(GST_BUFFER_CAST(video_buffer));
+      video_buffer = NULL;
+    }
+    if (ret != ENCODER_NO_ERROR || !tmp_buf)
+      goto end;
+    video_buffer = tmp_buf;
   }
+
+  buffer_surface = gst_vaapi_video_buffer_get_surface(video_buffer);
   buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
   ENCODER_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, ENCODER_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
 
   /* begin surface*/
   ENCODER_ACQUIRE_DISPLAY_LOCK(display);
   va_status = vaBeginPicture(va_dpy, context_id, buffer_surface_id);
-  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  //ENCODER_RELEASE_DISPLAY_LOCK(display);
 
   ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaBeginPicture error.\n");
 
@@ -510,7 +555,7 @@ gst_vaapi_base_encoder_encode_default(GstVaapiEncoder* encoder, GstVaapiDisplay
   ENCODER_CHECK_STATUS(coded_buf, ENCODER_ENC_RES_ERR, "dequeue_available_coded_buffer error.\n");
 
   /* prepare frame*/
-  ret = base_class->prepare_frame(base_encoder, display, context,
+  ret = base_class->render_frame(base_encoder, display, context,
                                   buffer_surface, base_prv->frame_count,
                                   *coded_buf, &is_key);
   /* prepare failed, push back */
@@ -520,24 +565,40 @@ gst_vaapi_base_encoder_encode_default(GstVaapiEncoder* encoder, GstVaapiDisplay
   ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ENCODER_PICTURE_ERR, "base_prepare_encoding failed.\n");
 
   /* end picture */
-  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+  //ENCODER_ACQUIRE_DISPLAY_LOCK(display);
   va_status = vaEndPicture(va_dpy, context_id);
   ENCODER_RELEASE_DISPLAY_LOCK(display);
   ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaEndPicture error.\n");
 
   /*query surface result*/
   ret = base_query_encoding_status(base_encoder, display, buffer_surface,
-                                   is_key, GST_BUFFER_TIMESTAMP(raw_pic), GST_BUFFER_DURATION(raw_pic), coded_buf, coded_pics);
+                                   is_key, video_buffer, coded_buf, coded_pics);
   if (ENCODER_NO_ERROR != ret) {
     goto end;
   }
 
   base_prv->frame_count++;
 
+  if (base_class->prepare_next_input_buffer) {
+    if (video_buffer) {
+      gst_buffer_unref(GST_BUFFER_CAST(video_buffer));
+    }
+    video_buffer = NULL;
+    buffer_surface = NULL;
+    goto again;
+  }
+
 end:
   ENCODER_RELEASE_DISPLAY_LOCK(display);
-  if (new_surface) {
-    gst_vaapi_context_put_surface(context, new_surface);
+  if (ret > ENCODER_NO_ERROR) {
+    ret = ENCODER_NO_ERROR;
+  }
+  if (ret < 0 && base_class->encode_frame_failed) {
+    base_class->encode_frame_failed(base_encoder, video_buffer);
+  }
+  if (video_buffer) {
+    gst_buffer_unref(GST_BUFFER_CAST(video_buffer));
+    video_buffer = NULL;
   }
   return ret;
 }
@@ -558,6 +619,7 @@ pop_available_coded_buffer(GstVaapiBaseEncoderPrivate *base_prv)
 
 end:
   g_mutex_unlock(base_prv->code_buffer_lock);
+  VAAPI_UNUSED_ARG(ret);
   return coded_buf;
 }
 
@@ -578,9 +640,6 @@ base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_encoder,
                                GstVaapiSurface *surface)
 {
   EncoderStatus ret = ENCODER_NO_ERROR;
-  VAStatus va_status = VA_STATUS_SUCCESS;
-  VAImage surface_image;
-  VADisplay va_dpy;
   GstVaapiImage *image;
   GstVaapiImageFormat image_format;
   guint8 *y_src = NULL, *u_src = NULL, *v_src = NULL;
@@ -593,7 +652,8 @@ base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_encoder,
   GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
 
   ENCODER_ASSERT(display);
-  va_dpy = gst_vaapi_display_get_display(display);
+  VAAPI_UNUSED_ARG(pitchv);
+  VAAPI_UNUSED_ARG(v_dst);
   /*map image*/
   image = gst_vaapi_surface_derive_image(surface);
   gst_vaapi_image_map(image);
@@ -659,7 +719,7 @@ base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_encoder,
 
   /*unmap image*/
   g_object_unref(image);
-  end:
+
   return ret;
 }
 
@@ -668,8 +728,7 @@ base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
                            GstVaapiDisplay *display,
                            GstVaapiSurface *buffer_surface,
                            gboolean is_key,
-                           GstClockTime timestamp,
-                           GstClockTime duration,
+                           GstVaapiVideoBuffer *surface_buffer,
                            VABufferID *coded_buf,
                            GList **coded_pics)
 {
@@ -683,11 +742,12 @@ base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
   GstVaapiBaseEncoderClass   *base_class = GST_VAAPI_BASE_ENCODER_GET_CLASS(base_encoder);
   GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
 
-  ENCODER_ASSERT(display && context);
+  ENCODER_ASSERT(display);
   VASurfaceID surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
 
   ENCODER_ASSERT(coded_pics && *coded_pics == NULL);
+  VAAPI_UNUSED_ARG(has_coded_data);
 
   /* lock display */
   ENCODER_ACQUIRE_DISPLAY_LOCK(display);
@@ -698,7 +758,7 @@ base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
   va_status = vaQuerySurfaceStatus(va_dpy, surface_id, &surface_status);
   ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaQuerySurfaceStatus failed.\n");
   if (VASurfaceSkipped&surface_status) {
-    ENCODER_LOG_ERROR("frame skipped, dts:%" GST_TIME_FORMAT ".\n", GST_TIME_ARGS(timestamp));
+    ENCODER_LOG_ERROR("frame skipped, dts:%" GST_TIME_FORMAT ".\n", GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(surface_buffer)));
   }
 
   va_status = vaMapBuffer(va_dpy, *coded_buf, (void **)(&buf_list));
@@ -721,11 +781,12 @@ base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
                           base_encoder, buf_list->buf,
                           buf_list->size, coded_buf);
       }
-      GST_BUFFER_TIMESTAMP(ret_buffer) = timestamp;
-      GST_BUFFER_DURATION(ret_buffer) = duration;
+      GST_BUFFER_TIMESTAMP(ret_buffer) = GST_BUFFER_TIMESTAMP(surface_buffer);
+      GST_BUFFER_DURATION(ret_buffer) = GST_BUFFER_DURATION(surface_buffer);
       if (!is_key) {
         GST_BUFFER_FLAG_SET(ret_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
       }
+      GST_BUFFER_OFFSET_END(ret_buffer) = GST_BUFFER_OFFSET_END(surface_buffer);
       *coded_pics = g_list_append(*coded_pics, ret_buffer);
       buf_list = (VACodedBufferSegment*)buf_list->next;
       ENCODER_ASSERT(NULL == buf_list);
@@ -779,6 +840,7 @@ gst_vaapi_base_encoder_flush_default(GstVaapiEncoder* encoder, GstVaapiDisplay *
   GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
 
   base_prv->frame_count = 0;
+  base_prv->need_flush = TRUE;
   /*do we need destroy base_prv->seq_parameter? */
 
   //end:
index b15c0e7..0717d44 100644 (file)
@@ -2,6 +2,7 @@
 #define GST_VAAPI_BASE_ENCODER_H
 
 #include "gstvaapiencoder.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
 
 G_BEGIN_DECLS
 
@@ -32,9 +33,13 @@ struct _GstVaapiBaseEncoderClass {
   /* in <close> function */
   gboolean (*release_resource)      (GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context);
 
-  EncoderStatus (*prepare_frame)    (GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+  /* in <encode> function */
+  EncoderStatus (*prepare_next_input_buffer)(GstVaapiBaseEncoder* encoder, GstVaapiVideoBuffer *display_buf,
+                                             gboolean need_flush, GstVaapiVideoBuffer **out_buf);
+  EncoderStatus (*render_frame)     (GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
                                      GstVaapiContext *context, GstVaapiSurface *surface,
                                      guint frame_index, VABufferID coded_buf, gboolean *is_key);
+  void (*encode_frame_failed)       (GstVaapiBaseEncoder *encoder, GstVaapiVideoBuffer* buffer);
   void (*notify_frame)              (GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size);
   GstBuffer *(*copy_coded_frame)    (GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size, VABufferID *coded_buf);
 };
index b7338f0..d1ddf68 100644 (file)
@@ -9,6 +9,8 @@
 #include "gstvaapih264encode.h"
 #include "gstvaapih263encode.h"
 #include "gstvaapimpeg4encode.h"
+#include "gstvaapibaseencoder.h"
+
 
 /* gst_debug
      GST_DEBUG_CATEGORY_STATIC (gst_vaapi_encode_debug)
@@ -92,6 +94,8 @@ gst_vaapi_encode_class_init(GstVaapiEncodeClass *klass)
 
   element_class->change_state = gst_vaapi_encode_change_state;
 
+  klass->set_encoder_src_caps = NULL;
+
   /* Registering debug symbols for function pointers */
   GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_change_state);
   GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_get_caps);
@@ -267,6 +271,7 @@ gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
 {
   GstFlowReturn ret = GST_FLOW_OK;
   GstVaapiEncode *encode = GST_VAAPI_ENCODE(GST_OBJECT_PARENT(sink_pad));
+  GstVaapiEncodeClass *encode_class = GST_VAAPI_ENCODE_GET_CLASS(encode);
   EncoderStatus encoder_ret = ENCODER_NO_ERROR;
   GList *out_buffers = NULL;
   GstBuffer *tmp_buffer = NULL;
@@ -307,7 +312,7 @@ gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
                                  GST_FLOW_ERROR, "1st buffer caps' format type is not fourcc.\n");
       format = gst_value_get_fourcc (format_value);
       if (format) {
-        gst_vaapi_base_encoder_set_input_format(encode->encoder, format);
+        gst_vaapi_base_encoder_set_input_format(GST_VAAPI_BASE_ENCODER(encode->encoder), format);
       }
     }
 
@@ -341,6 +346,7 @@ gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
   #ifdef _MRST_
     encoder_ret = gst_vaapi_encoder_open(encode->encoder, surface_pool);
   #else
+    VAAPI_UNUSED_ARG(surface_pool);
     encoder_ret = gst_vaapi_encoder_open(encode->encoder, NULL);
   #endif
     ENCODER_CHECK_STATUS (ENCODER_NO_ERROR == encoder_ret, GST_FLOW_ERROR, "gst_vaapi_encoder_open failed.\n");
@@ -367,6 +373,9 @@ gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
       if (ENCODER_NO_ERROR == gst_vaapi_encoder_get_codec_data(encode->encoder, &codec_data) && codec_data) {
         gst_caps_set_simple(encode->srcpad_caps, "codec_data",GST_TYPE_BUFFER, codec_data, NULL);
       }
+      if (encode_class->set_encoder_src_caps) {
+        encode_class->set_encoder_src_caps(encode, encode->srcpad_caps);
+      }
       gst_pad_set_caps (encode->srcpad, encode->srcpad_caps);
       GST_BUFFER_CAPS(tmp_buffer) = gst_caps_ref(encode->srcpad_caps);
       ENCODER_LOG_INFO("gst_vaapi_encode_chain 1st push-buffer caps,\n%s", _encode_dump_caps(encode->srcpad_caps));
@@ -382,7 +391,7 @@ gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
   }
 
 end:
-  gst_mini_object_unref(GST_MINI_OBJECT(buf));
+  gst_buffer_unref(buf);
   return ret;
 
 }
index d123b91..107246b 100644 (file)
@@ -61,6 +61,7 @@ struct _GstVaapiEncode {
 
 struct _GstVaapiEncodeClass {
     GstElementClass     parent_class;
+    gboolean          (*set_encoder_src_caps)(GstVaapiEncode* encode, GstCaps *caps);
 };
 
 GType gst_vaapi_encode_get_type(void);
index 83bb9cd..e93f61f 100644 (file)
@@ -11,7 +11,7 @@ GST_DEBUG_CATEGORY_STATIC (gst_vaapi_encoder_debug);
 G_DEFINE_TYPE(GstVaapiEncoder, gst_vaapi_encoder, G_TYPE_OBJECT);
 
 static void gst_vaapi_encoder_class_init(GstVaapiEncoderClass *kclass);
-static voidgst_vaapi_encoder_init(GstVaapiEncoder *encoder);
+static void gst_vaapi_encoder_init(GstVaapiEncoder *encoder);
 static void gst_vaapi_encoder_finalize(GObject *object);
 
 static void
@@ -160,7 +160,7 @@ gst_vaapi_encoder_open(GstVaapiEncoder* encoder, void* private_data)
   if (VAAPI_ENC_INIT != encoder_prv->state) {
     return ENCODER_STATE_ERR;
   }
-  ENCODER_ASSERT(encoder_prv->context);
+  ENCODER_ASSERT(!encoder_prv->context);
 
   ENCODER_CHECK_STATUS(encoder_class->open, ENCODER_FUNC_PTR_ERR, "encoder <open> function pointer empty.\n");
   ret = encoder_class->open(encoder, encoder_prv->display, private_data, &encoder_prv->context);
index 3399898..b4b05e7 100644 (file)
 G_BEGIN_DECLS
 
 #define ENCODER_NO_ERROR       0
+#define ENCODER_BUFFER_WAITING 1
+#define ENCODER_BUFFER_EMPTY   2
+
+
 #define ENCODER_MEM_ERR       -1
 #define ENCODER_DISPLAY_ERR   -2
 #define ENCODER_CONFIG_ERR    -3
@@ -40,8 +44,8 @@ G_BEGIN_DECLS
 #endif
 
 #define ENCODER_CHECK_STATUS(exp, err_num, err_reason, ...)  \
-  ENCODER_ASSERT(exp);                             \
-  if (!(exp)) {                                    \
+  if (!(exp)) {                                   \
+    ENCODER_ASSERT(FALSE);                         \
     ret = err_num;                                 \
     ENCODER_LOG_ERROR(err_reason, ## __VA_ARGS__); \
     goto end;                                      \
index e016bc2..2301244 100644 (file)
@@ -80,7 +80,6 @@ static void
 gst_h263encode_class_init(GstH263EncodeClass *klass)
 {
   GObjectClass * const object_class = G_OBJECT_CLASS(klass);
-  GstVaapiEncodeClass * const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
 
   GST_DEBUG_CATEGORY_INIT (gst_vaapi_h263_encode_debug, "vaapih263encode", 0,
       "vaapih263encode element");
@@ -127,7 +126,6 @@ gst_h263encode_class_init(GstH263EncodeClass *klass)
 static void
 gst_h263encode_init(GstH263Encode *h263_encode, GstH263EncodeClass *klass)
 {
-  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
   GstVaapiEncode *encode = GST_VAAPI_ENCODE(h263_encode);
   encode->encoder = GST_VAAPI_ENCODER(gst_h263_encoder_new());
   ENCODER_ASSERT(encode->encoder);
index a5ede63..86d8f2c 100644 (file)
@@ -38,14 +38,11 @@ static gboolean      gst_h263_validate_parameters(GstVaapiBaseEncoder* encoder);
 static gboolean      gst_h263_encoder_release_resource(
                             GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display,
                             GstVaapiContext *context);
-static EncoderStatus gst_h263_prepare_encoding(GstVaapiBaseEncoder *encoder,
+static EncoderStatus gst_h263_encoder_rendering(GstVaapiBaseEncoder *encoder,
                             GstVaapiDisplay *display, GstVaapiContext *context,
                             GstVaapiSurface *surface, guint frame_index,
                             VABufferID coded_buf, gboolean *is_key);
 
-static void          h263_release_buffers(GstH263Encoder *h263_encoder,
-                            GstVaapiDisplay *display);
-
 GstH263Encoder *
 gst_h263_encoder_new(void)
 {
@@ -69,7 +66,7 @@ gst_h263_encoder_class_init(GstH263EncoderClass *klass)
   base_class->validate_attributes = gst_h263_validate_parameters;
   base_class->pre_alloc_resource  = NULL;
   base_class->release_resource    = gst_h263_encoder_release_resource;
-  base_class->prepare_frame = gst_h263_prepare_encoding;
+  base_class->render_frame = gst_h263_encoder_rendering;
   base_class->notify_frame = NULL;
   base_class->copy_coded_frame = NULL;
 
@@ -84,7 +81,6 @@ static void
 gst_h263_encoder_init(GstH263Encoder *h263_encoder)
 {
   GstH263EncoderPrivate *h263_prv = GST_H263_ENCODER_GET_PRIVATE(h263_encoder);
-  GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(h263_encoder);
   ENCODER_ASSERT(h263_prv);
 
   /* init public */
@@ -107,7 +103,6 @@ gst_h263_encoder_finalize(GObject *object)
 {
   /*free private buffers*/
   GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
-  GstH263EncoderPrivate *h263_prv = GST_H263_ENCODER_GET_PRIVATE(object);
 
   if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
     gst_vaapi_encoder_uninitialize(encoder);
@@ -149,6 +144,8 @@ h263_release_parameters(GstH263Encoder *h263_encoder, GstVaapiDisplay *display)
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
   VAStatus va_status = VA_STATUS_SUCCESS;
 
+  VAAPI_UNUSED_ARG(va_status);
+
   if (VA_INVALID_ID != h263_prv->seq_parameter) {
     va_status = vaDestroyBuffer(va_dpy, h263_prv->seq_parameter);
     h263_prv->seq_parameter = VA_INVALID_ID;
@@ -198,7 +195,7 @@ gst_h263_encoder_release_resource(GstVaapiBaseEncoder* encoder,
 }
 
 static EncoderStatus
-gst_h263_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+gst_h263_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
                      GstVaapiContext *context, GstVaapiSurface *surface,
                      guint frame_index, VABufferID coded_buf, gboolean *is_key)
 
index 5204a05..69b9989 100644 (file)
@@ -42,6 +42,7 @@ static void gst_h264encode_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
 static gboolean _h264_check_valid_profile(guint profile);
 static gboolean _h264_check_valid_level(guint level);
+static gboolean  gst_h264encode_set_src_caps(GstVaapiEncode* encode, GstCaps *caps);
 
 
 /* h264 encode */
@@ -87,8 +88,7 @@ static void
 gst_h264encode_class_init(GstH264EncodeClass *klass)
 {
   GObjectClass * const object_class = G_OBJECT_CLASS(klass);
-  GstVaapiEncodeClass * const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
-
+  GstVaapiEncodeClass *const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
   GST_DEBUG_CATEGORY_INIT (gst_vaapi_h264_encode_debug, "vaapih264encode", 0,
       "vaapih264encode element");
 
@@ -96,6 +96,7 @@ gst_h264encode_class_init(GstH264EncodeClass *klass)
   object_class->set_property  = gst_h264encode_set_property;
   object_class->get_property  = gst_h264encode_get_property;
 
+  encode_class->set_encoder_src_caps = gst_h264encode_set_src_caps;
 
   g_object_class_install_property (object_class, H264_PROP_PROFILE,
           g_param_spec_uint ("profile",
@@ -159,7 +160,6 @@ gst_h264encode_class_init(GstH264EncodeClass *klass)
 static void
 gst_h264encode_init(GstH264Encode *h264_encode, GstH264EncodeClass *klass)
 {
-  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
   GstVaapiEncode *encode = GST_VAAPI_ENCODE(h264_encode);
   encode->encoder = GST_VAAPI_ENCODER(gst_h264_encoder_new());
   ENCODER_ASSERT(encode->encoder);
@@ -280,7 +280,7 @@ gst_h264encode_get_property (GObject * object, guint prop_id,
 static gboolean
 _h264_check_valid_profile(guint profile)
 {
-   static const limit_profiles[] = {
+   static const guint limit_profiles[] = {
          H264_PROFILE_BASELINE,
          H264_PROFILE_MAIN,
          H264_PROFILE_HIGH
@@ -297,7 +297,7 @@ _h264_check_valid_profile(guint profile)
 static gboolean
 _h264_check_valid_level(guint level)
 {
-  static const limit_levels[] = {
+  static const guint limit_levels[] = {
         H264_LEVEL_10,
         H264_LEVEL_11,
         H264_LEVEL_12,
@@ -325,3 +325,14 @@ _h264_check_valid_level(guint level)
 }
 
 
+static gboolean
+gst_h264encode_set_src_caps(GstVaapiEncode* encode, GstCaps *caps)
+{
+  g_return_val_if_fail(caps,FALSE);
+  gst_caps_set_simple(caps, "stream-format", G_TYPE_STRING, "avc",
+                            "alignment", G_TYPE_STRING, "au",
+                            NULL);
+  return TRUE;
+}
+
+
index 10e9c45..4d7a039 100644 (file)
@@ -18,6 +18,9 @@
 #include "gst/vaapi/gstvaapivideobuffer.h"
 #include "gst/vaapi/gstvaapidisplay_priv.h"
 
+/* enable old lib va*/
+//#define _SIMPLE_LIB_VA_
+
 GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h264_encoder_debug);
 #define GST_CAT_DEFAULT gst_vaapi_h264_encoder_debug
 
@@ -29,6 +32,19 @@ GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h264_encoder_debug);
 
 #define REF_RECON_SURFACE_NUM   2
 
+#define ENTROPY_MODE_CAVLC      0
+#define ENTROPY_MODE_CABAC      1
+
+#define BR_CBR          0
+#define BR_VBR          1
+#define BR_CQP          2
+
+#define NAL_REF_IDC_NONE        0
+#define NAL_REF_IDC_LOW         1
+#define NAL_REF_IDC_MEDIUM      2
+#define NAL_REF_IDC_HIGH        3
+
+
 typedef enum {
   NAL_UNKNOWN     = 0,
   NAL_NON_IDR     = 1,
@@ -40,6 +56,13 @@ typedef enum {
   NAL_FILLER      = 12,
 }H264_NAL_TYPE;
 
+
+typedef enum {
+  SLICE_TYPE_P  = 0,
+  SLICE_TYPE_B  = 1,
+  SLICE_TYPE_I  = 2
+} H264_SLICE_TYPE;
+
 struct _GstH264EncodeBuffer {
   GstBuffer           buffer;
   VABufferID         *coded_id;
@@ -54,19 +77,36 @@ struct _GstH264EncoderPrivate {
   /* private data*/
   GQueue           *video_buffer_caches; /*not used for baseline*/
 
-  GstVaapiSurface  *ref_surface;  /* reference buffer*/
+  GstVaapiSurface  *ref_surface1;  /* reference buffer*/
+  GstVaapiSurface  *ref_surface2;  /* for B frames */
   GstVaapiSurface  *recon_surface; /* reconstruct buffer*/
 
   VABufferID        seq_parameter;
   VABufferID        pic_parameter;
   VABufferID        slice_parameter;
-  VAEncSliceParameterBuffer *slice_param_buffers;
+  VABufferID        packed_sps_par_buf;
+  VABufferID        packed_sps_data_buf;
+  VABufferID        packed_pps_par_buf;
+  VABufferID        packed_pps_data_buf;
+#ifdef _SIMPLE_LIB_VA_
+  VAEncSliceParameterBuffer     *slice_param_buffers;
+#else
+  VAEncSliceParameterBufferH264 *slice_param_buffers;
+#endif
   guint32           default_slice_height;
   guint32           slice_mod_mb_num;
+  guint32           default_cts_offset;
 
   GstBuffer        *sps_data;
   GstBuffer        *pps_data;
 
+  GQueue           *queued_buffers;  /* GstVaapiVideoBuffers with surface*/
+
+  guint32           gop_count;
+  guint32           cur_display_num;
+  guint32           cur_decode_num;
+  H264_SLICE_TYPE   cur_slice_type;
+  guint64           last_decode_time;
 };
 
 G_DEFINE_TYPE(GstH264Encoder, gst_h264_encoder, GST_TYPE_VAAPI_BASE_ENCODER);
@@ -104,8 +144,12 @@ static gboolean      gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder,
                                     GstVaapiDisplay *display, GstVaapiContext *context);
 static gboolean      gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder,
                                     GstVaapiDisplay *display, GstVaapiContext *context);
-
-static EncoderStatus gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+static EncoderStatus gst_h264_encoder_prepare_next_buffer(GstVaapiBaseEncoder* encoder,
+                                    GstVaapiVideoBuffer *display_buf,  gboolean need_flush,
+                                    GstVaapiVideoBuffer **out_buf);
+static void          gst_h264_encoder_frame_failed(GstVaapiBaseEncoder *encoder,
+                                    GstVaapiVideoBuffer* buffer);
+static EncoderStatus gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
                                     GstVaapiContext *context, GstVaapiSurface *surface,
                                     guint frame_index, VABufferID coded_buf, gboolean *is_key);
 static void          gst_h264_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size);
@@ -119,17 +163,20 @@ static void     h264_bitstream_init(H264Bitstream *bitstream, guint32 bit_capabi
 static gboolean h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_size);
 static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value);
 static gboolean h264_bitstream_write_ue(H264Bitstream *bitstream, guint32 value);
-static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value);
+static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, gint32 value);
 static gboolean h264_bitstream_write_trailing_bits(H264Bitstream *bitstream);
 
 static gboolean h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, guint32 byte_size);
 static void     h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag);
 static gboolean h264_bitstream_auto_grow(H264Bitstream *bitstream, guint32 extra_bit_size);
-static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
-static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, VAEncSequenceParameterBufferH264 *seq);
+static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, VAEncPictureParameterBufferH264 *pic);
+static gboolean h264_bitstream_write_nal_header(H264Bitstream *bitstream,
+                                    guint nal_ref_idc, guint nal_unit_type);
+
 static const guint8 *h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size);
 static gboolean h264_read_sps_attributes(const guint8 *sps_data, guint32 sps_size,
-                                guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc);
+                                    guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc);
 
 static void
 gst_h264_encoder_class_init(GstH264EncoderClass *klass)
@@ -148,9 +195,11 @@ gst_h264_encoder_class_init(GstH264EncoderClass *klass)
   base_class->validate_attributes = gst_h264_validate_parameters;
   base_class->pre_alloc_resource  = gst_h264_encoder_alloc_slices;
   base_class->release_resource    = gst_h264_encoder_release_resource;
-  base_class->prepare_frame = gst_h264_prepare_encoding;
+  base_class->prepare_next_input_buffer = gst_h264_encoder_prepare_next_buffer;
+  base_class->render_frame = gst_h264_encoder_rendering;
   base_class->notify_frame = gst_h264_notify_frame;
   base_class->copy_coded_frame = gst_h264_encoder_copy_coded_buffer;
+  base_class->encode_frame_failed = gst_h264_encoder_frame_failed;
 
   encoder_class->flush = gst_h264_encoder_flush;
 
@@ -291,18 +340,10 @@ h264_get_va_profile(guint32 profile)
   return (-1);
 }
 
-GstH264Encoder *
-gst_h264_encoder_new(void)
-{
-  return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
-}
-
-
 static void
 gst_h264_encoder_init(GstH264Encoder *encoder)
 {
   GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
-  GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
   ENCODER_ASSERT(h264_prv);
   h264_prv->public = encoder;
 
@@ -313,19 +354,33 @@ gst_h264_encoder_init(GstH264Encoder *encoder)
   /* init private values*/
   h264_prv->format = GST_MAKE_FOURCC('N','V','1','2');
   h264_prv->es_flag = TRUE;
+  //h264_prv->es_flag = FALSE;
 
-  h264_prv->ref_surface = NULL;
+  h264_prv->ref_surface1 = NULL;
+  h264_prv->ref_surface2 = NULL;
   h264_prv->recon_surface = NULL;
 
   h264_prv->seq_parameter = VA_INVALID_ID;
   h264_prv->pic_parameter = VA_INVALID_ID;
   h264_prv->slice_parameter = VA_INVALID_ID;
+  h264_prv->packed_sps_par_buf = VA_INVALID_ID;
+  h264_prv->packed_sps_data_buf = VA_INVALID_ID;
+  h264_prv->packed_pps_par_buf = VA_INVALID_ID;
+  h264_prv->packed_pps_data_buf = VA_INVALID_ID;
   h264_prv->slice_param_buffers = NULL;
   h264_prv->default_slice_height = 0;
   h264_prv->slice_mod_mb_num = 0;
 
   h264_prv->sps_data = NULL;
   h264_prv->pps_data = NULL;
+
+  h264_prv->queued_buffers = g_queue_new();
+  h264_prv->gop_count = 0;
+  h264_prv->cur_display_num = 0;
+  h264_prv->cur_decode_num = 0;
+  h264_prv->cur_slice_type = SLICE_TYPE_I;
+  h264_prv->last_decode_time = 0LL;
+  h264_prv->default_cts_offset = 0;
 }
 
 static void
@@ -352,10 +407,22 @@ gst_h264_encoder_finalize(GObject *object)
     h264_prv->slice_param_buffers = NULL;
   }
 
+  if (h264_prv->queued_buffers) {
+    ENCODER_ASSERT(g_queue_is_empty(h264_prv->queued_buffers));
+    g_queue_free(h264_prv->queued_buffers);
+    h264_prv->queued_buffers = NULL;
+  }
+
   G_OBJECT_CLASS(gst_h264_encoder_parent_class)->finalize(object);
 }
 
 
+GstH264Encoder *
+gst_h264_encoder_new(void)
+{
+  return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
+}
+
 static void
 gst_h264_encoder_init_public_values(GstH264Encoder* encoder)
 {
@@ -366,6 +433,7 @@ gst_h264_encoder_init_public_values(GstH264Encoder* encoder)
   encoder->init_qp = -1;
   encoder->min_qp = -1;
   encoder->slice_num = 0;
+  encoder->b_frame_num = 0;
 }
 
 void
@@ -389,7 +457,10 @@ gst_h264_validate_parameters(GstVaapiBaseEncoder *base_encoder)
   }
   gst_vaapi_base_encoder_set_va_profile(base_encoder, h264_get_va_profile(encoder->profile));
   if (!encoder->level) {
-    encoder->level = H264_DEFAULT_LEVEL;
+    if (encoder->profile <= H264_PROFILE_BASELINE)
+      encoder->level = H264_LEVEL_30;
+    else
+      encoder->level = H264_LEVEL_41;
   }
   if (!encoder->intra_period) {
     encoder->intra_period = H264_DEFAULT_INTRA_PERIOD;
@@ -407,7 +478,7 @@ gst_h264_validate_parameters(GstVaapiBaseEncoder *base_encoder)
 
   /* default compress ratio 1: (4*8*1.5) */
   if (!encoder->bitrate) {
-    encoder->bitrate = ENCODER_WIDTH(encoder)*ENCODER_HEIGHT(encoder)*ENCODER_FPS(encoder)/4;
+    encoder->bitrate = 0; //ENCODER_WIDTH(encoder)*ENCODER_HEIGHT(encoder)*ENCODER_FPS(encoder)/4;
   }
 
   if (!encoder->slice_num) {
@@ -433,6 +504,12 @@ gst_h264_validate_parameters(GstVaapiBaseEncoder *base_encoder)
   } else {
     h264_prv->slice_mod_mb_num = ((ENCODER_HEIGHT(encoder)+15)/16)%encoder->slice_num;
   }
+
+  if (encoder->b_frame_num) {
+    h264_prv->default_cts_offset = GST_SECOND/ENCODER_FPS(encoder);
+  } else {
+    h264_prv->default_cts_offset = 0;
+  }
   return TRUE;
 }
 
@@ -442,12 +519,12 @@ h264_encoder_release_parameters(GstH264Encoder *h264_encoder, GstVaapiDisplay *d
 {
   VAStatus va_status = VA_STATUS_SUCCESS;
   GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
-  guint32 i;
 
   gboolean is_locked = FALSE;
 
   ENCODER_ASSERT(display);
   ENCODER_ASSERT(context);
+  VAAPI_UNUSED_ARG(va_status);
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
 
   ENCODER_ACQUIRE_DISPLAY_LOCK(display);
@@ -464,6 +541,23 @@ h264_encoder_release_parameters(GstH264Encoder *h264_encoder, GstVaapiDisplay *d
     h264_prv->slice_parameter = VA_INVALID_ID;
   }
 
+  if (VA_INVALID_ID != h264_prv->packed_sps_par_buf) {
+    va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_sps_par_buf);
+    h264_prv->packed_sps_par_buf = VA_INVALID_ID;
+  }
+  if (VA_INVALID_ID != h264_prv->packed_sps_data_buf) {
+    va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_sps_data_buf);
+    h264_prv->packed_sps_data_buf = VA_INVALID_ID;
+  }
+  if (VA_INVALID_ID != h264_prv->packed_pps_par_buf) {
+    va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_pps_par_buf);
+    h264_prv->packed_pps_par_buf = VA_INVALID_ID;
+  }
+  if (VA_INVALID_ID != h264_prv->packed_pps_data_buf) {
+    va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_pps_data_buf);
+    h264_prv->packed_pps_data_buf = VA_INVALID_ID;
+  }
+
   ENCODER_RELEASE_DISPLAY_LOCK(display);
 
   if (h264_prv->slice_param_buffers) {
@@ -471,9 +565,28 @@ h264_encoder_release_parameters(GstH264Encoder *h264_encoder, GstVaapiDisplay *d
     h264_prv->slice_param_buffers = NULL;
   }
 
+  if (h264_prv->sps_data) {
+    gst_buffer_unref(h264_prv->sps_data);
+    h264_prv->sps_data = NULL;
+  }
+  if (h264_prv->pps_data) {
+    gst_buffer_unref(h264_prv->pps_data);
+    h264_prv->pps_data = NULL;
+  }
+
   return TRUE;
 }
 
+static void
+h264_release_queued_buffers(GstH264EncoderPrivate *h264_prv)
+{
+  while (!g_queue_is_empty(h264_prv->queued_buffers)) {
+    GstBuffer* tmp = g_queue_pop_head(h264_prv->queued_buffers);
+    if (tmp)
+      gst_buffer_unref(tmp);
+  }
+}
+
 
 static gboolean
 gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context)
@@ -484,15 +597,31 @@ gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder, GstVaapiDisplay
 
   /* release buffers first */
   h264_encoder_release_parameters(h264_encoder, display, context);
+  h264_release_queued_buffers(h264_prv);
+  h264_prv->cur_display_num = 0;
+  h264_prv->cur_decode_num = 0;
+  h264_prv->cur_slice_type = SLICE_TYPE_I;
+  h264_prv->gop_count = 0;
+  h264_prv->last_decode_time = 0LL;
+  h264_prv->default_cts_offset = 0;
+
+  /*remove ref_surface1*/
+  if (h264_prv->ref_surface1) {
+    if (context) {
+      gst_vaapi_context_put_surface(context, h264_prv->ref_surface1);
+    } else {
+      g_object_unref(h264_prv->ref_surface1);
+    }
+    h264_prv->ref_surface1 = NULL;
+  }
 
-  /*remove ref_surface*/
-  if (h264_prv->ref_surface) {
+  if (h264_prv->ref_surface2) {
     if (context) {
-      gst_vaapi_context_put_surface(context, h264_prv->ref_surface);
+      gst_vaapi_context_put_surface(context, h264_prv->ref_surface2);
     } else {
-      g_object_unref(h264_prv->ref_surface);
+      g_object_unref(h264_prv->ref_surface2);
     }
-    h264_prv->ref_surface = NULL;
+    h264_prv->ref_surface2 = NULL;
   }
 
   /*remove recon_surface*/
@@ -505,14 +634,6 @@ gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder, GstVaapiDisplay
     h264_prv->recon_surface = NULL;
   }
 
-  if (h264_prv->sps_data) {
-    gst_buffer_unref(h264_prv->sps_data);
-    h264_prv->sps_data = NULL;
-  }
-  if (h264_prv->pps_data) {
-    gst_buffer_unref(h264_prv->pps_data);
-    h264_prv->pps_data = NULL;
-  }
   return ret;
 }
 
@@ -520,19 +641,123 @@ static gboolean
 gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display, GstVaapiContext *context)
 {
   gboolean ret = TRUE;
-  VAStatus va_status = VA_STATUS_SUCCESS;
   GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
   GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
 
-  h264_prv->slice_param_buffers = (VAEncSliceParameterBuffer*)g_malloc0_n(h264_encoder->slice_num,
-                                                     sizeof(h264_prv->slice_param_buffers[0]));
+  h264_prv->slice_param_buffers =
+#ifdef _SIMPLE_LIB_VA_
+  (VAEncSliceParameterBuffer*)
+#else
+  (VAEncSliceParameterBufferH264*)
+#endif
+          g_malloc0_n(h264_encoder->slice_num,
+              sizeof(h264_prv->slice_param_buffers[0]));
+
   return ret;
 }
 
+static void
+gst_h264_encoder_frame_failed(GstVaapiBaseEncoder *encoder, GstVaapiVideoBuffer* buffer)
+{
+  GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+
+  h264_release_queued_buffers(h264_prv);
+  h264_prv->cur_display_num = 0;
+  h264_prv->cur_decode_num = 0;
+  h264_prv->cur_slice_type = SLICE_TYPE_I;
+  h264_prv->gop_count = 0;
+  h264_prv->last_decode_time = 0LL;
+}
+
+static EncoderStatus
+gst_h264_encoder_prepare_next_buffer(GstVaapiBaseEncoder* encoder,
+                              GstVaapiVideoBuffer *display_buf, gboolean need_flush,
+                              GstVaapiVideoBuffer **out_buf)
+{
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+  GstVaapiVideoBuffer  *return_buf = NULL;
+  GstVaapiVideoBuffer  *tmp_next_buf = NULL;
+  guint64 pts = 0;
+  guint32 cts = 0;
+
+  if (NULL == display_buf && g_queue_is_empty(h264_prv->queued_buffers)) {
+    ret = ENCODER_BUFFER_EMPTY;
+    if (h264_prv->gop_count >= h264_encoder->intra_period || need_flush)
+      h264_prv->gop_count = 0;
+    goto end;
+  }
+
+  if (display_buf) {
+    ++h264_prv->gop_count;
+    gst_buffer_ref(GST_BUFFER_CAST(display_buf));
+    h264_prv->last_decode_time = GST_BUFFER_TIMESTAMP(display_buf);
+  }
+
+  /* first frame */
+  if (h264_prv->gop_count == 1) {
+    ENCODER_ASSERT(display_buf);
+    h264_prv->cur_display_num = 0;
+    h264_prv->cur_decode_num = 0;
+    h264_prv->cur_slice_type = SLICE_TYPE_I;
+    return_buf = display_buf;
+    goto end;
+  }
+
+  if (display_buf) {
+    if (h264_encoder->b_frame_num &&
+        h264_prv->gop_count < h264_encoder->intra_period &&
+        g_queue_get_length(h264_prv->queued_buffers) < h264_encoder->b_frame_num
+        )
+    {
+      g_queue_push_tail(h264_prv->queued_buffers, display_buf);
+      ret = ENCODER_BUFFER_WAITING;
+      goto end;
+    }
+    h264_prv->cur_slice_type = SLICE_TYPE_P;
+    h264_prv->cur_display_num = h264_prv->gop_count-1;
+    ++h264_prv->cur_decode_num;
+    return_buf = display_buf;
+  } else {
+    if (need_flush) {
+      return_buf = (GstVaapiVideoBuffer*)g_queue_pop_tail(h264_prv->queued_buffers);
+      h264_prv->cur_slice_type = SLICE_TYPE_P;
+      h264_prv->cur_display_num = h264_prv->gop_count - 1;
+      ++h264_prv->cur_decode_num;
+    } else {
+      return_buf = (GstVaapiVideoBuffer*)g_queue_pop_head(h264_prv->queued_buffers);
+      h264_prv->cur_slice_type = SLICE_TYPE_B;
+      h264_prv->cur_display_num = h264_prv->gop_count - 2 - g_queue_get_length(h264_prv->queued_buffers);
+    }
+  }
+
+end:
+  *out_buf = return_buf;
+  /* calculate cts/pts/dts */
+  if (return_buf) {
+    pts = GST_BUFFER_TIMESTAMP(return_buf);
+    tmp_next_buf = (GstVaapiVideoBuffer*)g_queue_peek_head(h264_prv->queued_buffers);
+    if (tmp_next_buf) {
+      GST_BUFFER_TIMESTAMP(return_buf) = GST_BUFFER_TIMESTAMP(tmp_next_buf);
+    } else if (SLICE_TYPE_B == h264_prv->cur_slice_type) {
+      GST_BUFFER_TIMESTAMP(return_buf) = h264_prv->last_decode_time;
+    }
+    cts = (pts + h264_prv->default_cts_offset - GST_BUFFER_TIMESTAMP(return_buf));
+    ENCODER_ASSERT(cts < 0x80000000);
+    if (cts >= 0x80000000) {
+      cts = 0;
+    }
+    GST_BUFFER_OFFSET_END(return_buf) = cts;
+  }
+  return ret;
+}
 
 
+#ifdef _SIMPLE_LIB_VA_
 static EncoderStatus
-gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
                              GstVaapiContext *context, GstVaapiSurface *surface,
                              guint frame_index, VABufferID coded_buf, gboolean *is_key)
 {
@@ -540,11 +765,7 @@ gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display
   VAStatus va_status = VA_STATUS_SUCCESS;
   GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
   GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
-#ifdef _MRST_
   VAEncPictureParameterBufferH264 pic_h264;
-#else
-  VAEncPictureParameterBufferH264Baseline pic_h264;
-#endif
   VAEncSliceParameterBuffer *slice_h264 = NULL;
 
   gboolean is_locked = FALSE;
@@ -553,19 +774,14 @@ gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
   VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
 
-  *is_key = ((frame_index % h264_encoder->intra_period) == 0);
+  *is_key = (h264_prv->cur_slice_type == SLICE_TYPE_I);
 
   /* lock display */
   ENCODER_ACQUIRE_DISPLAY_LOCK(display);
   /*handle first surface_index*/
   /*only need first frame*/
   if (VA_INVALID_ID == h264_prv->seq_parameter) { /*first time*/
-  #ifdef _MRST_
-    VAEncSequenceParameterBufferH264 seq_h264 = {0};
-  #else
-    VAEncSequenceParameterBufferH264Baseline seq_h264 = {0};
-  #endif
-
+    VAEncSequenceParameterBufferH264 seq_h264 = { 0 };
     seq_h264.level_idc = h264_encoder->level; /* 3.0 */
     seq_h264.max_num_ref_frames = 1; /*Only I, P frames*/
     seq_h264.picture_width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
@@ -588,16 +804,16 @@ gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display
   }
 
   /* set pic_parameters*/
-  if (!h264_prv->ref_surface) {
-    h264_prv->ref_surface = gst_vaapi_context_get_surface(context);
-    ENCODER_CHECK_STATUS(h264_prv->ref_surface, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+  if (!h264_prv->ref_surface1) {
+    h264_prv->ref_surface1 = gst_vaapi_context_get_surface(context);
+    ENCODER_CHECK_STATUS(h264_prv->ref_surface1, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
   }
   if (!h264_prv->recon_surface) {
     h264_prv->recon_surface = gst_vaapi_context_get_surface(context);
     ENCODER_CHECK_STATUS(h264_prv->recon_surface, ENCODER_SURFACE_ERR, "reconstructed surface, h264_pop_free_surface failed.\n");
   }
 
-  pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface);
+  pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface1);
   pic_h264.reconstructed_picture = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
   pic_h264.coded_buf = coded_buf;
   pic_h264.picture_width = ENCODER_WIDTH(h264_encoder);
@@ -653,9 +869,9 @@ gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display
   va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->slice_parameter, 1);
   ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "rendering slice-parameters buffer failed.\n");
 
-  /*after finished, set ref_surface_index, recon_surface_index */
-  GstVaapiSurface *swap = h264_prv->ref_surface;
-  h264_prv->ref_surface = h264_prv->recon_surface;
+  /*after finished, set ref_surface1_index, recon_surface_index */
+  GstVaapiSurface *swap = h264_prv->ref_surface1;
+  h264_prv->ref_surface1 = h264_prv->recon_surface;
   h264_prv->recon_surface = swap;
 
   end:
@@ -663,6 +879,394 @@ gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display
   return ret;
 }
 
+#else  /* extended libva, new parameter structures*/
+
+static void h264_swap_surface(GstVaapiSurface **s1, GstVaapiSurface **s2)
+{
+  GstVaapiSurface *tmp;
+
+  g_return_if_fail(s1 && s2);
+  tmp = *s1;
+  *s1 = *s2;
+  *s2 = tmp;
+}
+
+static gboolean
+h264_recreate_seq_param(GstH264Encoder *h264_encoder,
+                        VADisplay va_dpy, VAContextID context_id)
+{
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+  VAEncSequenceParameterBufferH264 seq_h264 = { 0 };
+  guint width_in_mbs, height_in_mbs;
+  gboolean ret = TRUE;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+
+  /* only once */
+  if (VA_INVALID_ID != h264_prv->seq_parameter)
+    return TRUE;
+
+  width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
+  height_in_mbs = (ENCODER_HEIGHT(h264_encoder)+15)/16;
+
+  seq_h264.seq_parameter_set_id = 0;
+  seq_h264.profile_idc = h264_encoder->profile;
+  seq_h264.level_idc = h264_encoder->level; /* 3.0 */
+  seq_h264.intra_period = h264_encoder->intra_period;
+  seq_h264.ip_period = 0;           // ?
+  seq_h264.max_num_ref_frames = (h264_encoder->b_frame_num < 2 ? 3 : h264_encoder->b_frame_num+1);  /* extra refs needed when B-frames are enabled -- TODO confirm exact minimum */
+  seq_h264.picture_width_in_mbs = width_in_mbs;
+  seq_h264.picture_height_in_mbs = height_in_mbs;
+  seq_h264.frame_mbs_only_flag = 1;
+  seq_h264.target_usage = 1;        // ?
+
+  if (h264_encoder->init_qp == -1)
+      seq_h264.rate_control_method = BR_CBR;
+  else if (h264_encoder->init_qp == -2)
+      seq_h264.rate_control_method = BR_VBR;
+  else {
+      assert(h264_encoder->init_qp >= 0 && h264_encoder->init_qp <= 51);
+      seq_h264.rate_control_method = BR_CQP;
+  }
+
+  if (h264_encoder->bitrate > 0)
+      seq_h264.bits_per_second = h264_encoder->bitrate; /* use kbps as input */
+  else
+      seq_h264.bits_per_second = 0;
+
+  if (seq_h264.rate_control_method == BR_VBR) {
+    seq_h264.max_bits_per_second = seq_h264.bits_per_second*1.5;
+    seq_h264.min_bits_per_second = seq_h264.bits_per_second*0.3;
+  }
+  seq_h264.initial_hrd_buffer_fullness = 0;   // ??
+  seq_h264.hrd_buffer_size = 0;
+  seq_h264.num_units_in_tick = 100;
+  seq_h264.time_scale = ENCODER_FPS(h264_encoder)*2*seq_h264.num_units_in_tick;
+
+  if (height_in_mbs*16 - ENCODER_HEIGHT(h264_encoder)) {
+    seq_h264.frame_cropping_flag = 1;
+    seq_h264.frame_crop_left_offset = 0;
+    seq_h264.frame_crop_right_offset = 0;
+    seq_h264.frame_crop_top_offset = 0;
+    seq_h264.frame_crop_bottom_offset =
+           (height_in_mbs * 16 - ENCODER_HEIGHT(h264_encoder))/(2 * (!seq_h264.frame_mbs_only_flag + 1));
+  }
+  seq_h264.pic_order_cnt_type = 0;   // pic order cnt
+  seq_h264.direct_8x8_inference_flag = 0;
+  seq_h264.log2_max_frame_num_minus4 = 4; // log2(seq_h264.intra_period)-3 : 0
+  seq_h264.log2_max_pic_order_cnt_lsb_minus4 = seq_h264.log2_max_frame_num_minus4+2;
+  seq_h264.vui_flag = 0; // 0? or 1?
+
+  va_status = vaCreateBuffer(va_dpy, context_id,
+                             VAEncSequenceParameterBufferType,
+                             sizeof(seq_h264), 1,
+                             &seq_h264, &h264_prv->seq_parameter);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+                     FALSE, "alloc seq-buffer failed.\n");
+
+  /*pack sps header buffer/data */
+  if (NULL == h264_prv->sps_data) {
+    VAEncPackedHeaderParameterBuffer packed_header_param_buffer = { 0 };
+    guint32 length_in_bits, offset_in_bytes;
+    guint8 *packed_seq_buffer = NULL;
+    H264Bitstream bitstream;
+    h264_bitstream_init(&bitstream, 128*8);
+    h264_bitstream_write_uint(&bitstream, 0x00000001, 32); /* start code*/
+    h264_bitstream_write_nal_header(&bitstream, NAL_REF_IDC_HIGH, NAL_SPS);
+    h264_bitstream_write_sps(&bitstream, &seq_h264);
+    ENCODER_ASSERT(BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+    length_in_bits = BIT_STREAM_BIT_SIZE(&bitstream);
+    packed_seq_buffer = BIT_STREAM_BUFFER(&bitstream);
+    //h264_prv->sps_data = gst_buffer_new_and_alloc((length_in_bits+7)/8);
+    //GST_BUFFER_SIZE(h264_prv->sps_data) = (length_in_bits+7)/8-4;
+    //memcpy(GST_BUFFER_DATA(h264_prv->sps_data), packed_seq_buffer+4, (length_in_bits+7)/8-4);
+
+    offset_in_bytes = 0;
+    packed_header_param_buffer.type = VAEncPackedHeaderSPS;
+    packed_header_param_buffer.insert_emulation_bytes = 1;
+    packed_header_param_buffer.skip_emulation_check_count = 5;
+    packed_header_param_buffer.num_headers = 1;
+    packed_header_param_buffer.length_in_bits = &length_in_bits;
+    packed_header_param_buffer.offset_in_bytes = &offset_in_bytes;
+    va_status = vaCreateBuffer(va_dpy,
+                               context_id,
+                               VAEncPackedHeaderParameterBufferType,
+                               sizeof(packed_header_param_buffer), 1,
+                               &packed_header_param_buffer,
+                               &h264_prv->packed_sps_par_buf);
+    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+                         FALSE,
+                         "EncPackedSeqHeaderParameterBuffer failed");
+    va_status = vaCreateBuffer(va_dpy,
+                               context_id,
+                               VAEncPackedHeaderDataBufferType,
+                               (length_in_bits + 7) / 8, 1,
+                               packed_seq_buffer,
+                               &h264_prv->packed_sps_data_buf);
+    h264_bitstream_destroy(&bitstream, TRUE);
+    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+                         FALSE,
+                         "EncPackedSeqHeaderDataBuffer failed");
+  }
+end:
+
+  return ret;
+}
+
+static gboolean
+h264_recreate_pic_param(GstH264Encoder *h264_encoder,
+                        VADisplay va_dpy, VAContextID context_id,
+                        VABufferID coded_buf)
+{
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+  VAEncPictureParameterBufferH264 pic_h264;
+  gboolean ret = TRUE;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+
+  VAAPI_UNUSED_ARG(va_status);
+  memset(&pic_h264, 0, sizeof(pic_h264));
+  pic_h264.CurrPic.picture_id = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
+  pic_h264.CurrPic.TopFieldOrderCnt = h264_prv->cur_display_num * 2;   // ??? /**/
+  pic_h264.ReferenceFrames[0].picture_id = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface1);
+  pic_h264.ReferenceFrames[1].picture_id = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface2);
+  pic_h264.ReferenceFrames[2].picture_id = VA_INVALID_ID;
+  pic_h264.CodedBuf = coded_buf;
+
+  pic_h264.seq_parameter_set_id = 0;
+  pic_h264.pic_parameter_set_id = 0;
+  pic_h264.last_picture = 0;
+  pic_h264.frame_num = (h264_prv->cur_slice_type == SLICE_TYPE_B ?
+                       (h264_prv->cur_decode_num + 1) : h264_prv->cur_decode_num);
+  pic_h264.coding_type = 0;
+  pic_h264.pic_init_qp = (h264_encoder->init_qp >= 0 ? h264_encoder->init_qp : 26);
+  pic_h264.num_ref_idx_l0_active_minus1 = 0;
+  pic_h264.num_ref_idx_l1_active_minus1 = 0;
+  pic_h264.pic_fields.bits.idr_pic_flag = (h264_prv->cur_slice_type == SLICE_TYPE_I);
+  pic_h264.pic_fields.bits.reference_pic_flag = (h264_prv->cur_slice_type != SLICE_TYPE_B);
+  pic_h264.pic_fields.bits.entropy_coding_mode_flag = ENTROPY_MODE_CABAC;
+  pic_h264.pic_fields.bits.weighted_pred_flag = 0;
+  pic_h264.pic_fields.bits.weighted_bipred_idc = 0;
+  pic_h264.pic_fields.bits.transform_8x8_mode_flag = 1;
+  pic_h264.pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+  if (VA_INVALID_ID != h264_prv->pic_parameter) { /* share the same pic_parameter*/
+    vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+    h264_prv->pic_parameter = VA_INVALID_ID;
+  }
+  va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+                               sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
+
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, FALSE, "creating pic-param buffer failed.\n");
+
+  //if (NULL == h264_prv->pps_data) {
+  if (VA_INVALID_ID == h264_prv->packed_pps_data_buf) {
+    VAEncPackedHeaderParameterBuffer packed_header_param_buffer = { 0 };
+    guint32 length_in_bits, offset_in_bytes;
+    guint8 *packed_pic_buffer = NULL;
+    H264Bitstream bitstream;
+    h264_bitstream_init(&bitstream, 128*8);
+    h264_bitstream_write_uint(&bitstream, 0x00000001, 32); /* start code*/
+    h264_bitstream_write_nal_header(&bitstream, NAL_REF_IDC_HIGH, NAL_PPS);
+    h264_bitstream_write_pps(&bitstream, &pic_h264);
+    ENCODER_ASSERT(BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+    length_in_bits = BIT_STREAM_BIT_SIZE(&bitstream);
+    packed_pic_buffer = BIT_STREAM_BUFFER(&bitstream);
+    //h264_prv->pps_data = gst_buffer_new_and_alloc((length_in_bits+7)/8);
+    //GST_BUFFER_SIZE(h264_prv->pps_data) = (length_in_bits+7)/8-4;
+    //memcpy(GST_BUFFER_DATA(h264_prv->pps_data), packed_pic_buffer+4, (length_in_bits+7)/8-4);
+
+    offset_in_bytes = 0;
+    packed_header_param_buffer.type = VAEncPackedHeaderPPS;
+    packed_header_param_buffer.insert_emulation_bytes = 1;
+    packed_header_param_buffer.skip_emulation_check_count = 5;
+    packed_header_param_buffer.num_headers = 1;
+    packed_header_param_buffer.length_in_bits = &length_in_bits;
+    packed_header_param_buffer.offset_in_bytes = &offset_in_bytes;
+
+    va_status = vaCreateBuffer(va_dpy,
+                               context_id,
+                               VAEncPackedHeaderParameterBufferType,
+                               sizeof(packed_header_param_buffer), 1,
+                               &packed_header_param_buffer,
+                               &h264_prv->packed_pps_par_buf);
+    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+                         FALSE,
+                         "EncPackedPicHeaderParameterBuffer failed");
+
+    va_status = vaCreateBuffer(va_dpy,
+                               context_id,
+                               VAEncPackedHeaderDataBufferType,
+                               (length_in_bits + 7) / 8, 1,
+                               packed_pic_buffer,
+                               &h264_prv->packed_pps_data_buf);
+    h264_bitstream_destroy(&bitstream, TRUE);
+    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+                         FALSE,
+                         "EncPackedPicHeaderDataBuffer failed");
+  }
+
+end:
+  return ret;
+}
+
+
+static gboolean
+h264_recreate_slice_param(GstH264Encoder *h264_encoder,
+                        VADisplay va_dpy, VAContextID context_id)
+{
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+  VAEncSliceParameterBufferH264 *slice_h264 = NULL;
+  guint width_in_mbs;
+  gboolean ret = TRUE;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+
+  width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
+
+  int i = 0;
+  guint32 last_row_num = 0;
+  guint32 slice_mod_num = h264_prv->slice_mod_mb_num;
+
+  memset(h264_prv->slice_param_buffers, 0, h264_encoder->slice_num*sizeof(h264_prv->slice_param_buffers[0]));
+  for (i = 0; i < h264_encoder->slice_num; ++i) {
+    slice_h264 = &h264_prv->slice_param_buffers[i];
+
+    slice_h264->starting_macroblock_address = last_row_num*width_in_mbs;
+    slice_h264->number_of_mbs = width_in_mbs*h264_prv->default_slice_height;
+    last_row_num += h264_prv->default_slice_height;
+    if (slice_mod_num) {
+      slice_h264->number_of_mbs += width_in_mbs;
+      ++last_row_num;
+      --slice_mod_num;
+    }
+    slice_h264->pic_parameter_set_id = 0;
+    slice_h264->slice_type = h264_prv->cur_slice_type;
+    slice_h264->direct_spatial_mv_pred_flag = 0;
+    slice_h264->num_ref_idx_l0_active_minus1 = 0;
+    slice_h264->num_ref_idx_l1_active_minus1 = 0;
+    slice_h264->cabac_init_idc = 0;
+    slice_h264->slice_qp_delta = 0;
+    slice_h264->disable_deblocking_filter_idc = 0;
+    slice_h264->slice_alpha_c0_offset_div2 = 2;
+    slice_h264->slice_beta_offset_div2 = 2;
+    slice_h264->idr_pic_id = 0;
+
+    slice_h264->ref_pic_list_modification_flag_l0 = 0;
+    slice_h264->ref_pic_list_modification_flag_l1 = 0;
+
+  }
+  ENCODER_ASSERT(last_row_num == (ENCODER_HEIGHT(h264_encoder)+15)/16);
+
+  if (VA_INVALID_ID != h264_prv->slice_parameter) {
+    vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+    h264_prv->slice_parameter = VA_INVALID_ID;
+  }
+  va_status = vaCreateBuffer(va_dpy,
+                             context_id,
+                             VAEncSliceParameterBufferType,
+                             sizeof(h264_prv->slice_param_buffers[0]),
+                             h264_encoder->slice_num,
+                             h264_prv->slice_param_buffers,
+                             &h264_prv->slice_parameter);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, FALSE, "creating slice-parameters buffer failed.\n");
+
+end:
+  return ret;
+}
+
+static EncoderStatus
+gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+                             GstVaapiContext *context, GstVaapiSurface *surface,
+                             guint frame_index, VABufferID coded_buf, gboolean *is_key)
+{
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+  VABufferID va_buffers[64];
+  guint32    va_buffers_count = 0;
+  gboolean is_params_ok = TRUE;
+
+  gboolean is_locked = FALSE;
+
+  ENCODER_ASSERT(display && context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+
+  *is_key = (h264_prv->cur_slice_type == SLICE_TYPE_I);
+
+  if (!h264_prv->ref_surface1) {
+    h264_prv->ref_surface1 = gst_vaapi_context_get_surface(context);
+    ENCODER_CHECK_STATUS(h264_prv->ref_surface1, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+  }
+  if (!h264_prv->ref_surface2) {
+    h264_prv->ref_surface2 = gst_vaapi_context_get_surface(context);
+    ENCODER_CHECK_STATUS(h264_prv->ref_surface2, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+  }
+  if (!h264_prv->recon_surface) {
+    h264_prv->recon_surface = gst_vaapi_context_get_surface(context);
+    ENCODER_CHECK_STATUS(h264_prv->recon_surface, ENCODER_SURFACE_ERR, "reconstructed surface, h264_pop_free_surface failed.\n");
+  }
+
+  if (SLICE_TYPE_P == h264_prv->cur_slice_type) {
+    h264_swap_surface(&h264_prv->ref_surface1, &h264_prv->ref_surface2);
+  }
+
+  /* set sequence parameters; these need to be set every time */
+  is_params_ok = h264_recreate_seq_param(h264_encoder, va_dpy, context_id);
+  ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+                       "h264_recreate_seq_param failed");
+  /* set pic_parameters*/
+  is_params_ok = h264_recreate_pic_param(h264_encoder, va_dpy, context_id, coded_buf);
+  ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+                       "h264_recreate_pic_param failed");
+  /* set slice parameters, support multiple slices */
+  is_params_ok = h264_recreate_slice_param(h264_encoder, va_dpy, context_id);
+  ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+                       "h264_recreate_slice_param failed");
+
+  /* lock display */
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+
+  /*render all buffers*/
+  if (VA_INVALID_ID != h264_prv->seq_parameter) {
+    va_buffers[va_buffers_count++] = h264_prv->seq_parameter;
+  }
+  if (VA_INVALID_ID != h264_prv->pic_parameter) {
+    va_buffers[va_buffers_count++] = h264_prv->pic_parameter;
+  }
+  if (VA_INVALID_ID != h264_prv->slice_parameter) {
+    va_buffers[va_buffers_count++] = h264_prv->slice_parameter;
+  }
+  if (SLICE_TYPE_I == h264_prv->cur_slice_type) {
+    if (VA_INVALID_ID != h264_prv->packed_sps_par_buf) {
+      va_buffers[va_buffers_count++] = h264_prv->packed_sps_par_buf;
+    }
+    if (VA_INVALID_ID != h264_prv->packed_sps_data_buf) {
+      va_buffers[va_buffers_count++] = h264_prv->packed_sps_data_buf;
+    }
+    if (VA_INVALID_ID != h264_prv->packed_pps_par_buf) {
+      va_buffers[va_buffers_count++] = h264_prv->packed_pps_par_buf;
+    }
+    if (VA_INVALID_ID != h264_prv->packed_pps_data_buf) {
+      va_buffers[va_buffers_count++] = h264_prv->packed_pps_data_buf;
+    }
+  }
+
+  va_status = vaRenderPicture(va_dpy, context_id, va_buffers, va_buffers_count);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR,
+                       "vaRenderH264Picture failed.\n");
+
+  /* after finishing, swap recon_surface and ref_surface2 */
+  if (SLICE_TYPE_P == h264_prv->cur_slice_type ||
+      SLICE_TYPE_I == h264_prv->cur_slice_type) {
+    h264_swap_surface(&h264_prv->recon_surface, &h264_prv->ref_surface2);
+  }
+
+  end:
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  return ret;
+}
+
+#endif
 
 static GstBuffer *
 gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
@@ -795,6 +1399,10 @@ gst_h264_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
   GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
 
   //h264_prv->frame_count = 0;
+  h264_prv->cur_display_num = 0;
+  h264_prv->cur_decode_num = 0;
+  h264_prv->cur_slice_type = SLICE_TYPE_I;
+  h264_prv->gop_count = g_queue_get_length(h264_prv->queued_buffers);
   //gst_vaapi_base_encoder_set_frame_notify((GST_VAAPI_BASE_ENCODER)encoder, TRUE);
 
   //end:
@@ -852,6 +1460,10 @@ int main_test(int argc, char* argv[])
   /*set buffers*/
   int box_width=8;
   int row_shift=0;
+
+  VAAPI_UNUSED_ARG(v_width);
+  VAAPI_UNUSED_ARG(u_width);
+  VAAPI_UNUSED_ARG(y_width);
   raw_buffer = (GstBuffer**)g_malloc0(raw_buffer_num*sizeof(GstBuffer*));
   for (i = 0; i < raw_buffer_num; i++) {
     raw_buffer[i] = gst_buffer_new_and_alloc(buffer_size);
@@ -1015,6 +1627,7 @@ h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_s
     return TRUE;
   }
 
+  VAAPI_UNUSED_ARG(ret);
   ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, bit_size), FALSE, "h264_bitstream_auto_grow failed.\n");
   byte_pos = (bitstream->bit_size>>3);
   bit_offset = (bitstream->bit_size&0x07);
@@ -1031,10 +1644,9 @@ h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_s
     bit_offset = 0;
   }
   ENCODER_ASSERT(cur_byte <= bitstream->buffer + bitstream->max_bit_capability/8);
-  return TRUE;
 
   end:
-  return FALSE;
+  return ret;
 }
 
 static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value)
@@ -1058,6 +1670,8 @@ h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, gui
   if (!byte_size) {
     return 0;
   }
+
+  VAAPI_UNUSED_ARG(ret);
   ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, byte_size<<3), FALSE, "h264_bitstream_auto_grow failed.\n");
   if (0 == (bitstream->bit_size&0x07)) {
     memcpy(&bitstream->buffer[bitstream->bit_size>>3], buf, byte_size);
@@ -1070,10 +1684,9 @@ h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, gui
       ++buf;
     }
   }
-  return TRUE;
 
 end:
-  return FALSE;
+  return ret;
 }
 
 static gboolean
@@ -1088,14 +1701,13 @@ h264_bitstream_write_ue(H264Bitstream *bitstream, guint32 value)
   }
   ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, 0, size_in_bits-1), FALSE, "h264_bitstream_write_ue failed.\n");
   ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, value, size_in_bits), FALSE, "h264_bitstream_write_ue failed.\n");
-  return TRUE;
 
 end:
-  return FALSE;
+  return ret;
 }
 
 static gboolean
-h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value)
+h264_bitstream_write_se(H264Bitstream *bitstream, gint32 value)
 {
   gboolean ret = TRUE;
   guint32 new_val;
@@ -1107,10 +1719,9 @@ h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value)
   }
 
   ENCODER_CHECK_STATUS(h264_bitstream_write_ue(bitstream, new_val), FALSE, "h264_bitstream_write_se failed.\n");
-  return TRUE;
 
-  end:
-  return FALSE;
+end:
+  return ret;
 }
 
 static gboolean
@@ -1154,89 +1765,166 @@ h264_bitstream_auto_grow(H264Bitstream *bitstream, guint32 extra_bit_size)
 }
 
 static gboolean
-h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+h264_bitstream_write_nal_header(H264Bitstream *bitstream,
+                                 guint nal_ref_idc, guint nal_unit_type)
 {
-  guint32 constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
-  guint32 seq_parameter_set_id = 0;
+  h264_bitstream_write_uint(bitstream, 0, 1);
+  h264_bitstream_write_uint(bitstream, nal_ref_idc, 2);
+  h264_bitstream_write_uint(bitstream, nal_unit_type, 5);
+  return TRUE;
+}
 
-  /*need to set the values*/
-  guint32 log2_max_frame_num_minus4 = 0;  // 1? 3?
-  guint32 pic_order_cnt_type = 0; // Must be 0
-  guint32 log2_max_pic_order_cnt_lsb_minus4 = 0;  // 2 ? 4?
-  guint32 num_ref_frames = 1;  // only P frames
+static gboolean
+h264_bitstream_write_sps(H264Bitstream *bitstream,
+                     VAEncSequenceParameterBufferH264 *seq)
+{
+  guint32 constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
   guint32 gaps_in_frame_num_value_allowed_flag = 0; // ??
-  guint32 mb_width = (ENCODER_WIDTH(h264_prv->public)+15)/16; // mb_width
-  guint32 mb_height = (ENCODER_HEIGHT(h264_prv->public)+15)/16; // mb_height
-  guint32 frame_mbs_only_flag = 1; // only mbs
-  guint32 frame_cropping_flag = 0;
-  guint32 frame_crop_bottom_offset = 0;
-  guint32 vui_present_flag = 0; // no vui flags
 
+  guint32 b_qpprime_y_zero_transform_bypass = (seq->rate_control_method == BR_CQP);
+  guint32 residual_color_transform_flag = 0;
+  guint32 pic_height_in_map_units = (seq->frame_mbs_only_flag ?
+                                    seq->picture_height_in_mbs :
+                                    seq->picture_height_in_mbs/2);
+  guint32 mb_adaptive_frame_field = !seq->frame_mbs_only_flag;
+  guint32 i = 0;
 
-  constraint_set0_flag = h264_prv->public->profile == H264_PROFILE_BASELINE;
-  constraint_set1_flag = h264_prv->public->profile <= H264_PROFILE_MAIN;
+  constraint_set0_flag = seq->profile_idc == H264_PROFILE_BASELINE;
+  constraint_set1_flag = seq->profile_idc <= H264_PROFILE_MAIN;
   constraint_set2_flag = 0;
   constraint_set3_flag = 0;
 
-  if (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) {
-    frame_cropping_flag = 1;
-    frame_crop_bottom_offset =
-        (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) / (2 * (!frame_mbs_only_flag + 1));
-  }
-
-  h264_bitstream_write_uint(bitstream, h264_prv->public->profile, 8); /* profile_idc */
+  h264_bitstream_write_uint(bitstream, seq->profile_idc, 8);         /* profile_idc */
   h264_bitstream_write_uint(bitstream, constraint_set0_flag, 1);     /* constraint_set0_flag */
   h264_bitstream_write_uint(bitstream, constraint_set1_flag, 1);     /* constraint_set1_flag */
   h264_bitstream_write_uint(bitstream, constraint_set2_flag, 1);     /* constraint_set2_flag */
   h264_bitstream_write_uint(bitstream, constraint_set3_flag, 1);     /* constraint_set3_flag */
   h264_bitstream_write_uint(bitstream, 0, 4);                        /* reserved_zero_4bits */
-  h264_bitstream_write_uint(bitstream, h264_prv->public->level, 8);   /* level_idc */
-  h264_bitstream_write_ue(bitstream, seq_parameter_set_id);          /* seq_parameter_set_id */
+  h264_bitstream_write_uint(bitstream, seq->level_idc, 8);   /* level_idc */
+  h264_bitstream_write_ue(bitstream, seq->seq_parameter_set_id);     /* seq_parameter_set_id */
 
-  if (h264_prv->public->profile >= H264_PROFILE_HIGH) {
-      /* FIXME: fix for high profile */
-      ENCODER_ASSERT(0);
+  if (seq->profile_idc >= H264_PROFILE_HIGH) {
+    /* for high profile */
+    ENCODER_ASSERT(0);
+    h264_bitstream_write_ue(bitstream, seq->seq_fields.bits.chroma_format_idc); /* chroma_format_idc  = 1, 4:2:0*/
+    if (3 == seq->seq_fields.bits.chroma_format_idc) {
+      h264_bitstream_write_uint(bitstream, residual_color_transform_flag, 1);
+    }
+    h264_bitstream_write_ue(bitstream, seq->bit_depth_luma_minus8); /* bit_depth_luma_minus8 */
+    h264_bitstream_write_ue(bitstream, seq->bit_depth_chroma_minus8); /* bit_depth_chroma_minus8 */
+    h264_bitstream_write_uint(bitstream, b_qpprime_y_zero_transform_bypass, 1); /* b_qpprime_y_zero_transform_bypass */
+    ENCODER_ASSERT(seq->seq_fields.bits.seq_scaling_matrix_present_flag == 0);
+    h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.seq_scaling_matrix_present_flag, 1); /*seq_scaling_matrix_present_flag  */
+
+    if (seq->seq_fields.bits.seq_scaling_matrix_present_flag) {
+      for (i = 0; i < (seq->seq_fields.bits.chroma_format_idc != 3 ? 8 : 12); i++) {
+        h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.seq_scaling_list_present_flag, 1);
+        if (seq->seq_fields.bits.seq_scaling_list_present_flag) {
+          ENCODER_ASSERT(0);
+          /* FIXME, need write scaling list if seq_scaling_matrix_present_flag ==1*/
+        }
+      }
+    }
   }
 
-  h264_bitstream_write_ue(bitstream, log2_max_frame_num_minus4);    /* log2_max_frame_num_minus4 */
-  h264_bitstream_write_ue(bitstream, pic_order_cnt_type);           /* pic_order_cnt_type */
+  h264_bitstream_write_ue(bitstream, seq->log2_max_frame_num_minus4);    /* log2_max_frame_num_minus4 */
+  h264_bitstream_write_ue(bitstream, seq->pic_order_cnt_type);           /* pic_order_cnt_type */
 
-  if (pic_order_cnt_type == 0)
-      h264_bitstream_write_ue(bitstream, log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
-  else {
-      ENCODER_ASSERT(0);
+  if (seq->pic_order_cnt_type == 0)
+    h264_bitstream_write_ue(bitstream, seq->log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
+  else if (seq->pic_order_cnt_type == 1) {
+    ENCODER_ASSERT(0);
+    h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.delta_pic_order_always_zero_flag, 1);
+    h264_bitstream_write_se(bitstream, seq->offset_for_non_ref_pic);
+    h264_bitstream_write_se(bitstream, seq->offset_for_top_to_bottom_field);
+    h264_bitstream_write_ue(bitstream, seq->num_ref_frames_in_pic_order_cnt_cycle);
+    for ( i = 0; i < seq->num_ref_frames_in_pic_order_cnt_cycle; i++) {
+      h264_bitstream_write_se(bitstream, seq->offset_for_ref_frame[i]);
+    }
   }
 
-  h264_bitstream_write_ue(bitstream, num_ref_frames);                            /* num_ref_frames */
+  h264_bitstream_write_ue(bitstream, seq->max_num_ref_frames);                   /* num_ref_frames */
   h264_bitstream_write_uint(bitstream, gaps_in_frame_num_value_allowed_flag, 1); /* gaps_in_frame_num_value_allowed_flag */
 
-  h264_bitstream_write_ue(bitstream, mb_width - 1);              /* pic_width_in_mbs_minus1 */
-  h264_bitstream_write_ue(bitstream, mb_height - 1);             /* pic_height_in_map_units_minus1 */
-  h264_bitstream_write_uint(bitstream, frame_mbs_only_flag, 1);  /* frame_mbs_only_flag */
+  h264_bitstream_write_ue(bitstream, seq->picture_width_in_mbs - 1);  /* pic_width_in_mbs_minus1 */
+  h264_bitstream_write_ue(bitstream, pic_height_in_map_units - 1);    /* pic_height_in_map_units_minus1 */
+  h264_bitstream_write_uint(bitstream, seq->frame_mbs_only_flag, 1);  /* frame_mbs_only_flag */
 
-  if (!frame_mbs_only_flag) { //ONLY mbs
+  if (!seq->frame_mbs_only_flag) { //ONLY mbs
       ENCODER_ASSERT(0);
+      h264_bitstream_write_uint(bitstream, mb_adaptive_frame_field, 1);
   }
 
-  h264_bitstream_write_uint(bitstream, 0, 1);                         /* direct_8x8_inference_flag */
-  h264_bitstream_write_uint(bitstream, frame_cropping_flag, 1);       /* frame_cropping_flag */
+  h264_bitstream_write_uint(bitstream, 0, 1);                           /* direct_8x8_inference_flag */
+  h264_bitstream_write_uint(bitstream, seq->frame_cropping_flag, 1);    /* frame_cropping_flag */
 
-  if (frame_cropping_flag) {
-      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_left_offset */
-      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_right_offset */
-      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_top_offset */
-      h264_bitstream_write_ue(bitstream, frame_crop_bottom_offset); /* frame_crop_bottom_offset */
+  if (seq->frame_cropping_flag) {
+      h264_bitstream_write_ue(bitstream, seq->frame_crop_left_offset);  /* frame_crop_left_offset */
+      h264_bitstream_write_ue(bitstream, seq->frame_crop_right_offset); /* frame_crop_right_offset */
+      h264_bitstream_write_ue(bitstream, seq->frame_crop_top_offset);   /* frame_crop_top_offset */
+      h264_bitstream_write_ue(bitstream, seq->frame_crop_bottom_offset); /* frame_crop_bottom_offset */
   }
-
-  h264_bitstream_write_uint(bitstream, vui_present_flag, 1);                         /* vui_parameters_present_flag */
-  h264_bitstream_write_trailing_bits(bitstream);                             /* rbsp_trailing_bits */
+  ENCODER_ASSERT(seq->vui_flag == 0);
+  h264_bitstream_write_uint(bitstream, seq->vui_flag, 1);               /* vui_parameters_present_flag */
+  if (seq->vui_flag) {
+    /*FIXME, to write vui parameters*/
+  }
+  h264_bitstream_write_trailing_bits(bitstream);                        /* rbsp_trailing_bits */
   return TRUE;
+}
 
-  //end:
-  //return FALSE;
 
+static gboolean
+h264_bitstream_write_pps(H264Bitstream *bitstream,
+                        VAEncPictureParameterBufferH264 *pic)
+{
+  guint32 num_slice_groups_minus1 = 0;
+  guint32 pic_init_qs_minus26 = 0;
+  guint32 redundant_pic_cnt_present_flag = 0;
+
+  h264_bitstream_write_ue(bitstream, pic->pic_parameter_set_id); /* pic_parameter_set_id */
+  h264_bitstream_write_ue(bitstream, pic->seq_parameter_set_id); /* seq_parameter_set_id */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_order_present_flag, 1); /* pic_order_present_flag */
+  h264_bitstream_write_ue(bitstream, num_slice_groups_minus1); /*slice_groups-1*/
+
+  if (num_slice_groups_minus1 > 0) {
+    /*FIXME*/
+    ENCODER_ASSERT(0);
+  }
+  h264_bitstream_write_ue(bitstream, pic->num_ref_idx_l0_active_minus1);
+  h264_bitstream_write_ue(bitstream, pic->num_ref_idx_l1_active_minus1);
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.weighted_pred_flag, 1);
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.weighted_bipred_idc, 2);
+  h264_bitstream_write_se(bitstream, pic->pic_init_qp-26);  /* pic_init_qp_minus26 */
+  h264_bitstream_write_se(bitstream, pic_init_qs_minus26);  /* pic_init_qs_minus26 */
+  h264_bitstream_write_se(bitstream, pic->chroma_qp_index_offset); /*chroma_qp_index_offset*/
+
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.deblocking_filter_control_present_flag, 1);
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.constrained_intra_pred_flag, 1);
+  h264_bitstream_write_uint(bitstream, redundant_pic_cnt_present_flag, 1);
+
+  /*more_rbsp_data*/
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.transform_8x8_mode_flag, 1);
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_scaling_matrix_present_flag, 1);
+  if (pic->pic_fields.bits.pic_scaling_matrix_present_flag) {
+    ENCODER_ASSERT(0);
+    /* FIXME */
+    /*
+    for (i = 0; i <
+      (6+(-( (chroma_format_idc ! = 3) ? 2 : 6) * -pic->pic_fields.bits.transform_8x8_mode_flag));
+      i++) {
+      h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_scaling_list_present_flag, 1);
+    }
+    */
+  }
+
+  h264_bitstream_write_se(bitstream, pic->second_chroma_qp_index_offset);
+  h264_bitstream_write_trailing_bits(bitstream);
+  return TRUE;
 }
 
+
 static const guint8 *
 h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size)
 {
@@ -1284,14 +1972,6 @@ h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size)
     return nal_start;
 }
 
-
-static gboolean
-h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
-{
-  ENCODER_ASSERT(0);
-  return TRUE;
-}
-
 static int draw_picture(int width, int height,
                          unsigned char *Y_start,
                          unsigned char *U_start,
index 694c81a..1739250 100644 (file)
@@ -69,6 +69,7 @@ struct _GstH264Encoder {
   guint32         init_qp;  /*default 24*/
   guint32         min_qp;   /*default 1*/
   guint32         slice_num;
+  guint32         b_frame_num;
 };
 
 struct _GstH264EncoderClass {
index 2fe1333..4c84ad6 100644 (file)
@@ -39,8 +39,6 @@ static void gst_mpeg4encode_set_property(GObject *object, guint prop_id,
     const GValue *value, GParamSpec *pspec);
 static void gst_mpeg4encode_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
-static gboolean _mpeg4_check_valid_profile(guint profile);
-static gboolean _mpeg4_check_valid_level(guint level);
 
 
 /* mpeg4 encode */
@@ -84,7 +82,6 @@ static void
 gst_mpeg4encode_class_init(GstMPEG4EncodeClass *klass)
 {
   GObjectClass * const object_class = G_OBJECT_CLASS(klass);
-  GstVaapiEncodeClass * const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
 
   GST_DEBUG_CATEGORY_INIT (gst_vaapi_mpeg4_encode_debug, "vaapimpeg4encode", 0,
       "vaapimpeg4encode element");
@@ -140,7 +137,6 @@ gst_mpeg4encode_class_init(GstMPEG4EncodeClass *klass)
 static void
 gst_mpeg4encode_init(GstMPEG4Encode *mpeg4_encode, GstMPEG4EncodeClass *klass)
 {
-  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
   GstVaapiEncode *encode = GST_VAAPI_ENCODE(mpeg4_encode);
   encode->encoder = GST_VAAPI_ENCODER(gst_mpeg4_encoder_new());
   ENCODER_ASSERT(encode->encoder);
index 6f44071..3ac9284 100644 (file)
@@ -45,7 +45,7 @@ static gboolean      gst_mpeg4_encoder_release_resource(
 static void          gst_mpeg4_notify_frame(GstVaapiBaseEncoder *encoder,
                          guint8 *buf, guint32 size);
 
-static EncoderStatus gst_mpeg4_prepare_encoding(GstVaapiBaseEncoder *encoder,
+static EncoderStatus gst_mpeg4_encoder_rendering(GstVaapiBaseEncoder *encoder,
                        GstVaapiDisplay *display, GstVaapiContext *context,
                        GstVaapiSurface *surface, guint frame_index,
                        VABufferID coded_buf, gboolean *is_key);
@@ -82,7 +82,7 @@ gst_mpeg4_encoder_class_init(GstMPEG4EncoderClass *klass)
   base_class->validate_attributes = gst_mpeg4_validate_parameters;
   base_class->pre_alloc_resource  = NULL;
   base_class->release_resource    = gst_mpeg4_encoder_release_resource;
-  base_class->prepare_frame = gst_mpeg4_prepare_encoding;
+  base_class->render_frame = gst_mpeg4_encoder_rendering;
   base_class->notify_frame = gst_mpeg4_notify_frame;
   base_class->copy_coded_frame = NULL;
 
@@ -96,7 +96,6 @@ static void
 gst_mpeg4_encoder_init(GstMPEG4Encoder *mpeg4_encoder)
 {
   GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
-  GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(mpeg4_encoder);
   ENCODER_ASSERT(mpeg4_prv);
 
   /* init public */
@@ -123,7 +122,6 @@ gst_mpeg4_encoder_finalize(GObject *object)
 {
   /*free private buffers*/
   GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
-  GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(object);
 
   if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
     gst_vaapi_encoder_uninitialize(encoder);
@@ -168,6 +166,8 @@ mpeg4_release_parameters(GstMPEG4Encoder *mpeg4_encoder, GstVaapiDisplay *displa
   VADisplay va_dpy = gst_vaapi_display_get_display(display);
   VAStatus va_status = VA_STATUS_SUCCESS;
 
+  VAAPI_UNUSED_ARG(va_status);
+
   if (VA_INVALID_ID != mpeg4_prv->seq_parameter) {
     va_status = vaDestroyBuffer(va_dpy, mpeg4_prv->seq_parameter);
     mpeg4_prv->seq_parameter = VA_INVALID_ID;
@@ -236,7 +236,7 @@ mpeg4_get_profile_level_indication(guint32 profile)
 
 
 static EncoderStatus
-gst_mpeg4_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+gst_mpeg4_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
                        GstVaapiContext *context, GstVaapiSurface *surface,
                        guint frame_index, VABufferID coded_buf, gboolean *is_key)
 {
@@ -343,8 +343,9 @@ end:
   return ret;
 }
 
+#if 0
 static GstBuffer *
-gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
+gst_mpeg4_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
             guint8 *frame, guint32 frame_size, VABufferID *coded_buf)
 
 {
@@ -352,11 +353,10 @@ gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
   GstBuffer* buffer = gst_buffer_new_and_alloc(frame_size);
   memcpy(GST_BUFFER_DATA(buffer), frame, frame_size);
 
-  #if 1
+  #if 0
   GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
-  GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
   if (mpeg4_encoder->profile == VAProfileMPEG4AdvancedSimple) {
-    guint8 *start_code = GST_BUFFER_DATA(buffer)+16;
+    guint8 *start_code = GST_BUFFER_DATA(buffer)+16; /* fix an old ASP issue on the mrst platform */
     if (start_code[0] == 0x01 && start_code[1] == 0x20
         && start_code[-1] == 0x00 && start_code[-2] == 0x00)
     {
@@ -367,6 +367,7 @@ gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
 
   return buffer;
 }
+#endif
 
 static void
 gst_mpeg4_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size)
@@ -388,7 +389,6 @@ gst_mpeg4_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
                        GstVaapiContext *context, GList **coded_pics)
 {
   GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
-  GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
 
   mpeg4_release_parameters(mpeg4_encoder, display);
   return ENCODER_NO_ERROR;