vaapiencode: add initial H.264 encoder element (h264encoder)
author    Wind Yuan <feng.yuan@intel.com>
          Mon, 19 Sep 2011 01:47:36 +0000 (09:47 +0800)
committer Zhong Cong <congx.zhong@intel.com>
          Tue, 5 Feb 2013 07:37:10 +0000 (15:37 +0800)
gst/vaapiencode/gsth264encode.c [new file with mode: 0644]
gst/vaapiencode/gsth264encode.h [new file with mode: 0644]
gst/vaapiencode/h264encoder.c [new file with mode: 0644]
gst/vaapiencode/h264encoder.h [new file with mode: 0644]

diff --git a/gst/vaapiencode/gsth264encode.c b/gst/vaapiencode/gsth264encode.c
new file mode 100644 (file)
index 0000000..e7edf4c
--- /dev/null
@@ -0,0 +1,473 @@
+#include "gsth264encode.h"
+
+#include <string.h>
+#include <X11/Xlib.h>
+
+#include "gst/vaapi/gstvaapivideobuffer.h"
+
+
+#define PACKAGE "libgsth264encode"
+#define VERSION "0.1.0"
+
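+/* Error-check helper: asserts `exp` and, on failure, stores `err_num` in the
+ * caller's `ret_num`, logs `err_reason`, and jumps to the caller's `finish:`
+ * label.  Callers must provide both `ret_num` and a `finish:` label. */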
+#define GST_H264_ENCODE_CHECK_STATUS(exp, err_num, err_reason, ...)  \
+  do {                                          \
+    H264_ASSERT(exp);                           \
+    if (!(exp)) {                               \
+      ret_num = err_num;                        \
+      H264_LOG_ERROR(err_reason, ## __VA_ARGS__); \
+      goto finish;                              \
+    }                                           \
+  } while (0)
+
+
+#define GST_H264ENCODE_GET_PRIVATE(obj)  (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_H264ENCODE, GstH264EncodePrivate))
+
+typedef struct _GstH264EncodePrivate GstH264EncodePrivate;
+
+static const GstElementDetails gst_h264encode_details =
+    GST_ELEMENT_DETAILS(
+        "VA-API h264 encoder",
+        "Codec/Encoder/Video",
+        "A VA-API based h264 encoder",
+        "Yuan Feng <feng.yuan@intel.com>");
+
+/* Default templates */
+#define GST_CAPS_CODEC(CODEC)                   \
+    CODEC ", "                                  \
+    "width  = (int) [ 1, MAX ], "               \
+    "height = (int) [ 1, MAX ]; "
+
+static const char gst_h264encode_sink_caps_str[] =
+    GST_CAPS_CODEC("video/x-raw-yuv, " "format = (fourcc) { I420 } ")
+    GST_CAPS_CODEC("video/x-raw-yuv, " "format = (fourcc) { NV12 } ")
+    GST_CAPS_CODEC("video/x-vaapi-surface ")
+    GST_CAPS_CODEC("video/x-raw-va")
+    ;
+
+static const char gst_h264encode_src_caps_str[] =
+    GST_CAPS_CODEC("video/x-h264");
+
+static GstStaticPadTemplate gst_h264encode_sink_factory =
+    GST_STATIC_PAD_TEMPLATE(
+        "sink",
+        GST_PAD_SINK,
+        GST_PAD_ALWAYS,
+        GST_STATIC_CAPS(gst_h264encode_sink_caps_str));
+
+static GstStaticPadTemplate gst_h264encode_src_factory =
+    GST_STATIC_PAD_TEMPLATE(
+        "src",
+        GST_PAD_SRC,
+        GST_PAD_ALWAYS,
+        GST_STATIC_CAPS(gst_h264encode_src_caps_str));
+
+GST_BOILERPLATE(
+    GstH264Encode,
+    gst_h264encode,
+    GstElement,
+    GST_TYPE_ELEMENT);
+
+enum {
+    PROP_0,
+};
+
+
+/* forward declarations */
+static void gst_h264encode_finalize(GObject *object);
+static void gst_h264encode_set_property(GObject *object, guint prop_id,
+    const GValue *value, GParamSpec *pspec);
+static void gst_h264encode_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+static gboolean gst_h264encode_set_caps(GstPad *sink_pad, GstCaps *caps);
+static GstCaps *gst_h264encode_get_caps(GstPad *sink_pad);
+static GstFlowReturn gst_h264encode_chain(GstPad *sink_pad, GstBuffer *buf);
+static GstStateChangeReturn gst_h264encode_change_state(GstElement *element, GstStateChange transition);
+static GstFlowReturn gst_h264encode_buffer_alloc(GstPad * pad, guint64 offset, guint size,
+                           GstCaps * caps, GstBuffer ** buf);
+
+static char* _h264_dump_caps(GstCaps *caps);
+
+/* GStreamer element boilerplate */
+
+static void
+gst_h264encode_base_init(gpointer klass)
+{
+  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+  gst_element_class_set_details(element_class, &gst_h264encode_details);
+
+  /* sink pad */
+  gst_element_class_add_pad_template(
+      element_class,
+      gst_static_pad_template_get(&gst_h264encode_sink_factory)
+  );
+
+  /* src pad */
+  gst_element_class_add_pad_template(
+      element_class,
+      gst_static_pad_template_get(&gst_h264encode_src_factory)
+  );
+}
+
+
+static void
+gst_h264encode_class_init(GstH264EncodeClass *klass)
+{
+  GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+  object_class->finalize      = gst_h264encode_finalize;
+  object_class->set_property  = gst_h264encode_set_property;
+  object_class->get_property  = gst_h264encode_get_property;
+
+  element_class->change_state = gst_h264encode_change_state;
+}
+
+static void
+gst_h264encode_finalize(GObject *object)
+{
+  GstH264Encode * const encode = GST_H264ENCODE(object);
+
+  if (encode->sinkpad_caps) {
+    gst_caps_unref(encode->sinkpad_caps);
+    encode->sinkpad_caps = NULL;
+  }
+  encode->sinkpad = NULL;
+
+  if (encode->srcpad_caps) {
+    gst_caps_unref(encode->srcpad_caps);
+    encode->srcpad_caps = NULL;
+  }
+  encode->srcpad = NULL;
+
+  if (encode->encoder) {
+      gst_h264_encoder_close(encode->encoder);
+      gst_h264_encoder_uninitialize(encode->encoder);
+      gst_h264_encoder_unref(encode->encoder);
+      encode->encoder = NULL;
+  }
+
+  if (encode->x_display) {
+    XCloseDisplay(encode->x_display);
+    encode->x_display = NULL;
+  }
+
+  G_OBJECT_CLASS(parent_class)->finalize(object);
+}
+
+static void
+gst_h264encode_init(GstH264Encode *encode, GstH264EncodeClass *klass)
+{
+  GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+  encode->sinkpad_caps       = NULL;
+  encode->srcpad_caps        = NULL;
+  encode->first_sink_frame   = TRUE;
+  encode->first_src_frame    = TRUE;
+  encode->x_display = NULL;
+
+  encode->encoder = gst_h264_encoder_new();
+  H264_ASSERT(encode->encoder);
+
+  /*sink pad */
+  encode->sinkpad = gst_pad_new_from_template(
+      gst_element_class_get_pad_template(element_class, "sink"),
+      "sink"
+  );
+  gst_pad_set_getcaps_function(encode->sinkpad, gst_h264encode_get_caps);
+  gst_pad_set_setcaps_function(encode->sinkpad, gst_h264encode_set_caps);
+  gst_pad_set_chain_function(encode->sinkpad, gst_h264encode_chain);
+  gst_pad_set_bufferalloc_function(encode->sinkpad, gst_h264encode_buffer_alloc);
+  /*gst_pad_set_event_function(encode->sinkpad, gst_h264encode_sink_event); */
+  /*gst_pad_use_fixed_caps(encode->sinkpad);*/
+  gst_element_add_pad(GST_ELEMENT(encode), encode->sinkpad);
+
+  /* src pad */
+  encode->srcpad = gst_pad_new_from_template(
+      gst_element_class_get_pad_template(element_class, "src"),
+      "src"
+  );
+  encode->srcpad_caps = NULL;
+
+  gst_pad_use_fixed_caps(encode->srcpad);
+  /*gst_pad_set_event_function(encode->srcpad, gst_h264encode_src_event);*/
+  gst_element_add_pad(GST_ELEMENT(encode), encode->srcpad);
+}
+
+
+static void
+gst_h264encode_set_property(GObject *object, guint prop_id,
+    const GValue *value, GParamSpec *pspec)
+{
+}
+
+static void
+gst_h264encode_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+}
+
+static gboolean
+gst_h264encode_set_caps(GstPad *sink_pad, GstCaps *caps)
+{
+  GstH264Encode *encode = GST_H264ENCODE(GST_OBJECT_PARENT(sink_pad));
+  H264_LOG_INFO("gst_h264encode_set_caps,\n%s", _h264_dump_caps(caps));
+  gst_caps_replace(&encode->sinkpad_caps, caps); /* takes a ref, drops any previously set caps */
+  return TRUE;
+}
+
+static GstCaps *
+gst_h264encode_get_caps(GstPad *sink_pad)
+{
+  GstCaps *caps = NULL;
+  GstH264Encode * const encode = GST_H264ENCODE(GST_OBJECT_PARENT(sink_pad));
+  if (encode->sinkpad_caps) {
+    gst_caps_ref(encode->sinkpad_caps);
+    H264_LOG_INFO("get caps,\n%s", _h264_dump_caps(encode->sinkpad_caps));
+    return encode->sinkpad_caps;
+  }
+  caps = gst_caps_copy(gst_pad_get_pad_template_caps(sink_pad));
+  return caps;
+}
+
+static GstStateChangeReturn
+gst_h264encode_change_state(GstElement *element, GstStateChange transition)
+{
+  GstH264Encode * const encode = GST_H264ENCODE(element);
+  GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+  switch (transition) {
+  case GST_STATE_CHANGE_READY_TO_PAUSED:
+    break;
+  case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+    break;
+  default:
+    break;
+  }
+
+  ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
+  if (GST_STATE_CHANGE_FAILURE == ret)
+    return ret;
+
+  switch (transition) {
+  case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+    break;
+  case GST_STATE_CHANGE_PAUSED_TO_READY: {
+    gst_h264_encoder_close(encode->encoder);
+  }
+    break;
+  default:
+    break;
+  }
+  return ret;
+}
+
+
+static GstFlowReturn
+gst_h264encode_chain(GstPad *sink_pad, GstBuffer *buf)
+{
+  GstFlowReturn ret_num = GST_FLOW_OK;
+  GstH264Encode *encode = GST_H264ENCODE(GST_OBJECT_PARENT(sink_pad));
+  H264Status h264ret = H264_NO_ERROR;
+  GList *out_buffers = NULL;
+  GstBuffer *tmp_buffer = NULL;
+
+  static guint input_count = 0;
+  static guint output_count = 0;
+
+  H264_ASSERT(encode && encode->encoder);
+  if (encode->first_sink_frame) {
+    /* get first buffer caps and set encoder values */
+    GstStructure *recv_struct, *src_struct;
+    GstCaps *recv_caps = GST_BUFFER_CAPS(buf);
+    gint width, height;
+    GValue const *framerate, *format_value;
+    gint fps_n, fps_d;
+    guint32 format;
+
+    H264_LOG_INFO("gst_h264encode_chain 1st recv-buffer caps,\n%s", _h264_dump_caps(recv_caps));
+
+    recv_struct = gst_caps_get_structure (recv_caps, 0);
+    GST_H264_ENCODE_CHECK_STATUS(NULL != recv_caps, GST_FLOW_ERROR, "gst_h264encode_chain, 1st buffer didn't have detailed caps.\n");
+    gst_structure_get_int (recv_struct, "width", &width);
+    gst_structure_get_int (recv_struct, "height", &height);
+    framerate = gst_structure_get_value (recv_struct, "framerate");
+    fps_n = gst_value_get_fraction_numerator (framerate);
+    fps_d = gst_value_get_fraction_denominator (framerate);
+    format_value = gst_structure_get_value (recv_struct, "format");
+    GST_H264_ENCODE_CHECK_STATUS(format_value && GST_TYPE_FOURCC == G_VALUE_TYPE(format_value),
+                                 GST_FLOW_ERROR, "1st buffer caps' format type is not fourcc.\n");
+    foramt = gst_value_get_fourcc (format_value);
+
+    encode->encoder->profile = 66;              /* profile_idc 66: Baseline */
+    encode->encoder->level = 30;                /* level_idc 30: level 3.0 */
+    encode->encoder->width = width;
+    encode->encoder->height = height;
+    encode->encoder->frame_rate = fps_n/fps_d;  /* integer fps, fractional rates are truncated */
+    encode->encoder->bitrate = 3*1000*1000;     /* 3 Mbps */
+    encode->encoder->intra_period = 30;
+
+    /*set src pad caps*/
+    if (encode->srcpad_caps) {
+      gst_caps_unref(encode->srcpad_caps);
+    }
+    encode->srcpad_caps = gst_caps_copy(gst_pad_get_pad_template_caps(encode->srcpad));
+    src_struct = gst_caps_get_structure(encode->srcpad_caps, 0);
+    gst_structure_set(src_struct, "width", G_TYPE_INT, width,
+                      "height", G_TYPE_INT, height,
+                      "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
+
+    /*set display and initialize encoder*/
+    gst_h264_encoder_set_input_format(encode->encoder, format);
+    if (GST_VAAPI_IS_VIDEO_BUFFER(buf)) {
+      GstVaapiDisplay *display = NULL;
+      GstVaapiVideoBuffer *video_buffer = GST_VAAPI_VIDEO_BUFFER(buf);
+      H264_ASSERT(video_buffer);
+      display = gst_vaapi_video_buffer_get_display(video_buffer);
+      if (display) {
+        GST_H264_ENCODE_CHECK_STATUS(gst_h264_encoder_set_display(encode->encoder,display)
+                                    , GST_FLOW_ERROR, "set display failed in gst_h264encode_chain.\n");
+      }
+    }
+    h264ret = gst_h264_encoder_initialize(encode->encoder);
+    GST_H264_ENCODE_CHECK_STATUS (H264_NO_ERROR == h264ret, GST_FLOW_ERROR, "h264_encoder_initialize failed.\n");
+
+    h264ret = gst_h264_encoder_open(encode->encoder);
+    GST_H264_ENCODE_CHECK_STATUS (H264_NO_ERROR == h264ret, GST_FLOW_ERROR, "gst_h264_encoder_open failed.\n");
+
+    encode->first_sink_frame = FALSE;
+  }
+
+  /*encoding frames*/
+  H264_ASSERT(gst_h264_encoder_get_state(encode->encoder) >= H264_ENC_OPENED);
+  ++input_count;
+  H264_LOG_INFO("input %d\n", input_count);
+  h264ret = gst_h264_encoder_encode(encode->encoder, buf, &out_buffers);
+  GST_H264_ENCODE_CHECK_STATUS (H264_NO_ERROR == h264ret, GST_FLOW_ERROR, "h264_encoder_encode failed.\n");
+
+  /*check results*/
+  while (out_buffers) {
+    tmp_buffer = out_buffers->data;
+    out_buffers = g_list_remove(out_buffers, tmp_buffer);
+    /*out_buffers = g_list_next(out_buffers);*/
+    if (encode->first_src_frame) {
+      GstBuffer *codec_data;
+      H264_ASSERT(encode->srcpad_caps);
+      /*replace codec data in src pad caps*/
+      if (H264_NO_ERROR == gst_h264_encoder_get_avcC_codec_data(encode->encoder, &codec_data)) {
+        gst_caps_set_simple(encode->srcpad_caps, "codec_data",GST_TYPE_BUFFER, codec_data, NULL);
+      }
+      gst_pad_set_caps (encode->srcpad, encode->srcpad_caps);
+      GST_BUFFER_CAPS(tmp_buffer) = gst_caps_ref(encode->srcpad_caps);
+      H264_LOG_INFO("gst_h264encode_chain 1st push-buffer caps,\n%s", _h264_dump_caps(encode->srcpad_caps));
+      encode->first_src_frame = FALSE;
+    }
+    ++output_count;
+    H264_LOG_INFO("output:%d, %" GST_TIME_FORMAT ", 0x%s\n",
+                   output_count,
+                   GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(tmp_buffer)),
+                   h264_dump_bytes(GST_BUFFER_DATA(tmp_buffer),
+                                  (GST_BUFFER_SIZE(tmp_buffer) > 16? 16: GST_BUFFER_SIZE(tmp_buffer))));
+    gst_pad_push(encode->srcpad, tmp_buffer);
+  }
+
+finish:
+  gst_mini_object_unref(GST_MINI_OBJECT(buf));
+  return ret_num;
+
+}
+
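+/* Pad bufferalloc: hands out a GstVaapiVideoBuffer (backed by a VA surface)
+ * when the negotiated caps are video/x-vaapi-surface, otherwise a plain
+ * system-memory buffer of the requested size. */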
+static GstFlowReturn
+gst_h264encode_buffer_alloc(GstPad * pad, guint64 offset, guint size,
+                           GstCaps * caps, GstBuffer ** buf)
+{
+  GstH264Encode * const encode = GST_H264ENCODE(GST_OBJECT_PARENT(pad));
+  GstStructure *structure;
+  GstBuffer *buffer;
+  GstVaapiDisplay* display = NULL;
+  GstFlowReturn ret_num = GST_FLOW_ERROR;
+
+  structure = gst_caps_get_structure(caps, 0);
+  if (gst_structure_has_name(structure, "video/x-vaapi-surface")) {
+    H264_ASSERT(encode->encoder);
+    display = gst_h264_encoder_get_display(encode->encoder);
+    if (!display) {
+      gst_h264_encoder_initialize(encode->encoder);
+      display = gst_h264_encoder_get_display(encode->encoder);
+      GST_H264_ENCODE_CHECK_STATUS(display, GST_FLOW_ERROR, "gst_h264_encoder_get_display failed in gst_h264encode_buffer_alloc.\n");
+    }
+    buffer = gst_vaapi_video_buffer_new(display);
+  } else { /* video/x-raw-yuv */
+    buffer = gst_buffer_new_and_alloc(size);
+  }
+
+  GST_H264_ENCODE_CHECK_STATUS(buffer, GST_FLOW_ERROR, "gst_h264encode_buffer_alloc failed.\n");
+
+  GST_BUFFER_OFFSET (buffer) = offset;
+  gst_buffer_set_caps(buffer, caps);
+  *buf = buffer;
+  ret_num = GST_FLOW_OK;
+
+finish:
+  if (display) {
+    g_object_unref(display);
+  }
+  return ret_num;
+}
+
+
+
+static char*
+_h264_dump_caps(GstCaps *caps)
+{
+  guint i = 0, j = 0;
+  GstStructure const *structure;
+  GValue const *value;
+  static char caps_string[4096*5];  /* static scratch buffer: debug use only, not thread-safe */
+  char *tmp;
+
+  char *cur = caps_string;
+  memset(caps_string, 0, sizeof(caps_string));
+  for (i = 0; i < gst_caps_get_size(caps); i++) {
+    structure = gst_caps_get_structure(caps, i);
+    const char* caps_name = gst_structure_get_name (structure);
+    sprintf(cur, "cap_%02d:%s\n", i, caps_name);
+    cur += strlen(cur);
+
+    for (j = 0; j < gst_structure_n_fields(structure); j++) {
+      const char* name = gst_structure_nth_field_name(structure, j);
+      value = gst_structure_get_value(structure, name);
+      tmp = gst_value_serialize(value);
+      sprintf(cur, "\t%s:%s(%s)\n", name, tmp, G_VALUE_TYPE_NAME(value));
+      cur += strlen(cur);
+      g_free(tmp);
+    }
+  }
+
+  return caps_string;
+}
+
+
+
+
+/* plugin register*/
+static gboolean
+h264encode_init (GstPlugin * plugin)
+{
+  return gst_element_register (plugin, "vah264encode", GST_RANK_PRIMARY,
+      GST_TYPE_H264ENCODE);
+}
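+/* Hypothetical usage sketch (not verified against this patch): once the
+ * plugin below is installed, the encoder element could be exercised with a
+ * GStreamer 0.10 pipeline such as
+ *
+ *   gst-launch-0.10 videotestsrc num-buffers=100 ! \
+ *     "video/x-raw-yuv,format=(fourcc)I420,width=320,height=240,framerate=30/1" ! \
+ *     vah264encode ! filesink location=out.264
+ *
+ * using the "vah264encode" element name registered in h264encode_init(). */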
+
+/* GStreamer looks for this structure to register the vaapiencode plugin */
+GST_PLUGIN_DEFINE (
+    GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    "vaapiencode",
+    "Vaapi Encoder",
+    h264encode_init,
+    VERSION,
+    "LGPL",
+    "GStreamer",
+    "http://gstreamer.net/")
+
+
diff --git a/gst/vaapiencode/gsth264encode.h b/gst/vaapiencode/gsth264encode.h
new file mode 100644 (file)
index 0000000..67329ee
--- /dev/null
@@ -0,0 +1,65 @@
+/*
+ *  gsth264encode.h - VA-API H.264 encoder
+ *
+ *  gstreamer-vaapi (C) 2010-2011 Splitted-Desktop Systems
+ *  Copyright (C) 2011 Intel Corporation
+ *
+ *  This program is free software; you can redistribute it and/or modify
+ *  it under the terms of the GNU General Public License as published by
+ *  the Free Software Foundation; either version 2 of the License, or
+ *  (at your option) any later version.
+ *
+ *  This program is distributed in the hope that it will be useful,
+ *  but WITHOUT ANY WARRANTY; without even the implied warranty of
+ *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ *  GNU General Public License for more details.
+ *
+ *  You should have received a copy of the GNU General Public License
+ *  along with this program; if not, write to the Free Software
+ *  Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA
+ */
+
+#ifndef GST_H264ENCODE_H
+#define GST_H264ENCODE_H
+
+#include <gst/gst.h>
+#include "h264encoder.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_H264ENCODE             (gst_h264encode_get_type())
+#define GST_IS_H264ENCODE(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264ENCODE))
+#define GST_IS_H264ENCODE_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264ENCODE))
+#define GST_H264ENCODE_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264ENCODE, GstH264EncodeClass))
+#define GST_H264ENCODE(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264ENCODE, GstH264Encode))
+#define GST_H264ENCODE_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264ENCODE, GstH264EncodeClass))
+
+
+typedef struct _GstH264Encode        GstH264Encode;
+typedef struct _GstH264EncodeClass   GstH264EncodeClass;
+
+struct _GstH264Encode {
+    GstElement          parent_instance;
+
+    GstPad             *sinkpad;
+    GstCaps            *sinkpad_caps;
+
+    GstPad             *srcpad;
+    GstCaps            *srcpad_caps;
+
+    GstH264Encoder     *encoder;
+    gboolean            first_sink_frame;
+    gboolean            first_src_frame;
+    void               *x_display;
+};
+
+struct _GstH264EncodeClass {
+    GstElementClass     parent_class;
+};
+
+GType gst_h264encode_get_type(void);
+
+G_END_DECLS
+
+#endif /* GST_H264ENCODE_H */
+
diff --git a/gst/vaapiencode/h264encoder.c b/gst/vaapiencode/h264encoder.c
new file mode 100644 (file)
index 0000000..c9d088f
--- /dev/null
@@ -0,0 +1,1736 @@
+
+#include "h264encoder.h"
+
+#include <string.h>
+#include <stdlib.h>
+#include <va/va.h>
+#include "va/va_x11.h"
+#include <X11/Xlib.h>
+#include <glib.h>
+
+#include "gst/gstclock.h"
+#include "gst/gstvalue.h"
+
+#include "gst/vaapi/gstvaapidisplay_x11.h"
+#include "gst/vaapi/gstvaapiobject.h"
+#include "gst/vaapi/gstvaapiobject_priv.h"
+#include "gst/vaapi/gstvaapicontext.h"
+#include "gst/vaapi/gstvaapisurface.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
+
+
+
+#define ENCPRV(encoder) GST_H264_ENCODER_GET_PRIVATE(encoder)
+
+#define H264_CHECK_STATUS(exp, err_num, err_reason, ...)  \
+  do {                                          \
+    H264_ASSERT(exp);                           \
+    if (!(exp)) {                               \
+      ret = err_num;                            \
+      H264_LOG_ERROR(err_reason, ## __VA_ARGS__); \
+      goto error;                               \
+    }                                           \
+  } while (0)
+
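+/* When non-zero, encoded output buffers wrap the mapped VA coded buffer
+ * directly (see GstH264EncodeBuffer); with 0, the coded data is copied into
+ * plain GstBuffers and the VA coded buffer is recycled immediately. */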
+#define SHARE_CODED_BUF         0
+
+#define DEFAULT_SURFACE_NUMBER  3
+#define DEFAULT_CODEDBUF_NUM    5
+#define DEFAULT_SID_INPUT       0 // surface_ids[0]
+
+#define REF_RECON_SURFACE_NUM   2
+
+typedef struct _GstH264EncodeBuffer  GstH264EncodeBuffer;
+
+#define GST_TYPE_H264_ENCODE_BUFFER (gst_h264_encode_buffer_get_type())
+
+typedef enum {
+  NAL_UNKNOWN     = 0,
+  NAL_NON_IDR     = 1,
+  NAL_IDR         = 5,    /* ref_idc != 0 */
+  NAL_SEI         = 6,    /* ref_idc == 0 */
+  NAL_SPS         = 7,
+  NAL_PPS         = 8,
+  NAL_AUD         = 9,
+  NAL_FILLER      = 12,
+}H264_NAL_TYPE;
+
+struct _GstH264EncodeBuffer {
+  GstBuffer           buffer;
+  VABufferID         *coded_id;
+  GstH264EncoderPrivate *encoder;
+};
+
+struct _GstH264EncoderPrivate {
+  GstH264Encoder   *public;
+  uint32_t          format;   /*NV12, I420,*/
+  gboolean          es_flag;  /*elementary flag*/
+
+  /* private data*/
+  GstVaapiDisplay  *vaapi_display;
+  GstVaapiContext  *vaapi_context;
+  GQueue           *video_buffer_caches; /*not used for baseline*/
+
+  GstVaapiSurface  *ref_surface;  /* reference buffer*/
+  GstVaapiSurface  *recon_surface; /* reconstruct buffer*/
+
+  H264_Encode_State encode_state;
+
+  VABufferID        seq_parameter;
+  VABufferID        pic_parameter;
+  VABufferID        slice_parameter;
+
+  VABufferID       *coded_bufs;
+  uint32_t          coded_buf_num;
+  uint32_t          cur_coded_index;
+
+  /*total encoded frames*/
+  uint32_t          frame_count;
+
+  GstBuffer        *sps_data;
+  GstBuffer        *pps_data;
+
+  GMutex           *code_buffer_lock;
+  GCond            *code_buffer_cond;
+  GQueue           *available_code_buffers;
+
+};
+
+G_DEFINE_TYPE(GstH264Encoder, gst_h264_encoder, G_TYPE_OBJECT);
+
+
+// 4096-1
+#define H264_BITSTREAM_ALLOC_ALIGN_MASK 0x0FFF
+
+#define BIT_STREAM_BUFFER(stream)    ((stream)->buffer)
+#define BIT_STREAM_BIT_SIZE(stream)  ((stream)->bit_size)
+
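+/* Minimal growable bitstream writer used to emit the SPS/PPS headers and to
+ * repack coded NAL units; the ue(v)/se(v) helpers below write Exp-Golomb
+ * codes as defined by the H.264 syntax. */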
+struct _H264Bitstream {
+  uint8_t  *buffer;
+  uint32_t  bit_size;
+  uint32_t  max_bit_capability;
+};
+
+typedef struct _H264Bitstream H264Bitstream;
+
+static const uint8_t h264_bit_mask[9] = {0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF};
+
+static GstBufferClass *h264_encode_buffer_parent_class = NULL;
+
+static void gst_h264_encoder_finalize(GObject *object);
+
+static VAProfile h264_get_va_profile(uint32_t profile);
+static H264Status h264_encoder_alloc_buffers(GstH264EncoderPrivate *h264_prv);
+static H264Status h264_encoder_release_buffers(GstH264EncoderPrivate *h264_prv);
+static H264Status h264_put_raw_buffer_to_surface(GstH264EncoderPrivate *h264_prv,
+                                             GstBuffer *raw_pic,
+                                             VASurfaceID surface_id);
+
+static H264Status h264_prepare_encoding(GstH264EncoderPrivate *h264_prv,
+                                             GstBuffer *raw_pic, gboolean is_key,
+                                             VABufferID coded_buf);
+static H264Status h264_query_encoding_status(GstH264EncoderPrivate *h264_prv,
+                                         VASurfaceID surface_id,
+                                         gboolean is_key,
+                                         GstClockTime timestamp,
+                                         GstClockTime duration,
+                                         VABufferID *coded_buf,
+                                         GList **coded_pics);
+static H264Status
+h264_encoder_read_sps_pps(GstH264EncoderPrivate *h264_prv, const uint8_t *buf, uint32_t size);
+static GstBuffer *h264_encoder_create_coded_buffer(GstH264EncoderPrivate *h264_prv,
+                                                   uint8_t *frame,
+                                                   uint32_t frame_size,
+                                                   VABufferID *coded_buf);
+
+
+/*encoded buffer, for SHARE_CODED_BUF */
+static void gst_h264_encode_buffer_class_init (gpointer g_class, gpointer class_data);
+static GType gst_h264_encode_buffer_get_type (void);
+static void gst_h264_encode_buffer_finalize (GstH264EncodeBuffer *h264_buffer);
+static GstH264EncodeBuffer *gst_h264_encode_buffer_new(GstH264EncoderPrivate *h264_prv,
+                                                       VABufferID *coded_id);
+
+/* h264 bitstream functions */
+static void h264_bitstream_init(H264Bitstream *bitstream, uint32_t bit_capability);
+static gboolean h264_bitstream_write_uint(H264Bitstream *bitstream, uint32_t value, uint32_t bit_size);
+static gboolean h264_bitstream_align(H264Bitstream *bitstream, uint32_t value);
+static gboolean h264_bitstream_write_ue(H264Bitstream *bitstream, uint32_t value);
+static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, int32_t value);
+static gboolean h264_bitstream_write_trailing_bits(H264Bitstream *bitstream);
+
+static gboolean h264_bitstream_write_byte_array(H264Bitstream *bitstream, const uint8_t *buf, uint32_t byte_size);
+static void h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag);
+static gboolean h264_bitstream_auto_grow(H264Bitstream *bitstream, uint32_t extra_bit_size);
+static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static const uint8_t *h264_next_nal(const uint8_t *buffer, uint32_t len, uint32_t *nal_size);
+static gboolean h264_read_sps_attributes(const uint8_t *sps_data, uint32_t sps_size,
+                                uint32_t *profile_idc, uint32_t *profile_comp, uint32_t *level_idc);
+
+static void
+gst_h264_encoder_class_init(GstH264EncoderClass *klass)
+{
+  GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+  g_type_class_add_private(klass, sizeof(GstH264EncoderPrivate));
+
+  object_class->finalize = gst_h264_encoder_finalize;
+  /*
+  object_class->set_property = gst_h264_encoder_set_property;
+  object_class->get_property = gst_h264_encoder_get_property;
+  */
+}
+
+
+static void
+gst_h264_encode_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS(g_class);
+
+  h264_encode_buffer_parent_class = g_type_class_peek_parent(g_class);
+  H264_ASSERT(h264_encode_buffer_parent_class);
+
+  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+      gst_h264_encode_buffer_finalize;
+}
+
+
+static GType
+gst_h264_encode_buffer_get_type (void)
+{
+  static GType s_h264_encode_buffer_type = 0;
+  if (G_UNLIKELY (s_h264_encode_buffer_type == 0)) {
+    static const GTypeInfo s_h264_encode_buffer_info = {
+      sizeof(GstBufferClass),
+      NULL,
+      NULL,
+      gst_h264_encode_buffer_class_init,
+      NULL,
+      NULL,
+      sizeof(GstH264EncodeBuffer),
+      0,
+      NULL,
+      NULL
+    };
+    s_h264_encode_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+        "GstH264EncodeBuffer", &s_h264_encode_buffer_info, 0);
+  }
+  return s_h264_encode_buffer_type;
+}
+
+static void
+gst_h264_encode_buffer_finalize (GstH264EncodeBuffer *h264_buffer)
+{
+  GstH264EncoderPrivate *h264_prv = NULL;
+  VABufferID* coded_id = NULL;
+
+  h264_prv = h264_buffer->encoder;
+  coded_id = h264_buffer->coded_id;
+
+  H264_ASSERT(h264_prv);
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+
+  H264_ASSERT(coded_id && VA_INVALID_ID != *coded_id);
+  H264_ASSERT(h264_prv->available_code_buffers);
+
+  g_mutex_lock(h264_prv->code_buffer_lock);
+  /*if (--(*h264_buffer->ref_coded_id) == 0) */
+  {
+    /*g_free(h264_buffer->ref_coded_id);*/
+    vaUnmapBuffer(va_dpy, *coded_id);
+    g_queue_push_head(h264_prv->available_code_buffers, coded_id);
+    g_cond_signal(h264_prv->code_buffer_cond);
+  }
+
+  g_mutex_unlock(h264_prv->code_buffer_lock);
+
+  if (GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize) {
+    GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize(GST_MINI_OBJECT(h264_buffer));
+  }
+}
+
+static GstH264EncodeBuffer *
+gst_h264_encode_buffer_new(GstH264EncoderPrivate *h264_prv,
+                           VABufferID *coded_id)
+{
+  GstH264EncodeBuffer *buf = (GstH264EncodeBuffer*)gst_mini_object_new(GST_TYPE_H264_ENCODE_BUFFER);
+  buf->coded_id = coded_id;
+  buf->encoder = h264_prv;
+  return buf;
+}
+
+
+static VASurfaceID
+h264_get_video_surface(GstH264EncoderPrivate *h264_prv, GstVaapiVideoBuffer *video_buffer)
+{
+  //ref_surface
+  VASurfaceID ret = VA_INVALID_SURFACE;
+  GstVaapiSurface *surface = gst_vaapi_video_buffer_get_surface(video_buffer);
+
+  H264_CHECK_STATUS(surface, VA_INVALID_SURFACE, "video buffer doesn't have a surface");
+  ret = (VASurfaceID)GST_VAAPI_OBJECT_ID(surface);
+
+#if 0
+  g_queue_push_tail(h264_prv->video_buffer_caches,video_buffer);
+  gst_buffer_ref(GST_BUFFER(video_buffer));
+#endif
+
+  return ret;
+
+  error:
+  return ret;
+}
+
+static void
+h264_release_video_surface(GstH264EncoderPrivate *h264_prv, VASurfaceID surface)
+{
+#if 0
+  H264_ASSERT(h264_prv->video_buffer_caches);
+  g_queue_find_custom(h264_prv->video_buffer_caches,xx, compare_func);
+  for (h264_prv->video_buffer_caches) {
+  }
+#endif
+}
+
+static GstVaapiSurface *
+h264_pop_free_surface(GstH264EncoderPrivate *h264_prv)
+{
+  H264_ASSERT(h264_prv && h264_prv->vaapi_context);
+
+  /*may need lock*/
+  GstVaapiSurface *surface = gst_vaapi_context_get_surface(h264_prv->vaapi_context);
+  return surface;
+}
+
+static void
+h264_push_free_surface(GstH264EncoderPrivate *h264_prv, GstVaapiSurface *surface)
+{
+  H264_ASSERT(surface);
+  if (!surface) {
+    return;
+  }
+  gst_vaapi_context_put_surface(h264_prv->vaapi_context, surface);
+}
+
+
+H264_Encode_State gst_h264_encoder_get_state(GstH264Encoder* encoder)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  return h264_prv->encode_state;
+}
+
+
+static VAProfile
+h264_get_va_profile(uint32_t profile)
+{
+  /* only Baseline is supported for now; the requested profile is ignored */
+  return VAProfileH264Baseline;
+}
+
+GstH264Encoder *
+gst_h264_encoder_new(void)
+{
+  return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
+}
+
+
+static void
+gst_h264_encoder_init(GstH264Encoder *encoder)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  H264_ASSERT(h264_prv);
+
+  /* init public attributes */
+  gst_h264_encoder_set_default_values(encoder);
+
+  /* init private values*/
+  h264_prv->public = encoder;
+  h264_prv->format = GST_MAKE_FOURCC('N','V','1','2');
+  h264_prv->es_flag = TRUE;
+
+  h264_prv->vaapi_display = NULL;
+  h264_prv->vaapi_context= NULL;
+  h264_prv->ref_surface = NULL;
+  h264_prv->recon_surface = NULL;
+  h264_prv->video_buffer_caches = g_queue_new();
+
+  h264_prv->encode_state = H264_ENC_NULL;
+  h264_prv->seq_parameter = VA_INVALID_ID;
+  h264_prv->pic_parameter = VA_INVALID_ID;
+  h264_prv->slice_parameter = VA_INVALID_ID;
+
+  h264_prv->coded_bufs = NULL;
+  h264_prv->coded_buf_num = DEFAULT_CODEDBUF_NUM;
+  h264_prv->frame_count = 0;
+  h264_prv->sps_data = NULL;
+  h264_prv->pps_data = NULL;
+  /*index init*/
+  h264_prv->cur_coded_index = 0;
+  /*init others*/
+  h264_prv->code_buffer_lock = g_mutex_new();
+  h264_prv->code_buffer_cond = g_cond_new();
+  h264_prv->available_code_buffers = g_queue_new();
+
+}
+
+static void
+gst_h264_encoder_finalize(GObject *object)
+{
+  /*free private buffers*/
+  GstH264EncoderPrivate *h264_prv = ENCPRV(object);
+
+  g_mutex_free(h264_prv->code_buffer_lock);
+  g_cond_free(h264_prv->code_buffer_cond);
+  if (h264_prv->available_code_buffers) {
+    g_queue_free(h264_prv->available_code_buffers);
+    h264_prv->available_code_buffers = NULL;
+  }
+
+  if (h264_prv->ref_surface) {
+    if (h264_prv->vaapi_context) {
+      h264_push_free_surface(h264_prv, h264_prv->ref_surface);
+    } else {
+      g_object_unref(h264_prv->ref_surface);
+    }
+    h264_prv->ref_surface = NULL;
+  }
+  if (h264_prv->recon_surface) {
+    if (h264_prv->vaapi_context) {
+      h264_push_free_surface(h264_prv, h264_prv->recon_surface);
+    } else {
+      g_object_unref(h264_prv->recon_surface);
+    }
+    h264_prv->recon_surface = NULL;
+  }
+  if (h264_prv->vaapi_context) {
+    g_object_unref(h264_prv->vaapi_context);
+    h264_prv->vaapi_context= NULL;
+  }
+  if (h264_prv->vaapi_display) {
+    g_object_unref(h264_prv->vaapi_display);
+    h264_prv->vaapi_display = NULL;
+  }
+  if (h264_prv->video_buffer_caches) {
+    g_queue_free(h264_prv->video_buffer_caches);
+    h264_prv->video_buffer_caches = NULL;
+  };
+  if (h264_prv->sps_data) {
+    gst_buffer_unref(h264_prv->sps_data);
+    h264_prv->sps_data = NULL;
+  }
+  if (h264_prv->pps_data) {
+    gst_buffer_unref(h264_prv->pps_data);
+    h264_prv->pps_data = NULL;
+  }
+}
+
+
+void
+gst_h264_encoder_set_default_values(GstH264Encoder* encoder)
+{
+  encoder->profile = 66;           /* profile_idc 66: Baseline */
+  encoder->level = 30;             /* level_idc 30: level 3.0 */
+  encoder->width = 1280;
+  encoder->height = 720;
+  encoder->frame_rate = 30;
+  encoder->bitrate = 3*1000*1000;  /* 3 Mbps */
+  encoder->intra_period = 30;
+  encoder->init_qp = 30;
+}
+
+void
+gst_h264_encoder_set_input_format(GstH264Encoder* encoder, uint32_t format)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  h264_prv->format = format;
+}
+
+void
+gst_h264_encoder_set_es_flag(GstH264Encoder* encoder, gboolean es)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  h264_prv->es_flag = es;
+}
+
+gboolean
+gst_h264_encoder_set_display(GstH264Encoder* encoder, GstVaapiDisplay *display)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  if (display == h264_prv->vaapi_display) {
+    return TRUE;
+  }
+
+  if (H264_ENC_INIT < h264_prv->encode_state) {
+    return FALSE;
+  }
+  if (h264_prv->vaapi_display) {
+    g_object_unref(h264_prv->vaapi_display);
+    h264_prv->vaapi_display = NULL;
+  }
+  h264_prv->vaapi_display = g_object_ref(display);
+  return TRUE;
+}
+
+GstVaapiDisplay *
+gst_h264_encoder_get_display(GstH264Encoder* encoder)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  /* may legitimately be NULL before gst_h264_encoder_initialize()/set_display() */
+  if (!h264_prv->vaapi_display)
+    return NULL;
+  return g_object_ref(h264_prv->vaapi_display);
+}
+
+
+H264Status
+gst_h264_encoder_initialize(GstH264Encoder* encoder)
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+
+  /* check state */
+  if (H264_ENC_INIT == h264_prv->encode_state) {
+    return H264_NO_ERROR;
+  }
+  H264_ASSERT(H264_ENC_NULL == h264_prv->encode_state);
+  if (H264_ENC_NULL != h264_prv->encode_state) {
+    return H264_STATE_ERR;
+  }
+
+  /* create the VA display */
+  if (!h264_prv->vaapi_display) {
+    h264_prv->vaapi_display = gst_vaapi_display_x11_new(NULL);
+    H264_CHECK_STATUS(h264_prv->vaapi_display, H264_DISPLAY_ERR, "gst_vaapi_display_x11_new failed.\n");
+  }
+
+  h264_prv->encode_state = H264_ENC_INIT;
+  return H264_NO_ERROR;
+
+  error:
+  gst_h264_encoder_uninitialize(encoder);
+  return ret;
+}
+
+H264Status
+gst_h264_encoder_uninitialize(GstH264Encoder* encoder)
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+
+  /* release buffers first */
+  if (H264_ENC_NULL == h264_prv->encode_state) {
+    return H264_NO_ERROR;
+  }
+
+  if (H264_ENC_INIT < h264_prv->encode_state) {
+    gst_h264_encoder_close(encoder);
+  }
+
+  H264_ASSERT(H264_ENC_INIT == h264_prv->encode_state);
+
+  /* close va_dpy */
+  if (h264_prv->vaapi_display) {
+    g_object_unref(h264_prv->vaapi_display);
+    h264_prv->vaapi_display = NULL;
+  }
+
+  h264_prv->encode_state = H264_ENC_NULL;
+  return H264_NO_ERROR;
+
+  //error:
+  return ret;
+}
+
+
+
+H264Status
+gst_h264_encoder_open(GstH264Encoder* encoder)
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  VAProfile va_profile = h264_get_va_profile(encoder->profile);
+
+  /* check state */
+  if (H264_ENC_OPENED == h264_prv->encode_state) {
+    return H264_NO_ERROR;
+  }
+  H264_ASSERT(H264_ENC_INIT == h264_prv->encode_state);
+  if (H264_ENC_INIT != h264_prv->encode_state) {
+    return H264_STATE_ERR;
+  }
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(!h264_prv->vaapi_context);
+  h264_prv->vaapi_context = gst_vaapi_context_new(h264_prv->vaapi_display,
+                        gst_vaapi_profile(va_profile),
+                        gst_vaapi_entrypoint(VAEntrypointEncSlice),
+                        h264_prv->public->width,
+                        h264_prv->public->height);
+  H264_CHECK_STATUS(h264_prv->vaapi_context, H264_CONTEXT_ERR, "gst_vaapi_context_new failed.\n");
+  ret = h264_encoder_alloc_buffers(h264_prv);
+  if (H264_NO_ERROR != ret) {
+    goto error;
+  }
+
+  h264_prv->encode_state = H264_ENC_OPENED;
+  return H264_NO_ERROR;
+
+  error:
+  // clear resources
+  gst_h264_encoder_close(encoder);
+  return ret;
+
+}
+
+H264Status
+gst_h264_encoder_close(GstH264Encoder* encoder)
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+
+  if (H264_ENC_INIT >= h264_prv->encode_state) {
+    return H264_NO_ERROR;
+  }
+
+  /* release buffers first */
+  if (H264_ENC_OPENED <= h264_prv->encode_state) {
+    h264_encoder_release_buffers(h264_prv);
+  }
+
+  /*remove ref_surface*/
+  if (h264_prv->ref_surface) {
+    if (h264_prv->vaapi_context) {
+      h264_push_free_surface(h264_prv, h264_prv->ref_surface);
+    } else {
+      g_object_unref(h264_prv->ref_surface);
+    }
+    h264_prv->ref_surface = NULL;
+  }
+
+  /*remove recon_surface*/
+  if (h264_prv->recon_surface) {
+    if (h264_prv->vaapi_context) {
+      h264_push_free_surface(h264_prv, h264_prv->recon_surface);
+    } else {
+      g_object_unref(h264_prv->recon_surface);
+    }
+    h264_prv->recon_surface = NULL;
+  }
+
+  /*remove vaapi_context*/
+  if (h264_prv->vaapi_context) {
+    g_object_unref(h264_prv->vaapi_context);
+    h264_prv->vaapi_context = NULL;
+  }
+
+  h264_prv->frame_count = 0;
+  h264_prv->encode_state = H264_ENC_INIT;
+
+  if (h264_prv->sps_data) {
+    gst_buffer_unref(h264_prv->sps_data);
+    h264_prv->sps_data = NULL;
+  }
+  if (h264_prv->pps_data) {
+    gst_buffer_unref(h264_prv->pps_data);
+    h264_prv->pps_data = NULL;
+  }
+  return ret;
+}
+
+static H264Status
+h264_encoder_alloc_buffers(GstH264EncoderPrivate *h264_prv)
+{
+  H264Status ret = H264_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  uint32_t i;
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+
+  /* 1. create sequence parameter set */
+  {
+      VAEncSequenceParameterBufferH264 seq_h264 = {0};
+
+      seq_h264.level_idc = h264_prv->public->level; /* 3.0 */
+      seq_h264.max_num_ref_frames = 1; /*Only I, P frames*/
+      seq_h264.picture_width_in_mbs = (h264_prv->public->width+15)/16;
+      seq_h264.picture_height_in_mbs = (h264_prv->public->height+15)/16;
+
+      seq_h264.bits_per_second = h264_prv->public->bitrate;
+      seq_h264.frame_rate = h264_prv->public->frame_rate;
+      seq_h264.initial_qp = h264_prv->public->init_qp; /*qp_value; 15, 24, 26?*/
+      seq_h264.min_qp = 1;     /*0, 3, 10*/
+      seq_h264.basic_unit_size = 0;
+      seq_h264.intra_period = h264_prv->public->intra_period;
+      seq_h264.intra_idr_period = h264_prv->public->intra_period;
+
+      va_status = vaCreateBuffer(va_dpy, context_id,
+                                 VAEncSequenceParameterBufferType,
+                                 sizeof(seq_h264), 1, &seq_h264, &h264_prv->seq_parameter);
+      H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_ENC_RES_ERR, "alloc seq-buffer failed.\n");
+  }
+
+  /* 2. create coded buffers */
+  h264_prv->coded_bufs = (VABufferID*)g_malloc0(h264_prv->coded_buf_num * sizeof(h264_prv->coded_bufs[0]));
+  {   /* coded-buffer size heuristic: roughly 400 bytes per 16x16 macroblock; FIXME: validate */
+      uint32_t codedbuf_size = (h264_prv->public->width * h264_prv->public->height * 400) / (16*16);
+      for (i = 0; i < h264_prv->coded_buf_num; i++) {
+        va_status = vaCreateBuffer(va_dpy, context_id,VAEncCodedBufferType,
+                                   codedbuf_size, 1, NULL, &h264_prv->coded_bufs[i]);
+
+        H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_ENC_RES_ERR, "alloc coded buffer failed.\n");
+      }
+  }
+
+  H264_ASSERT(h264_prv->available_code_buffers);
+
+  /* 3. init queue available_code_buffers */
+  g_mutex_lock(h264_prv->code_buffer_lock);
+  for (i = 0; i < h264_prv->coded_buf_num; i++) {
+    g_queue_push_tail (h264_prv->available_code_buffers, &h264_prv->coded_bufs[i]);
+  }
+  g_cond_signal(h264_prv->code_buffer_cond);
+  g_mutex_unlock(h264_prv->code_buffer_lock);
+
+  return H264_NO_ERROR;
+
+  error:
+  return ret;
+
+}
+
+static H264Status
+h264_encoder_release_buffers(GstH264EncoderPrivate *h264_prv)
+{
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  uint32_t available_buf_count = h264_prv->coded_buf_num;
+  uint32_t i;
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+
+  /* wait for all coded buffer freed*/
+  g_mutex_lock(h264_prv->code_buffer_lock);
+  while (available_buf_count) {
+    if (g_queue_is_empty(h264_prv->available_code_buffers)) {
+      g_cond_wait(h264_prv->code_buffer_cond, h264_prv->code_buffer_lock);
+    } else {
+      g_queue_pop_head(h264_prv->available_code_buffers);
+      available_buf_count--;
+    }
+  }
+  g_mutex_unlock(h264_prv->code_buffer_lock);
+
+  for (i = 0; i < h264_prv->coded_buf_num; i++) {
+    va_status = vaDestroyBuffer(va_dpy, h264_prv->coded_bufs[i]);
+  }
+  va_status = vaDestroyBuffer(va_dpy, h264_prv->seq_parameter);
+
+  if (h264_prv->coded_bufs) {
+    g_free(h264_prv->coded_bufs);
+    h264_prv->coded_bufs = NULL;
+  }
+
+  return H264_NO_ERROR;
+}
+
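+/* Encodes one raw picture: the input is either an existing VA surface
+ * (GstVaapiVideoBuffer) or a raw buffer uploaded to a free surface.  A coded
+ * buffer is popped from the available queue (blocking until one is recycled),
+ * the frame is rendered between vaBeginPicture()/vaEndPicture(), and the
+ * result is collected by h264_query_encoding_status(). */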
+H264Status
+gst_h264_encoder_encode(GstH264Encoder* encoder, GstBuffer *raw_pic, GList **coded_pics)
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  gboolean is_key = FALSE;
+  VABufferID* coded_buf = NULL;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VASurfaceID  buffer_surface_id = VA_INVALID_SURFACE;
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+  GstVaapiSurface *new_surface = NULL;
+
+  H264_CHECK_STATUS(h264_prv->encode_state >= H264_ENC_OPENED, H264_STATE_ERR, "H264 encoder was not opened.\n");
+  if (h264_prv->encode_state < H264_ENC_ENCODING) {
+    h264_prv->encode_state = H264_ENC_ENCODING;
+  }
+
+  if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
+    buffer_surface_id = h264_get_video_surface(h264_prv, GST_VAAPI_VIDEO_BUFFER(raw_pic));
+  } else {
+    new_surface = h264_pop_free_surface(h264_prv);
+    H264_CHECK_STATUS(new_surface, H264_SURFACE_ERR, "h264_pop_free_surface failed.\n");
+    buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(new_surface);
+    H264_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, H264_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
+
+    /*input picture to h264_prv->cur_surface_index*/
+    va_status = h264_put_raw_buffer_to_surface(h264_prv, raw_pic, buffer_surface_id);
+    H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "va put buffer to surface failed.\n");
+  }
+
+  /* begin picture, using default sid 0*/
+  va_status = vaBeginPicture(va_dpy, context_id, buffer_surface_id);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaBeginPicture error.\n");
+
+  /* set input buffers*/
+  is_key = ((h264_prv->frame_count % encoder->intra_period) == 0);
+
+  /*get valid coded buffer*/
+  g_mutex_lock(h264_prv->code_buffer_lock);
+  H264_ASSERT(h264_prv->available_code_buffers);
+  while (g_queue_is_empty(h264_prv->available_code_buffers)) {
+    g_cond_wait(h264_prv->code_buffer_cond, h264_prv->code_buffer_lock);
+  }
+
+  coded_buf = (VABufferID*)g_queue_pop_head (h264_prv->available_code_buffers);
+  ret = h264_prepare_encoding(h264_prv, raw_pic, is_key, *coded_buf);
+  if (H264_NO_ERROR != ret) {
+    g_queue_push_head(h264_prv->available_code_buffers, coded_buf);
+  }
+  g_mutex_unlock(h264_prv->code_buffer_lock);
+
+  H264_CHECK_STATUS(H264_NO_ERROR == ret, H264_PICTURE_ERR, "h264_prepare_encoding failed.\n");
+
+  /* end picture */
+  va_status = vaEndPicture(va_dpy, context_id);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaEndPicture error.\n");
+
+  /*query surface result*/
+  ret = h264_query_encoding_status(h264_prv, buffer_surface_id,
+                                   is_key, GST_BUFFER_TIMESTAMP(raw_pic), GST_BUFFER_DURATION(raw_pic), coded_buf, coded_pics);
+  if (H264_NO_ERROR != ret) {
+    goto error;
+  }
+
+  h264_prv->frame_count++;
+
+
+  error:
+  if (new_surface) {
+    h264_push_free_surface(h264_prv, new_surface);
+  }
+  return ret;
+}
+
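+/* Uploads a raw I420 or NV12 buffer into the given VA surface via
+ * vaDeriveImage()/vaMapBuffer(); only NV12 destination surfaces are handled. */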
+static H264Status
+h264_put_raw_buffer_to_surface(GstH264EncoderPrivate *h264_prv,
+                               GstBuffer *raw_pic,
+                               VASurfaceID surface_id)
+{
+  H264Status ret = H264_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VAImage surface_image;
+  void *surface_p = NULL;
+  uint8_t *y_src, *u_src, *v_src;
+  uint8_t *y_dst, *u_dst, *v_dst;
+  int row, col;
+  H264_ASSERT(h264_prv->vaapi_display);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+
+  int y_size = h264_prv->public->width * h264_prv->public->height;
+  int u_size = (h264_prv->public->width >> 1) * (h264_prv->public->height >> 1);
+
+  va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaDeriveImage error.\n");
+
+  vaMapBuffer(va_dpy, surface_image.buf, &surface_p);
+
+  /* copy buffer to surface */
+  H264_ASSERT(GST_BUFFER_SIZE(raw_pic) >= y_size + (y_size>>1));
+
+  y_src = GST_BUFFER_DATA(raw_pic);
+  u_src = y_src + y_size;
+  v_src = u_src + u_size;
+
+  y_dst = surface_p + surface_image.offsets[0];
+  u_dst = surface_p + surface_image.offsets[1];
+  v_dst = surface_p + surface_image.offsets[2];
+
+  /* plane copy adapted from libva's avcenc.c example */
+  /* Y plane */
+  for (row = 0; row < surface_image.height; row++) {
+    memcpy(y_dst, y_src, surface_image.width);
+    y_dst += surface_image.pitches[0];
+    y_src += h264_prv->public->width;
+  }
+
+  if (surface_image.format.fourcc == VA_FOURCC_NV12) { /* UV plane */
+    if (h264_prv->format == GST_MAKE_FOURCC('I','4','2','0')) {
+      /* interleave the planar U/V rows into the NV12 UV plane */
+      for (row = 0; row < surface_image.height / 2; row++) {
+        for (col = 0; col < surface_image.width / 2; col++) {
+          u_dst[col * 2] = u_src[col];
+          u_dst[col * 2 + 1] = v_src[col];
+        }
+
+        u_dst += surface_image.pitches[1];
+        u_src += (h264_prv->public->width>>1);
+        v_src += (h264_prv->public->width>>1);
+      }
+    } else if (h264_prv->format == GST_MAKE_FOURCC('N','V','1','2')) {
+      /* NV12 input: the UV plane is already interleaved, copy row by row */
+      for (row = 0; row < surface_image.height / 2; row++) {
+        memcpy(u_dst, u_src, surface_image.width);
+        u_src += h264_prv->public->width;
+        u_dst += surface_image.pitches[1];
+      }
+    } else {
+      H264_ASSERT(0);
+    }
+  } else {
+    /* FIXME: only NV12 destination surfaces are handled so far */
+    H264_ASSERT(0);
+  }
+
+  vaUnmapBuffer(va_dpy, surface_image.buf);
+  vaDestroyImage(va_dpy, surface_image.image_id);
+
+  error:
+  return ret;
+}
+
+
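+/* Renders the per-frame parameter buffers: sequence parameters (first frame
+ * only), then picture and slice parameters.  The reference and reconstructed
+ * surfaces are swapped afterwards so the current reconstruction becomes the
+ * next frame's reference. */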
+static H264Status
+h264_prepare_encoding(GstH264EncoderPrivate *h264_prv, GstBuffer *raw_pic, gboolean is_key, VABufferID coded_buf)
+{
+  H264Status ret = H264_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+
+  VAEncPictureParameterBufferH264 pic_h264;
+  VAEncSliceParameterBuffer slice_h264;
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+
+  /*handle first surface_index*/
+  /*only need first frame*/
+  if (h264_prv->frame_count == 0) {
+    va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->seq_parameter, 1);
+    H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaRenderPicture seq-parameters failed.\n");
+  }
+
+  /* set pic_parameters*/
+  if (!h264_prv->ref_surface) {
+    h264_prv->ref_surface = h264_pop_free_surface(h264_prv);
+    H264_CHECK_STATUS(h264_prv->ref_surface, H264_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+  }
+  if (!h264_prv->recon_surface) {
+    h264_prv->recon_surface = h264_pop_free_surface(h264_prv);
+    H264_CHECK_STATUS(h264_prv->recon_surface, H264_SURFACE_ERR, "reconstructed surface, h264_pop_free_surface failed.\n");
+  }
+  pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface);
+  pic_h264.reconstructed_picture = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
+  pic_h264.coded_buf = coded_buf;
+  pic_h264.picture_width = h264_prv->public->width;
+  pic_h264.picture_height = h264_prv->public->height;
+  pic_h264.last_picture = 0; // last pic or not
+
+  if (VA_INVALID_ID != h264_prv->pic_parameter) { /* share the same pic_parameter*/
+    vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+    h264_prv->pic_parameter = VA_INVALID_ID;
+  }
+  va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+                               sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "creating pic-param buffer failed.\n");
+  va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->pic_parameter, 1);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "rendering pic-param buffer failed.\n");
+
+  /*set slice parameters*/
+  /* one frame, one slice */
+  memset(&slice_h264, 0, sizeof(slice_h264));
+  slice_h264.start_row_number = 0;
+  slice_h264.slice_height = (h264_prv->public->height+15)/16; /* Measured by MB */
+  slice_h264.slice_flags.bits.is_intra = is_key;
+  slice_h264.slice_flags.bits.disable_deblocking_filter_idc = 0;
+  if (VA_INVALID_ID != h264_prv->slice_parameter) {
+    vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+    h264_prv->slice_parameter = VA_INVALID_ID;
+  }
+  va_status = vaCreateBuffer(va_dpy, context_id, VAEncSliceParameterBufferType,
+                             sizeof(slice_h264), 1, &slice_h264, &h264_prv->slice_parameter);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "creating slice-parameters buffer failed.\n");
+  va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->slice_parameter, 1);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "rendering slice-parameters buffer failed.\n");
+
+
+  /*after finished, set ref_surface_index, recon_surface_index */
+  GstVaapiSurface *swap = h264_prv->ref_surface;
+  h264_prv->ref_surface = h264_prv->recon_surface;
+  h264_prv->recon_surface = swap;
+
+  /* error */
+  error:
+  return ret;
+}
+
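+/* Waits for the surface with vaSyncSurface(), maps the coded buffer and turns
+ * each VACodedBufferSegment into a GstBuffer (caching SPS/PPS on the way),
+ * then recycles the coded buffer unless it is shared with the output. */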
+static H264Status
+h264_query_encoding_status(GstH264EncoderPrivate *h264_prv,
+                           VASurfaceID surface_id,
+                           gboolean is_key,
+                           GstClockTime timestamp,
+                           GstClockTime duration,
+                           VABufferID *coded_buf,
+                           GList **coded_pics)
+{
+  H264Status ret = H264_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VASurfaceStatus surface_status = 0;
+  VACodedBufferSegment *buf_list = NULL;
+  GstBuffer* ret_buffer = NULL;
+  gboolean has_coded_data = FALSE;
+
+  H264_ASSERT(h264_prv->vaapi_display);
+  H264_ASSERT(h264_prv->vaapi_context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
+  //VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+
+  H264_ASSERT(coded_pics && *coded_pics == NULL);
+
+  va_status = vaSyncSurface(va_dpy, surface_id);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_QUERY_STATUS_ERR, "vaSyncSurface failed.\n");
+
+  va_status = vaQuerySurfaceStatus(va_dpy, surface_id, &surface_status);
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_QUERY_STATUS_ERR, "vaQuerySurfaceStatus failed.\n");
+  if (VASurfaceSkipped&surface_status) {
+    H264_LOG_ERROR("frame skipped, dts:%" GST_TIME_FORMAT ".\n", GST_TIME_ARGS(timestamp));
+  }
+
+  va_status = vaMapBuffer(va_dpy, *coded_buf, (void **)(&buf_list));
+  H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_QUERY_STATUS_ERR, "vaMapBuffer failed.\n");
+
+  while (buf_list != NULL) {
+      if (!h264_prv->sps_data || !h264_prv->pps_data) {
+        h264_encoder_read_sps_pps(h264_prv, buf_list->buf, buf_list->size);
+      }
+      ret_buffer = h264_encoder_create_coded_buffer(h264_prv, buf_list->buf, buf_list->size, coded_buf);
+      GST_BUFFER_TIMESTAMP(ret_buffer) = timestamp;
+      GST_BUFFER_DURATION(ret_buffer) = duration;
+      if (!is_key) {
+        GST_BUFFER_FLAG_SET(ret_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+      }
+      *coded_pics = g_list_append(*coded_pics, ret_buffer);
+      buf_list = (VACodedBufferSegment*)buf_list->next;
+      H264_ASSERT(NULL == buf_list);
+      has_coded_data = TRUE;
+  }
+
+#if SHARE_CODED_BUF
+  if (!has_coded_data)
+#endif
+  { /* coded data was not handed off in a shared buffer: unmap and recycle it */
+    g_mutex_lock(h264_prv->code_buffer_lock);
+    vaUnmapBuffer(va_dpy, *coded_buf);
+    g_queue_push_head(h264_prv->available_code_buffers, coded_buf);
+    g_cond_signal(h264_prv->code_buffer_cond);
+    g_mutex_unlock(h264_prv->code_buffer_lock);
+  }
+
+  return H264_NO_ERROR;
+
+  error:
+  return ret;
+}
+
+static GstBuffer *
+h264_encoder_create_coded_buffer(GstH264EncoderPrivate *h264_prv,
+                                        uint8_t *frame,
+                                        uint32_t frame_size,
+                                        VABufferID *coded_buf)
+{
+  GstBuffer *ret_buffer;
+  uint32_t   nal_size;
+  const uint8_t   *nal_start;
+  uint8_t  *frame_end;
+
+#if SHARE_CODED_BUF
+  ret_buffer = GST_BUFFER(gst_h264_encode_buffer_new(h264_prv, coded_buf));
+  H264_ASSERT(ret_buffer);
+  GST_BUFFER_MALLOCDATA(ret_buffer) = NULL;
+  GST_BUFFER_DATA(ret_buffer) = frame;      /* points into the still-mapped coded buffer */
+  GST_BUFFER_SIZE(ret_buffer) = frame_size;
+#else
+  ret_buffer = gst_buffer_new();
+  H264_ASSERT(ret_buffer);
+  H264Bitstream bitstream;
+  h264_bitstream_init(&bitstream, (frame_size+32)*8);
+  h264_bitstream_align(&bitstream, 0);
+  H264_ASSERT(bitstream.bit_size == 0);
+
+  if (!h264_prv->es_flag) { /*nal format*/
+    h264_bitstream_write_byte_array(&bitstream, frame, frame_size);
+    H264_ASSERT(bitstream.bit_size == frame_size*8);
+  } else { /* elementary format */
+    frame_end = frame + frame_size;
+    nal_start = frame;
+    nal_size = 0;
+    while((nal_start = h264_next_nal(nal_start, frame_end-nal_start, &nal_size)) != NULL) {
+      H264_ASSERT(nal_size);
+      if (!nal_size) {
+        nal_start += nal_size;
+        continue;
+      }
+      h264_bitstream_write_uint(&bitstream, nal_size, 32);
+      h264_bitstream_write_byte_array(&bitstream, nal_start, nal_size);
+      nal_start += nal_size;
+    }
+  }
+  h264_bitstream_align(&bitstream, 0);
+
+  GST_BUFFER_MALLOCDATA(ret_buffer) =
+        GST_BUFFER_DATA(ret_buffer) = BIT_STREAM_BUFFER(&bitstream);
+  GST_BUFFER_SIZE(ret_buffer) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+  h264_bitstream_destroy(&bitstream, FALSE);
+#endif
+
+  return ret_buffer;
+
+}
+
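+/* Scans the coded data for NAL units and caches the first SPS and PPS
+ * payloads in sps_data/pps_data. */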
+static H264Status
+h264_encoder_read_sps_pps(GstH264EncoderPrivate *h264_prv, const uint8_t *buf, uint32_t size)
+{
+  const uint8_t *end = buf + size;
+  const uint8_t *nal_start = buf;
+  uint32_t nal_size = 0;
+  uint8_t nal_type;
+  GstBuffer *sps = NULL, *pps = NULL;
+
+  while((!sps || !pps) && (nal_start = h264_next_nal(nal_start, end-nal_start, &nal_size)) != NULL) {
+    if (!nal_size) {
+      nal_start += nal_size;
+      continue;
+    }
+
+    nal_type = (*nal_start)&0x1F;
+    switch (nal_type) {
+      case NAL_SPS: {
+        sps = gst_buffer_new_and_alloc(nal_size);
+        memcpy(GST_BUFFER_DATA(sps), nal_start, nal_size);
+        gst_buffer_replace(&h264_prv->sps_data, sps);
+        gst_buffer_unref(sps); /*don't set to NULL*/
+        break;
+      }
+
+      case NAL_PPS: {
+        pps = gst_buffer_new_and_alloc(nal_size);
+        memcpy(GST_BUFFER_DATA(pps), nal_start, nal_size);
+        gst_buffer_replace(&h264_prv->pps_data, pps);
+        gst_buffer_unref(pps);
+        break;
+      }
+
+      default:
+        break;
+    }
+    nal_start += nal_size;
+
+  }
+  if (!sps || !pps) {
+    return H264_DATA_NOT_READY;
+  }
+  return H264_NO_ERROR;
+}
+
+static gboolean
+h264_read_sps_attributes(const uint8_t *sps_data, uint32_t sps_size,
+                                uint32_t *profile_idc, uint32_t *profile_comp, uint32_t *level_idc)
+{
+  H264_ASSERT(profile_idc && profile_comp && level_idc);
+  H264_ASSERT(sps_size >= 4);
+  if (sps_size < 4) {
+    return FALSE;
+  }
+  /*skip sps_data[0], nal_type*/
+  *profile_idc = sps_data[1];
+  *profile_comp = sps_data[2];
+  *level_idc = sps_data[3];
+  return TRUE;
+}
+
+
+H264Status
+gst_h264_encoder_flush(GstH264Encoder* encoder, GList *coded_pics)
+{
+  H264Status ret = H264_NO_ERROR;
+  //GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  //VAProfile va_profile = h264_get_va_profile(encoder->profile);
+
+  //error:
+  return ret;
+}
+
+/* test-only code */
+static int draw_picture(int width, int height,
+                         unsigned char *Y_start,
+                         unsigned char *U_start,
+                         unsigned char *V_start,
+                         int UV_interleave, int box_width, int row_shift);
+
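+/* Standalone smoke test: encodes 50 synthetic frames (checkerboard pattern,
+ * 1280x720 I420) and writes the resulting stream to tmp.h264.
+ */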
+int main_test(int argc, char *argv[])
+{
+  H264Status ret = H264_NO_ERROR;
+  GstH264Encoder *encoder = NULL;
+
+  GList *coded_pics = NULL;
+  GstBuffer **raw_buffer = NULL;
+  const uint32_t raw_buffer_num = 20;
+
+  GstBuffer *tmp_buffer;
+
+  uint32_t i = 0, k = 0;
+
+  gst_init (&argc, &argv);
+
+  g_type_init();
+  if (!g_thread_supported ())
+    g_thread_init (NULL);
+
+  encoder = gst_h264_encoder_new();
+  H264_ASSERT(encoder && ret == H264_NO_ERROR);
+
+  encoder->profile = H264_PROFILE_BASELINE;
+  encoder->level = 30;
+  encoder->width = 1280;
+  encoder->height = 720;
+  encoder->frame_rate = 10;
+  encoder->bitrate = 512*1000;
+  encoder->intra_period = 30;
+  ret = gst_h264_encoder_initialize(encoder);
+  H264_ASSERT(ret == H264_NO_ERROR);
+  ret = gst_h264_encoder_open(encoder);
+  H264_ASSERT(ret == H264_NO_ERROR);
+
+  uint32_t buffer_size = encoder->width * encoder->height *3 /2;
+  uint32_t y_width = encoder->width, y_size = encoder->width * encoder->height;
+  uint32_t u_width = encoder->width/2, u_size = (encoder->width/2) * (encoder->height/2);
+  uint32_t v_width = encoder->width/2;
+  uint8_t *y_src, *u_src, *v_src;
+
+  /*set buffers*/
+  int box_width=8;
+  int row_shift=0;
+  raw_buffer = (GstBuffer**)g_malloc0(raw_buffer_num*sizeof(GstBuffer*));
+  for (i = 0; i < raw_buffer_num; i++) {
+    raw_buffer[i] = gst_buffer_new_and_alloc(buffer_size);
+    y_src = GST_BUFFER_DATA(raw_buffer[i]);
+    u_src = y_src + y_size;
+    v_src = u_src + u_size;
+
+    draw_picture(encoder->width, encoder->height, y_src, u_src, v_src, 0, box_width, row_shift);
+    row_shift++;
+    if (row_shift==(2*box_width)) row_shift= 0;
+  }
+
+  FILE *fp = fopen("tmp.h264", "wb");
+  H264_ASSERT(fp);
+
+  k = 0;
+
+  for (i = 0; i < 50; i++) {
+    coded_pics = NULL;
+    ret = gst_h264_encoder_encode(encoder, raw_buffer[k], &coded_pics);
+    H264_ASSERT(H264_NO_ERROR == ret);
+    ++k;
+    if (k >= raw_buffer_num) k = 0;
+
+    while (coded_pics) {
+      tmp_buffer = coded_pics->data;
+      coded_pics = g_list_remove(coded_pics, tmp_buffer);
+      fwrite(GST_BUFFER_DATA(tmp_buffer), GST_BUFFER_SIZE(tmp_buffer), 1, fp);
+      printf("F:%d, S:%d, %s\n", i, GST_BUFFER_SIZE(tmp_buffer), h264_dump_bytes(GST_BUFFER_DATA(tmp_buffer)+4, 8));
+      gst_buffer_unref(tmp_buffer);
+    }
+  }
+  fclose(fp);
+
+  ret = gst_h264_encoder_close(encoder);
+  H264_ASSERT(H264_NO_ERROR == ret);
+
+  for (i = 0; i < raw_buffer_num; i++) {
+    gst_buffer_unref(raw_buffer[i]);
+  }
+  g_free(raw_buffer);
+  gst_h264_encoder_unref(encoder);
+
+  return 0;
+}
+
+char *h264_dump_bytes(const uint8_t *buf, uint32_t num)
+{
+  static char tmp[1024];
+  uint32_t i = 0;
+  memset(tmp, 0, sizeof(tmp));
+
+  char *p = tmp;
+  for (i = 0; i < num; i++) {
+    snprintf(p, 1024-(p-tmp), "%02x", (uint8_t)buf[i]);
+    p += strlen(p);
+  }
+  return tmp;
+}
+
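+/* Build codec_data in avcC form (AVCDecoderConfigurationRecord):
+ * version, profile/compat/level taken from the cached SPS, a 4-byte NAL
+ * length field, then one SPS and one PPS each prefixed with a 16-bit size.
+ */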
+H264Status
+gst_h264_encoder_get_avcC_codec_data(GstH264Encoder *encoder, GstBuffer **buffer)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  GstBuffer *avc_codec;
+  const uint32_t configuration_version = 0x01;
+  const uint32_t length_size_minus_one = 0x03;
+  uint32_t profile, profile_comp, level_idc;
+
+  H264_ASSERT(buffer);
+  if (!h264_prv->sps_data || !h264_prv->pps_data) {
+    return H264_DATA_NOT_READY;
+  }
+
+  if (FALSE == h264_read_sps_attributes(GST_BUFFER_DATA(h264_prv->sps_data),
+                                   GST_BUFFER_SIZE(h264_prv->sps_data),
+                                   &profile, &profile_comp, &level_idc))
+  {
+    H264_ASSERT(0);
+    return H264_DATA_ERR;
+  }
+
+  H264Bitstream bitstream;
+  h264_bitstream_init(&bitstream,
+                     (GST_BUFFER_SIZE(h264_prv->sps_data)+GST_BUFFER_SIZE(h264_prv->pps_data) + 32)*8);
+
+  /*codec_data*/
+  h264_bitstream_write_uint(&bitstream, configuration_version, 8);
+  h264_bitstream_write_uint(&bitstream, profile, 8);
+  h264_bitstream_write_uint(&bitstream, profile_comp, 8);
+  h264_bitstream_write_uint(&bitstream, level_idc, 8);
+  h264_bitstream_write_uint(&bitstream, h264_bit_mask[6], 6); /* reserved '111111' */
+  h264_bitstream_write_uint(&bitstream, length_size_minus_one, 2); /* lengthSizeMinusOne: 4-byte NAL lengths */
+  h264_bitstream_write_uint(&bitstream, h264_bit_mask[3], 3); /* reserved '111' */
+
+  /*write sps*/
+  h264_bitstream_write_uint(&bitstream, 1, 5);   /* sps count = 1*/
+  H264_ASSERT( BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+  h264_bitstream_write_uint(&bitstream, GST_BUFFER_SIZE(h264_prv->sps_data), 16);
+  h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->sps_data),
+                                              GST_BUFFER_SIZE(h264_prv->sps_data));
+
+  /*write pps*/
+  h264_bitstream_write_uint(&bitstream, 1, 8); /*pps count = 1*/
+  h264_bitstream_write_uint(&bitstream, GST_BUFFER_SIZE(h264_prv->pps_data), 16);
+  h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->pps_data),
+                                              GST_BUFFER_SIZE(h264_prv->pps_data));
+
+  avc_codec = gst_buffer_new();
+  GST_BUFFER_MALLOCDATA(avc_codec) =
+         GST_BUFFER_DATA(avc_codec) =
+         BIT_STREAM_BUFFER(&bitstream);
+  GST_BUFFER_SIZE(avc_codec) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+  h264_bitstream_destroy(&bitstream, FALSE);
+  *buffer = avc_codec;
+
+  return H264_NO_ERROR;
+}
+
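+/* Build codec_data for byte-stream consumers: the cached SPS and PPS, each
+ * prefixed with a 0x000001 start code.
+ */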
+H264Status
+gst_h264_encoder_get_nal_codec_data(GstH264Encoder *encoder, GstBuffer **buffer)
+{
+  GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+  GstBuffer *nal_sps_pps;
+
+  H264_ASSERT(buffer);
+  if (!h264_prv->sps_data || !h264_prv->pps_data) {
+    return H264_DATA_NOT_READY;
+  }
+
+  H264Bitstream bitstream;
+  h264_bitstream_init(&bitstream,
+                     (GST_BUFFER_SIZE(h264_prv->sps_data)+GST_BUFFER_SIZE(h264_prv->pps_data) + 8)*8);
+
+  /*0x000001 start code*/
+  h264_bitstream_write_uint(&bitstream, 0x000001, 24);
+  h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->sps_data),
+                                              GST_BUFFER_SIZE(h264_prv->sps_data));
+  h264_bitstream_write_uint(&bitstream, 0x000001, 24);
+  h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->pps_data),
+                                              GST_BUFFER_SIZE(h264_prv->pps_data));
+
+  nal_sps_pps = gst_buffer_new();
+  GST_BUFFER_MALLOCDATA(nal_sps_pps) =
+         GST_BUFFER_DATA(nal_sps_pps) =
+         BIT_STREAM_BUFFER(&bitstream);
+  GST_BUFFER_SIZE(nal_sps_pps) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+  h264_bitstream_destroy(&bitstream, FALSE);
+  *buffer = nal_sps_pps;
+  return H264_NO_ERROR;
+}
+
+static void
+h264_bitstream_init(H264Bitstream *bitstream, uint32_t bit_capability)
+{
+  bitstream->bit_size = 0;
+  bitstream->buffer = NULL;
+  bitstream->max_bit_capability = 0;
+  if (bit_capability) {
+    h264_bitstream_auto_grow(bitstream, bit_capability);
+  }
+}
+
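+/* Append the bit_size least-significant bits of value to the stream,
+ * MSB first, crossing byte boundaries as needed.
+ */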
+static gboolean
+h264_bitstream_write_uint(H264Bitstream *bitstream, uint32_t value, uint32_t bit_size)
+{
+  gboolean ret = TRUE;
+  uint32_t byte_pos, bit_offset;
+  uint8_t  *cur_byte;
+  uint32_t fill_bits;
+
+  if(!bit_size) {
+    return TRUE;
+  }
+
+  H264_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, bit_size), FALSE, "h264_bitstream_auto_grow failed.\n");
+  byte_pos = (bitstream->bit_size>>3);
+  bit_offset = (bitstream->bit_size&0x07);
+  cur_byte = bitstream->buffer + byte_pos;
+  H264_ASSERT(bit_offset < 8 && bitstream->bit_size <= bitstream->max_bit_capability);
+
+  while (bit_size) {
+    fill_bits = ((8-bit_offset) < bit_size ? (8-bit_offset) : bit_size);
+    bit_size -= fill_bits;
+    bitstream->bit_size += fill_bits;
+
+    *cur_byte |= ((value>>bit_size) & h264_bit_mask[fill_bits])<<(8-bit_offset-fill_bits);
+    ++cur_byte;
+    bit_offset = 0;
+  }
+  H264_ASSERT(cur_byte <= bitstream->buffer + bitstream->max_bit_capability/8);
+  return TRUE;
+
+  error:
+  return FALSE;
+}
+
+static gboolean h264_bitstream_align(H264Bitstream *bitstream, uint32_t value)
+{
+  uint32_t bit_offset, bit_left;
+
+  bit_offset = (bitstream->bit_size&0x07);
+  if (!bit_offset) {
+    return TRUE;
+  }
+  bit_left = 8 - bit_offset;
+  if (value) value = h264_bit_mask[bit_left];
+  return h264_bitstream_write_uint(bitstream, value, bit_left);
+}
+
+
+static gboolean
+h264_bitstream_write_byte_array(H264Bitstream *bitstream, const uint8_t *buf, uint32_t byte_size)
+{
+  gboolean ret = TRUE;
+  if (!byte_size) {
+    return TRUE; /* writing nothing is a success */
+  }
+  H264_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, byte_size<<3), FALSE, "h264_bitstream_auto_grow failed.\n");
+  if (0 == (bitstream->bit_size&0x07)) {
+    memcpy(&bitstream->buffer[bitstream->bit_size>>3], buf, byte_size);
+    bitstream->bit_size += (byte_size<<3);
+  } else {
+    H264_ASSERT(0); /* unaligned byte-array writes are not expected here */
+    while(byte_size) {
+      h264_bitstream_write_uint(bitstream, *buf, 8);
+      --byte_size;
+      ++buf;
+    }
+  }
+  return TRUE;
+
+  error:
+  return FALSE;
+}
+
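+/* Unsigned Exp-Golomb ue(v): for n = number of bits in (value + 1),
+ * write (n - 1) leading zero bits followed by (value + 1) in n bits.
+ */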
+static gboolean
+h264_bitstream_write_ue(H264Bitstream *bitstream, uint32_t value)
+{
+  gboolean ret = TRUE;
+  uint32_t size_in_bits = 0;
+  uint32_t tmp_value = ++value;
+  while (tmp_value) {
+    ++size_in_bits;
+    tmp_value >>= 1;
+  }
+  H264_CHECK_STATUS(h264_bitstream_write_uint(bitstream, 0, size_in_bits-1), FALSE, "h264_bitstream_write_ue failed.\n");
+  H264_CHECK_STATUS(h264_bitstream_write_uint(bitstream, value, size_in_bits), FALSE, "h264_bitstream_write_ue failed.\n");
+  return TRUE;
+
+  error:
+  return FALSE;
+}
+
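+/* Signed Exp-Golomb se(v): map value <= 0 to -2*value and value > 0 to
+ * 2*value - 1, then encode as ue(v).
+ */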
+static gboolean
+h264_bitstream_write_se(H264Bitstream *bitstream, int32_t value)
+{
+  gboolean ret = TRUE;
+  uint32_t new_val;
+
+  if (value <= 0) {
+    new_val = -(value<<1);
+  } else {
+    new_val = (value<<1) - 1;
+  }
+
+  H264_CHECK_STATUS(h264_bitstream_write_ue(bitstream, new_val), FALSE, "h264_bitstream_write_se failed.\n");
+  return TRUE;
+
+  error:
+  return FALSE;
+}
+
+static gboolean
+h264_bitstream_write_trailing_bits(H264Bitstream *bitstream)
+{
+    h264_bitstream_write_uint(bitstream, 1, 1);
+    h264_bitstream_align(bitstream, 0);
+    return TRUE;
+}
+
+static void
+h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag)
+{
+  if (bitstream->buffer && free_flag) {
+    free (bitstream->buffer);
+  }
+  bitstream->buffer = NULL;
+  bitstream->bit_size = 0;
+  bitstream->max_bit_capability = 0;
+}
+
+static gboolean
+h264_bitstream_auto_grow(H264Bitstream *bitstream, uint32_t extra_bit_size)
+{
+  uint32_t new_bit_size = extra_bit_size + bitstream->bit_size;
+  uint32_t clear_pos;
+
+  H264_ASSERT(bitstream->bit_size <= bitstream->max_bit_capability);
+  if (new_bit_size <= bitstream->max_bit_capability) {
+    return TRUE;
+  }
+
+  new_bit_size = ((new_bit_size + H264_BITSTREAM_ALLOC_ALIGN_MASK)
+                &(~H264_BITSTREAM_ALLOC_ALIGN_MASK));
+  H264_ASSERT(new_bit_size%(H264_BITSTREAM_ALLOC_ALIGN_MASK+1) == 0);
+  clear_pos = ((bitstream->bit_size+7)>>3);
+  bitstream->buffer = realloc(bitstream->buffer, new_bit_size>>3);
+  if (!bitstream->buffer) {
+    return FALSE; /* out of memory; callers bail out via H264_CHECK_STATUS */
+  }
+  memset(bitstream->buffer+clear_pos, 0, (new_bit_size>>3)-clear_pos);
+  bitstream->max_bit_capability = new_bit_size;
+  return TRUE;
+}
+
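+/* Hand-written SPS for the progressive baseline/main configuration used by
+ * this encoder; high profiles, interlaced coding and VUI are not handled yet.
+ */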
+static gboolean
+h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+{
+  uint32_t constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
+  uint32_t seq_parameter_set_id = 0;
+
+  /*need to set the values*/
+  uint32_t log2_max_frame_num_minus4 = 0;  // FIXME: value to be tuned (1? 3?)
+  uint32_t pic_order_cnt_type = 0; // must be 0, other types are not handled below
+  uint32_t log2_max_pic_order_cnt_lsb_minus4 = 0;  // FIXME: value to be tuned (2? 4?)
+  uint32_t num_ref_frames = 1;  // P frames only, single reference
+  uint32_t gaps_in_frame_num_value_allowed_flag = 0; // FIXME: confirm
+  uint32_t mb_width = (h264_prv->public->width+15)/16; // picture width in macroblocks
+  uint32_t mb_height = (h264_prv->public->height+15)/16; // picture height in macroblocks
+  int32_t frame_mbs_only_flag = 1; // progressive only, no field coding
+  uint32_t frame_cropping_flag = 0;
+  uint32_t frame_crop_bottom_offset = 0;
+  uint32_t vui_present_flag = 0; // no VUI parameters
+
+
+  constraint_set0_flag = h264_prv->public->profile == H264_PROFILE_BASELINE;
+  constraint_set1_flag = h264_prv->public->profile <= H264_PROFILE_MAIN;
+  constraint_set2_flag = 0;
+  constraint_set3_flag = 0;
+
+  if (mb_height * 16 - h264_prv->public->height) {
+    frame_cropping_flag = 1;
+    frame_crop_bottom_offset =
+        (mb_height * 16 - h264_prv->public->height) / (2 * (!frame_mbs_only_flag + 1));
+  }
+
+  h264_bitstream_write_uint(bitstream, h264_prv->public->profile, 8); /* profile_idc */
+  h264_bitstream_write_uint(bitstream, constraint_set0_flag, 1);     /* constraint_set0_flag */
+  h264_bitstream_write_uint(bitstream, constraint_set1_flag, 1);     /* constraint_set1_flag */
+  h264_bitstream_write_uint(bitstream, constraint_set2_flag, 1);     /* constraint_set2_flag */
+  h264_bitstream_write_uint(bitstream, constraint_set3_flag, 1);     /* constraint_set3_flag */
+  h264_bitstream_write_uint(bitstream, 0, 4);                        /* reserved_zero_4bits */
+  h264_bitstream_write_uint(bitstream, h264_prv->public->level, 8);   /* level_idc */
+  h264_bitstream_write_ue(bitstream, seq_parameter_set_id);          /* seq_parameter_set_id */
+
+  if (h264_prv->public->profile >= H264_PROFILE_HIGH) {
+      /* FIXME: fix for high profile */
+      H264_ASSERT(0);
+  }
+
+  h264_bitstream_write_ue(bitstream, log2_max_frame_num_minus4);    /* log2_max_frame_num_minus4 */
+  h264_bitstream_write_ue(bitstream, pic_order_cnt_type);           /* pic_order_cnt_type */
+
+  if (pic_order_cnt_type == 0)
+      h264_bitstream_write_ue(bitstream, log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
+  else {
+      H264_ASSERT(0);
+  }
+
+  h264_bitstream_write_ue(bitstream, num_ref_frames);                            /* num_ref_frames */
+  h264_bitstream_write_uint(bitstream, gaps_in_frame_num_value_allowed_flag, 1); /* gaps_in_frame_num_value_allowed_flag */
+
+  h264_bitstream_write_ue(bitstream, mb_width - 1);              /* pic_width_in_mbs_minus1 */
+  h264_bitstream_write_ue(bitstream, mb_height - 1);             /* pic_height_in_map_units_minus1 */
+  h264_bitstream_write_uint(bitstream, frame_mbs_only_flag, 1);  /* frame_mbs_only_flag */
+
+  if (!frame_mbs_only_flag) { // field/MBAFF coding not supported
+      H264_ASSERT(0);
+  }
+
+  h264_bitstream_write_uint(bitstream, 0, 1);                         /* direct_8x8_inference_flag */
+  h264_bitstream_write_uint(bitstream, frame_cropping_flag, 1);       /* frame_cropping_flag */
+
+  if (frame_cropping_flag) {
+      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_left_offset */
+      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_right_offset */
+      h264_bitstream_write_ue(bitstream, 0);                        /* frame_crop_top_offset */
+      h264_bitstream_write_ue(bitstream, frame_crop_bottom_offset); /* frame_crop_bottom_offset */
+  }
+
+  h264_bitstream_write_uint(bitstream, vui_present_flag, 1);                         /* vui_parameters_present_flag */
+  h264_bitstream_write_trailing_bits(bitstream);                             /* rbsp_trailing_bits */
+  return TRUE;
+
+  //error:
+  //return FALSE;
+
+}
+
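+/* Locate the next Annex B NAL unit in buffer: skip a leading 0x000001 or
+ * 0x00000001 start code if present, return a pointer to the NAL payload and
+ * store its size (up to the next start code or the end of the buffer) in
+ * *nal_size. Returns NULL when no further NAL unit is available.
+ */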
+static const uint8_t *
+h264_next_nal(const uint8_t *buffer, uint32_t len, uint32_t *nal_size)
+{
+    const uint8_t *cur = buffer;
+    const uint8_t *end = buffer + len;
+    const uint8_t *nal_start = NULL;
+    uint32_t flag = 0xFFFFFFFF;
+    uint32_t nal_start_len = 0;
+
+    H264_ASSERT(buffer && nal_size); /* len is unsigned, no >= 0 check needed */
+    if (len < 3) {
+        *nal_size = len;
+        nal_start = (len ? buffer : NULL);
+        return nal_start;
+    }
+
+    /*locate start code position*/
+    if (!buffer[0] && !buffer[1]) {
+        if (buffer[2] == 1) { // 0x000001
+            nal_start_len = 3;
+        } else if (!buffer[2] && len >=4 && buffer[3] == 1) { //0x00000001
+            nal_start_len = 4;
+        }
+    }
+    nal_start = buffer + nal_start_len;
+    cur = nal_start;
+
+    /*find next nal start position*/
+    while (cur < end) {
+        flag = ((flag<<8) | ((*cur++)&0xFF));
+        if (flag == 0x00000001) {
+            *nal_size = cur - 4 - nal_start;
+            break;
+        } else if ((flag&0x00FFFFFF) == 0x00000001) {
+            *nal_size = cur - 3 - nal_start;
+            break;
+        }
+    }
+    if (cur >= end) {
+      *nal_size = end - nal_start;
+      if (nal_start >= end) {
+        nal_start = NULL;
+      }
+    }
+    return nal_start;
+}
+
+
+static gboolean
+h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+{
+  H264_ASSERT(0); /* FIXME: not implemented */
+  return TRUE;
+}
+
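+/* Test-only helper: fill the Y/U/V planes (or NV12 when UV_interleave is set)
+ * with a moving checkerboard pattern.
+ */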
+static int draw_picture(int width, int height,
+                         unsigned char *Y_start,
+                         unsigned char *U_start,
+                         unsigned char *V_start,
+                         int UV_interleave, int box_width, int row_shift)
+{
+    int row;
+    int field = 0;
+    int Y_pitch = width;
+    int U_pitch = width/2;
+    int V_pitch = width/2;
+
+    /* copy Y plane */
+    for (row=0;row<height;row++) {
+        unsigned char *Y_row = Y_start + row * Y_pitch;
+        int jj, xpos, ypos;
+
+        ypos = (row / box_width) & 0x1;
+
+        /* fill garbage data into the other field */
+        if (((field == 1) && (row &1))
+            || ((field == 2) && ((row &1)==0))) {
+            memset(Y_row, 0xff, width);
+            continue;
+        }
+
+        for (jj=0; jj<width; jj++) {
+            xpos = ((row_shift + jj) / box_width) & 0x1;
+
+            if ((xpos == 0) && (ypos == 0))
+                Y_row[jj] = 0xeb;
+            if ((xpos == 1) && (ypos == 1))
+                Y_row[jj] = 0xeb;
+
+            if ((xpos == 1) && (ypos == 0))
+                Y_row[jj] = 0x10;
+            if ((xpos == 0) && (ypos == 1))
+                Y_row[jj] = 0x10;
+        }
+    }
+
+    /* copy UV data */
+    for( row =0; row < height/2; row++) {
+        unsigned short value = 0x80;
+
+        /* fill garbage data into the other field */
+        if (((field == 1) && (row &1))
+            || ((field == 2) && ((row &1)==0))) {
+            value = 0xff;
+        }
+
+        if (UV_interleave) {
+            unsigned short *UV_row = (unsigned short *)(U_start + row * U_pitch);
+
+            memset(UV_row, value, width);
+        } else {
+            unsigned char *U_row = U_start + row * U_pitch;
+            unsigned char *V_row = V_start + row * V_pitch;
+
+            memset (U_row,value,width/2);
+            memset (V_row,value,width/2);
+        }
+    }
+    return 0;
+}
+
+
+
diff --git a/gst/vaapiencode/h264encoder.h b/gst/vaapiencode/h264encoder.h
new file mode 100644 (file)
index 0000000..6203f0b
--- /dev/null
@@ -0,0 +1,140 @@
+
+#ifndef _GST_H264_ENCODER_H_
+#define _GST_H264_ENCODER_H_
+
+#include <stdio.h>
+#include <stdint.h>
+
+#include "gst/gstbuffer.h"
+#include "gst/vaapi/gstvaapidisplay.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#ifdef DEBUG
+#include <assert.h>
+#define H264_ASSERT(exp) assert(exp)
+#else
+#define H264_ASSERT(exp)
+#endif
+
+#define H264_NO_ERROR       0
+#define H264_MEM_ERR       -1
+#define H264_DISPLAY_ERR   -2
+#define H264_CONFIG_ERR    -3
+#define H264_CONTEXT_ERR   -4
+#define H264_STATE_ERR     -5
+#define H264_ENC_RES_ERR   -6
+#define H264_PICTURE_ERR   -7
+#define H264_SURFACE_ERR   -8
+#define H264_QUERY_STATUS_ERR -9
+#define H264_DATA_NOT_READY   -10
+#define H264_DATA_ERR      -11
+
+
+
+#define H264_LOG_ERROR(...) fprintf(stderr, ## __VA_ARGS__)
+#define H264_LOG_DEBUG(...) fprintf(stdout, ## __VA_ARGS__)
+#define H264_LOG_INFO(...)  fprintf(stdout, ## __VA_ARGS__)
+
+
+typedef int                                 H264Status;
+typedef void*                               VADisplay;
+typedef struct _GstH264Encoder              GstH264Encoder;
+typedef struct _GstH264EncoderPrivate       GstH264EncoderPrivate;
+typedef struct _GstH264EncoderClass         GstH264EncoderClass;
+
+
+#define GST_TYPE_H264_ENCODER             (gst_h264_encoder_get_type())
+#define GST_IS_H264_ENCODER(obj)          (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_ENCODER))
+#define GST_IS_H264_ENCODER_CLASS(klass)  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264_ENCODER))
+#define GST_H264_ENCODER_GET_CLASS(obj)   (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264_ENCODER, GstH264EncoderClass))
+#define GST_H264_ENCODER(obj)             (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_ENCODER, GstH264Encoder))
+#define GST_H264_ENCODER_CLASS(klass)     (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_ENCODER, GstH264EncoderClass))
+#define GST_H264_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_H264_ENCODER,GstH264EncoderPrivate))
+
+typedef enum {
+  H264_ENC_NULL,
+  H264_ENC_INIT,
+  H264_ENC_OPENED,
+  H264_ENC_ENCODING,
+} H264_Encode_State;
+
+typedef enum {
+  H264_PROFILE_BASELINE = 66,
+  H264_PROFILE_MAIN     = 77,
+  H264_PROFILE_EXTENDED = 88,
+  H264_PROFILE_HIGH    = 100,
+  H264_PROFILE_HIGH10  = 110,
+  H264_PROFILE_HIGH422 = 122,
+  H264_PROFILE_HIGH444 = 144,
+  H264_PROFILE_HIGH444_PREDICTIVE = 244,
+} H264_Profile;
+
+#define H264_DEFAULT_INIT_QP 26
+
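+/* Public encoder parameters; fill them in before calling
+ * gst_h264_encoder_initialize()/gst_h264_encoder_open() (see main_test).
+ */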
+struct _GstH264Encoder {
+  GObject parent;   /*based on gobject*/
+
+  uint32_t profile;
+  uint32_t level;
+  uint32_t width;
+  uint32_t height;
+  uint32_t frame_rate;
+  uint32_t bitrate;
+  uint32_t intra_period;
+  int32_t  init_qp;  /*default 26*/
+  /* private data; */
+};
+
+struct _GstH264EncoderClass {
+    GObjectClass parent_class;
+};
+
+
+GType    gst_h264_encoder_get_type(void);
+
+GstH264Encoder *gst_h264_encoder_new(void);
+static inline void gst_h264_encoder_unref (GstH264Encoder * encoder)
+{
+  g_object_unref (encoder);
+}
+
+void     gst_h264_encoder_set_input_format(GstH264Encoder* encoder, uint32_t format);
+void     gst_h264_encoder_set_es_flag(GstH264Encoder* encoder, gboolean es);
+gboolean gst_h264_encoder_set_display(GstH264Encoder* encoder, GstVaapiDisplay *display);
+GstVaapiDisplay *gst_h264_encoder_get_display(GstH264Encoder* encoder);
+
+
+H264Status gst_h264_encoder_initialize(GstH264Encoder* encoder);
+H264Status gst_h264_encoder_uninitialize(GstH264Encoder* encoder);
+
+/* set encoder attributes to their default values */
+void       gst_h264_encoder_set_default_values(GstH264Encoder* encoder);
+
+
+/* open/close the encoder */
+H264Status gst_h264_encoder_open(GstH264Encoder* encoder);
+H264Status gst_h264_encoder_close(GstH264Encoder* encoder);
+
+
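+/* Encode one raw picture; completed coded pictures are appended to
+ * *coded_pics as GstBuffers that the caller must unref (see main_test).
+ */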
+H264Status gst_h264_encoder_encode(GstH264Encoder* encoder, GstBuffer *raw_pic, GList **coded_pics);
+H264Status gst_h264_encoder_flush(GstH264Encoder* encoder, GList *coded_pics);
+
+H264_Encode_State gst_h264_encoder_get_state(GstH264Encoder* encoder);
+
+/*other functions*/
+char      *h264_dump_bytes(const uint8_t *buf, uint32_t num);
+H264Status gst_h264_encoder_get_avcC_codec_data(GstH264Encoder* encoder, GstBuffer **buffer);
+H264Status gst_h264_encoder_get_nal_codec_data(GstH264Encoder* encoder, GstBuffer **buffer);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /*_GST_H264_ENCODER_H_ */
+