if (!vaapi_check_status(status, "vaCreateConfig()"))
goto end;
+ VASurfaceID *surface_ids = surfaces->data;
+ int surface_num = surfaces->len;
+
+ if (VAEntrypointEncSlice == va_entrypoint) {
+ surface_ids = NULL;
+ surface_num = 0;
+ }
+
GST_VAAPI_DISPLAY_LOCK(display);
status = vaCreateContext(
GST_VAAPI_DISPLAY_VADISPLAY(display),
priv->config_id,
priv->width, priv->height,
VA_PROGRESSIVE,
- (VASurfaceID *)surfaces->data, surfaces->len,
+ (VASurfaceID *)surface_ids, surface_num,
&context_id
);
GST_VAAPI_DISPLAY_UNLOCK(display);
-SUBDIRS = vaapi
+SUBDIRS = vaapi vaapiencode
# Extra clean files so that maintainer-clean removes *everything*
MAINTAINERCLEANFILES = Makefile.in
--- /dev/null
+plugin_LTLIBRARIES = libgstvaapiencode.la
+
+libgstvaapi_CFLAGS = \
+ $(LIBVA_CFLAGS) \
+ -I$(top_srcdir)/gst-libs
+
+libgstvaapi_LIBS = \
+ $(top_builddir)/gst-libs/gst/vaapi/libgstvaapi-x11-$(GST_MAJORMINOR).la
+
+libgstvaapiencode_la_SOURCES = \
+ gstvaapiencoder.c \
+ gstvaapibaseencoder.c \
+ gstvaapih264encoder.c \
+ gstvaapih263encoder.c \
+ gstvaapimpeg4encoder.c \
+ gstvaapiencode.c \
+ gstvaapih264encode.c \
+ gstvaapih263encode.c \
+ gstvaapimpeg4encode.c \
+ $(NULL)
+
+noinst_HEADERS = \
+ gstvaapiencoder.h \
+ gstvaapibaseencoder.h \
+ gstvaapih264encoder.h \
+ gstvaapih263encoder.h \
+ gstvaapimpeg4encoder.h \
+ gstvaapiencode.h \
+ gstvaapih264encode.h \
+ gstvaapih263encode.h \
+ gstvaapimpeg4encode.h \
+ $(NULL)
+
+libgstvaapiencode_la_CFLAGS = \
+ $(libgstvaapi_CFLAGS) \
+ $(GST_CFLAGS) \
+ $(GST_BASE_CFLAGS) \
+ $(GST_VIDEO_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS)
+
+libgstvaapiencode_la_LIBADD = \
+ $(libgstvaapi_LIBS) \
+ $(GST_LIBS) \
+ $(GST_BASE_LIBS) \
+ $(GST_VIDEO_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS)
+
+libgstvaapiencode_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstvaapiencode_la_LIBTOOLFLAGS = --tag=disable-static
+
+# Extra clean files so that maintainer-clean removes *everything*
+MAINTAINERCLEANFILES = Makefile.in
--- /dev/null
+
+#include "gstvaapibaseencoder.h"
+
+#include <string.h>
+#include <stdlib.h>
+#include <glib.h>
+#include <X11/Xlib.h>
+
+#include <va/va.h>
+#include "va/va_x11.h"
+
+#include "gst/gstclock.h"
+#include "gst/gstvalue.h"
+
+#include "gst/vaapi/gstvaapiobject.h"
+#include "gst/vaapi/gstvaapiobject_priv.h"
+#include "gst/vaapi/gstvaapicontext.h"
+#include "gst/vaapi/gstvaapisurface.h"
+#include "gst/vaapi/gstvaapisurfacepool.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
+#include "gst/vaapi/gstvaapidisplay_priv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_base_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_base_encoder_debug
+
+#define VA_INVALID_PROFILE 0xffffffff
+#define DEFAULT_VA_CODEDBUF_NUM 4
+
+#define GST_TYPE_ENCODER_SHARED_BUFFER (gst_base_encode_buffer_get_type())
+
+static GstMiniObjectClass *gst_encoder_share_buffer_parent_class = NULL;
+
+typedef struct _GstEncoderShareBuffer GstEncoderShareBuffer;
+struct _GstEncoderShareBuffer {
+ GstBuffer buffer;
+ VABufferID *coded_id;
+ GstVaapiBaseEncoder *encoder;
+};
+
+
+struct _GstVaapiBaseEncoderPrivate {
+ guint32 format; /*NV12, I420,*/
+ VAProfile profile;
+ /*total encoded frames*/
+ guint32 frame_count;
+ gboolean frame_notify_flag;
+
+ VABufferID *coded_bufs;
+ guint32 coded_buf_num;
+ GMutex *code_buffer_lock;
+ GCond *code_buffer_cond;
+ GQueue *available_code_buffers;
+};
+
+G_DEFINE_TYPE(GstVaapiBaseEncoder, gst_vaapi_base_encoder, GST_TYPE_VAAPI_ENCODER);
+
+static EncoderStatus gst_vaapi_base_encoder_initialize_default(
+ GstVaapiEncoder* encoder, GstVaapiDisplay *display);
+static EncoderStatus gst_vaapi_base_encoder_uninitialize_default(
+ GstVaapiEncoder* encoder, GstVaapiDisplay *display);
+static EncoderStatus gst_vaapi_base_encoder_open_default(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display, void* private_data,
+ GstVaapiContext **context);
+static EncoderStatus gst_vaapi_base_encoder_close_default(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static EncoderStatus gst_vaapi_base_encoder_encode_default(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context,
+ GstBuffer *raw_pic, GList **coded_pics);
+static EncoderStatus gst_vaapi_base_encoder_flush_default(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display,
+ GstVaapiContext *context,
+ GList **coded_pics);
+static GstBuffer *gst_vaapi_base_encoder_copy_buffer_default(GstVaapiBaseEncoder *encoder,
+ guint8 *frame, guint32 frame_size, VABufferID *coded_buf);
+
+
+static gboolean base_encoder_alloc_coded_buffers(GstVaapiBaseEncoder *base_encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static EncoderStatus base_encoder_release_coded_buffers(GstVaapiBaseEncoder *base_encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static EncoderStatus base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_encoder,
+ GstVaapiDisplay *display, GstBuffer *raw_pic, GstVaapiSurface *surface);
+
+static EncoderStatus base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
+ GstVaapiDisplay *display, GstVaapiSurface *buffer_surface,
+ gboolean is_key, GstClockTime timestamp,
+ GstClockTime duration, VABufferID *coded_buf,
+ GList **coded_pics);
+
+static VABufferID *pop_available_coded_buffer(GstVaapiBaseEncoderPrivate *base_prv);
+static gboolean push_available_coded_buffer(
+ GstVaapiBaseEncoderPrivate *base_prv, VABufferID *buf);
+
+static void
+gst_vaapi_base_encoder_finalize(GObject *object)
+{
+  /* GObject finalize: uninitialize the encoder if it is still active, then
+   * free the coded-buffer lock/condition and the free-buffer queue. */
+  GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(object);
+
+  /* make sure VA resources are released before tearing down the sync primitives */
+  if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
+    gst_vaapi_encoder_uninitialize(encoder);
+  }
+
+  g_mutex_free(base_prv->code_buffer_lock);
+  g_cond_free(base_prv->code_buffer_cond);
+  if (base_prv->available_code_buffers) {
+    g_queue_free(base_prv->available_code_buffers);
+    base_prv->available_code_buffers = NULL;
+  }
+
+  /* chain up to the parent finalize */
+  G_OBJECT_CLASS(gst_vaapi_base_encoder_parent_class)->finalize(object);
+}
+
+
+static void
+gst_vaapi_base_encoder_class_init(GstVaapiBaseEncoderClass *klass)
+{
+  /* Class init: install the default implementations of the GstVaapiEncoder
+   * vfuncs and clear all subclass hook pointers. */
+  GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+  GstVaapiEncoderClass * const encoder_class = GST_VAAPI_ENCODER_CLASS(klass);
+  g_type_class_add_private(klass, sizeof(GstVaapiBaseEncoderPrivate));
+
+  GST_DEBUG_CATEGORY_INIT (gst_vaapi_base_encoder_debug, "gst_vaapi_base_encoder", 0,
+      "gst_vaapi_base_encoder element");
+
+  object_class->finalize = gst_vaapi_base_encoder_finalize;
+
+  /* default encoder vfuncs provided by this base class */
+  encoder_class->initialize = gst_vaapi_base_encoder_initialize_default;
+  encoder_class->uninitialize = gst_vaapi_base_encoder_uninitialize_default;
+  encoder_class->open = gst_vaapi_base_encoder_open_default;
+  encoder_class->close = gst_vaapi_base_encoder_close_default;
+  encoder_class->encode = gst_vaapi_base_encoder_encode_default;
+  encoder_class->flush = gst_vaapi_base_encoder_flush_default;
+  encoder_class->get_codec_data = NULL;
+
+  /* subclass hooks; codec-specific encoders override these */
+  klass->validate_attributes = NULL;
+  klass->pre_alloc_resource = NULL;
+  klass->release_resource = NULL;
+  klass->prepare_frame = NULL;
+  klass->notify_frame = NULL;
+  klass->copy_coded_frame = NULL;
+
+  /*
+  object_class->set_property = gst_vaapi_base_encoder_set_property;
+  object_class->get_property = gst_vaapi_base_encoder_get_property;
+  */
+}
+
+static void
+gst_encoder_share_buffer_finalize (GstEncoderShareBuffer *base_buffer)
+{
+  /* Finalize a zero-copy coded buffer: unmap the underlying VA coded buffer
+   * and return its id to the encoder's free queue, then chain up. */
+  GstVaapiBaseEncoder *encoder = NULL;
+  VABufferID* coded_id = NULL;
+  GstVaapiDisplay *display = NULL;
+  GstVaapiBaseEncoderPrivate *encoder_prv = NULL;
+
+  /* NOTE(review): is_locked is presumably referenced by the
+   * ENCODER_ACQUIRE/RELEASE_DISPLAY_LOCK macros — confirm against their
+   * definitions. */
+  gboolean is_locked = FALSE;
+
+  encoder = base_buffer->encoder;
+  coded_id = base_buffer->coded_id;
+  display = ENCODER_DISPLAY(encoder);
+  encoder_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+
+  ENCODER_ASSERT(display);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+  ENCODER_ASSERT(encoder_prv);
+  ENCODER_ASSERT(coded_id && VA_INVALID_ID!= *coded_id);
+
+  /*if (--(*base_buffer->ref_coded_id) == 0) */
+  {
+    /*g_free(base_buffer->ref_coded_id);*/
+    ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+    vaUnmapBuffer(va_dpy, *coded_id);
+    ENCODER_RELEASE_DISPLAY_LOCK(display);
+    /* recycle the id so the encoder can reuse this coded buffer */
+    push_available_coded_buffer(encoder_prv, coded_id);
+  }
+
+  if (GST_MINI_OBJECT_CLASS(gst_encoder_share_buffer_parent_class)->finalize) {
+    GST_MINI_OBJECT_CLASS(gst_encoder_share_buffer_parent_class)->finalize(GST_MINI_OBJECT(base_buffer));
+  }
+}
+
+static void
+gst_encode_share_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+  /* Class init for the shared-buffer mini-object: remember the parent class
+   * (for chaining) and install our finalize override. */
+  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS(g_class);
+
+  gst_encoder_share_buffer_parent_class = g_type_class_peek_parent(g_class);
+  ENCODER_ASSERT(gst_encoder_share_buffer_parent_class);
+
+  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+      gst_encoder_share_buffer_finalize;
+}
+
+
+static GType
+gst_base_encode_buffer_get_type (void)
+{
+  /* Lazily register the GstEncoderShareBuffer GType (a GstBuffer subclass
+   * that returns its VA coded buffer to the encoder on finalize). */
+  static GType s_base_encode_buffer_type = 0;
+  if (G_UNLIKELY (s_base_encode_buffer_type == 0)) {
+    static const GTypeInfo s_base_encode_buffer_info = {
+      sizeof(GstBufferClass),
+      NULL,
+      NULL,
+      gst_encode_share_buffer_class_init,
+      NULL,
+      NULL,
+      sizeof(GstEncoderShareBuffer),
+      0,
+      NULL,
+      NULL
+    };
+    s_base_encode_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+        "GstEncoderShareBuffer", &s_base_encode_buffer_info, 0);
+  }
+  return s_base_encode_buffer_type;
+}
+
+static GstEncoderShareBuffer *
+gst_base_encode_share_buffer_new(GstVaapiBaseEncoder *encoder, VABufferID *coded_id)
+{
+  /* Create a shared buffer wrapping a mapped VA coded buffer. The buffer
+   * keeps a back-pointer to the encoder so finalize can recycle coded_id. */
+  GstEncoderShareBuffer *buf = (GstEncoderShareBuffer*)gst_mini_object_new(GST_TYPE_ENCODER_SHARED_BUFFER);
+  buf->coded_id = coded_id;
+  buf->encoder = encoder;
+  return buf;
+}
+
+
+static void
+gst_vaapi_base_encoder_init(GstVaapiBaseEncoder *encoder)
+{
+  /* Instance init: reset private state and create the coded-buffer
+   * synchronization primitives and free-buffer queue. */
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+  /* NOTE(review): encoder_prv is fetched but never used here — candidate
+   * for removal. */
+  GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+  ENCODER_ASSERT(base_prv);
+
+  /* init private values*/
+  base_prv->format = 0;
+  base_prv->profile= VA_INVALID_PROFILE;
+  base_prv->frame_count = 0;
+  base_prv->frame_notify_flag = FALSE;
+
+  base_prv->coded_bufs = NULL;
+  base_prv->coded_buf_num = DEFAULT_VA_CODEDBUF_NUM;
+  base_prv->code_buffer_lock = g_mutex_new();
+  base_prv->code_buffer_cond = g_cond_new();
+  base_prv->available_code_buffers = g_queue_new();
+}
+
+void
+gst_vaapi_base_encoder_set_frame_notify(GstVaapiBaseEncoder *encoder, gboolean flag)
+{
+  /* Enable/disable the per-frame <notify_frame> callback issued while the
+   * coded buffer is mapped. */
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+  base_prv->frame_notify_flag = flag;
+}
+
+gboolean
+gst_vaapi_base_encoder_set_va_profile(GstVaapiBaseEncoder *encoder, guint profile)
+{
+  /* Select the VA profile used when the encoding context is opened.
+   * Always returns TRUE (no validation is performed here). */
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+  base_prv->profile = profile;
+  return TRUE;
+}
+
+void
+gst_vaapi_base_encoder_set_input_format(GstVaapiBaseEncoder* encoder, guint32 format)
+{
+  /* Record the raw input pixel format (e.g. NV12, I420) used when copying
+   * raw frames into VA surfaces. */
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+  base_prv->format = format;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_initialize_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display)
+{
+  /* Default <initialize> vfunc: nothing to do at this stage. */
+  return ENCODER_NO_ERROR;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_uninitialize_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display)
+{
+  /* Default <uninitialize> vfunc: nothing to do at this stage. */
+  return ENCODER_NO_ERROR;
+
+}
+
+gboolean
+default_validate_encoder_parameters(GstVaapiBaseEncoder *encoder)
+{
+  /* Default <validate_attributes> hook: reject the configuration if any of
+   * width, height or framerate is still zero/unset.
+   * (The private-data lookup previously done here was unused and has been
+   * removed.) */
+  if (!ENCODER_WIDTH(encoder) || !ENCODER_HEIGHT(encoder) || !ENCODER_FPS(encoder)) {
+    return FALSE;
+  }
+  return TRUE;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_open_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display, void* private_data, GstVaapiContext **context)
+{
+  /* Default <open> vfunc: validate parameters, create the VA encoding
+   * context, then let the subclass pre-allocate its resources and allocate
+   * the shared coded buffers. On any failure the ENCODER_CHECK_STATUS
+   * macros jump to `end` where the context is torn down again. */
+  GstVaapiBaseEncoder* base_encoder = GST_VAAPI_BASE_ENCODER(encoder);
+  GstVaapiBaseEncoderClass *base_class = GST_VAAPI_BASE_ENCODER_GET_CLASS(encoder);
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+
+  GstVaapiSurfacePool *surfaces_pool = private_data;
+  GstVaapiContext *out_context = NULL;
+  /* NOTE(review): va_profile and va_status are never used in this function
+   * — candidates for removal. */
+  VAProfile va_profile = -1;
+
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  gboolean check_attri_ret = TRUE;
+  /*check and set default values*/
+  if (base_class->validate_attributes) {
+    check_attri_ret = base_class->validate_attributes(base_encoder);
+  } else {
+    check_attri_ret = default_validate_encoder_parameters(base_encoder);
+  }
+  ENCODER_CHECK_STATUS(check_attri_ret, ENCODER_PARAMETER_ERR, "vaapi encoder paramerter error.\n");
+  ENCODER_CHECK_STATUS(VA_INVALID_PROFILE != base_prv->profile, ENCODER_PROFILE_ERR, "vaapi encoder profile not set.\n");
+
+  ENCODER_ASSERT(ENCODER_DISPLAY(encoder));
+
+#ifdef _MRST_
+  /* Moorestown path: build the context object directly and attach the
+   * caller-provided surface pool. */
+  out_context = g_object_new(
+      GST_VAAPI_TYPE_CONTEXT,
+      "display", display,
+      "id", GST_VAAPI_ID(VA_INVALID_ID),
+      "entrypoint", gst_vaapi_entrypoint(VAEntrypointEncSlice),
+      "width", ENCODER_WIDTH(encoder),
+      "height", ENCODER_HEIGHT(encoder),
+      NULL
+  );
+  if (surfaces_pool) {
+    gst_vaapi_context_set_surface_pool(out_context, surfaces_pool);
+  }
+  g_object_set(out_context, "profile", gst_vaapi_profile(base_prv->profile), NULL);
+
+#else
+  VAAPI_UNUSED_ARG(surfaces_pool);
+  out_context = gst_vaapi_context_new(display,
+      gst_vaapi_profile(base_prv->profile),
+      gst_vaapi_entrypoint(VAEntrypointEncSlice),
+      ENCODER_WIDTH(encoder),
+      ENCODER_HEIGHT(encoder));
+#endif
+  ENCODER_CHECK_STATUS(out_context, ENCODER_CONTEXT_ERR, "gst_vaapi_context_new failed.\n");
+  ENCODER_CHECK_STATUS(VA_INVALID_ID != GST_VAAPI_OBJECT_ID(out_context), ENCODER_CONTEXT_ERR, "gst_vaapi_context_new failed.\n");
+
+  /* let the codec-specific subclass allocate its own resources first */
+  if (base_class->pre_alloc_resource) {
+    ENCODER_CHECK_STATUS(base_class->pre_alloc_resource(base_encoder, display, out_context),
+        ENCODER_MEM_ERR, "encoder <pre_alloc_resource> failed.\n");
+  }
+  ENCODER_CHECK_STATUS(
+      base_encoder_alloc_coded_buffers(base_encoder, display, out_context),
+      ENCODER_MEM_ERR,
+      "encoder <base_encoder_alloc_coded_buffers> failed.\n"
+  );
+  *context = out_context;
+  return ENCODER_NO_ERROR;
+
+end:
+  // clear resources
+  if (ENCODER_NO_ERROR != ret) {
+    gst_vaapi_base_encoder_close_default(encoder, display, out_context);
+    if (out_context) {
+      g_object_unref(out_context);
+    }
+  }
+  return ret;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_close_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+  /* Default <close> vfunc: release subclass resources, then the shared
+   * coded buffers, and reset the frame counter. Always succeeds. */
+  GstVaapiBaseEncoder* base_encoder = GST_VAAPI_BASE_ENCODER(encoder);
+  GstVaapiBaseEncoderClass *base_class = GST_VAAPI_BASE_ENCODER_GET_CLASS(encoder);
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(encoder);
+  EncoderStatus ret = ENCODER_NO_ERROR;
+
+  /* release buffers first */
+  if (base_class->release_resource) {
+    base_class->release_resource(base_encoder, display, context);
+  }
+  base_encoder_release_coded_buffers(base_encoder, display, context);
+  base_prv->frame_count = 0;
+
+  return ret;
+}
+
+static gboolean
+base_encoder_alloc_coded_buffers(GstVaapiBaseEncoder *base_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+  /* Allocate <coded_buf_num> VA coded buffers for the context and publish
+   * them all on the available_code_buffers queue.
+   * Returns TRUE on success. On failure the buffers created so far are
+   * destroyed and coded_bufs is reset to NULL, so nothing leaks and a later
+   * re-open does not trip the !coded_bufs assertion below. */
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+
+  ENCODER_ASSERT(display && context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  gboolean is_locked = FALSE;
+  guint i = 0;
+  gboolean ret = TRUE;
+  /* heuristic worst-case size: 400 bytes per 16x16 macroblock */
+  guint32 buffer_size = (ENCODER_WIDTH(base_encoder) * ENCODER_HEIGHT(base_encoder) * 400) / (16*16);
+
+  ENCODER_ASSERT(base_prv->available_code_buffers);
+  ENCODER_ASSERT(!base_prv->coded_bufs);
+
+  base_prv->coded_bufs = (VABufferID*)g_malloc0(base_prv->coded_buf_num * sizeof(base_prv->coded_bufs[0]));
+
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+  for (i = 0; i < base_prv->coded_buf_num; i++) {
+    va_status = vaCreateBuffer(va_dpy, context_id, VAEncCodedBufferType,
+        buffer_size, 1, NULL, &base_prv->coded_bufs[i]);
+    if (VA_STATUS_SUCCESS != va_status)
+      break;
+  }
+  if (VA_STATUS_SUCCESS != va_status) {
+    /* roll back the buffers created before the failure */
+    while (i > 0) {
+      vaDestroyBuffer(va_dpy, base_prv->coded_bufs[--i]);
+    }
+  }
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  if (VA_STATUS_SUCCESS != va_status) {
+    g_free(base_prv->coded_bufs);
+    base_prv->coded_bufs = NULL;
+  }
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, FALSE, "create coded buffer failed.\n");
+
+  /* init queue available_code_buffers */
+  g_mutex_lock(base_prv->code_buffer_lock);
+  for (i = 0; i < base_prv->coded_buf_num; i++) {
+    g_queue_push_head(base_prv->available_code_buffers, &base_prv->coded_bufs[i]);
+  }
+  g_cond_signal(base_prv->code_buffer_cond);
+  g_mutex_unlock(base_prv->code_buffer_lock);
+
+end:
+  return ret;
+
+}
+
+static EncoderStatus
+base_encoder_release_coded_buffers(GstVaapiBaseEncoder *base_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+  /* Wait until every coded buffer has been returned to the free queue,
+   * destroy the VA buffers, and free the id array. Freeing and NULLing
+   * coded_bufs pairs with the !coded_bufs assertion in
+   * base_encoder_alloc_coded_buffers(), so the encoder can be re-opened. */
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+  guint32 available_buf_count = base_prv->coded_buf_num;
+  guint32 i;
+  gboolean is_locked = FALSE;
+
+  ENCODER_ASSERT(display);
+  ENCODER_ASSERT(context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+  /* wait clear all available coded buffers*/
+  g_mutex_lock(base_prv->code_buffer_lock);
+  while (available_buf_count) {
+    if (g_queue_is_empty(base_prv->available_code_buffers)) {
+      g_cond_wait(base_prv->code_buffer_cond, base_prv->code_buffer_lock);
+    } else {
+      g_queue_pop_head(base_prv->available_code_buffers);
+      available_buf_count--;
+    }
+  }
+  g_mutex_unlock(base_prv->code_buffer_lock);
+
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+  for (i = 0; i < base_prv->coded_buf_num; i++) {
+    va_status = vaDestroyBuffer(va_dpy, base_prv->coded_bufs[i]);
+  }
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+
+  /* free the id array allocated in base_encoder_alloc_coded_buffers() */
+  g_free(base_prv->coded_bufs);
+  base_prv->coded_bufs = NULL;
+
+  return ENCODER_NO_ERROR;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_encode_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+    GstVaapiContext *context, GstBuffer *raw_pic, GList **coded_pics)
+{
+  /* Default <encode> vfunc. Pipeline for one frame:
+   *   1. obtain a VA surface holding the raw picture (either directly from a
+   *      GstVaapiVideoBuffer, or by copying raw_pic into a context surface);
+   *   2. vaBeginPicture on that surface;
+   *   3. take a free coded buffer and let the subclass <prepare_frame> fill
+   *      the picture/slice parameters;
+   *   4. vaEndPicture, then collect the coded data via
+   *      base_query_encoding_status() into *coded_pics.
+   * The VA calls are order-critical; do not reorder. */
+  GstVaapiBaseEncoder* base_encoder = GST_VAAPI_BASE_ENCODER(encoder);
+  GstVaapiBaseEncoderClass *base_class = GST_VAAPI_BASE_ENCODER_GET_CLASS(encoder);
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  gboolean is_key = FALSE;
+  VABufferID* coded_buf = NULL;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VASurfaceID buffer_surface_id = VA_INVALID_SURFACE;
+  GstVaapiSurface *buffer_surface = NULL;
+
+  gboolean is_locked = FALSE;
+
+  ENCODER_ASSERT(display && context);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+  VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+  GstVaapiSurface *new_surface = NULL;
+
+  /* load picture to surface */
+  if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
+    buffer_surface = gst_vaapi_video_buffer_get_surface(GST_VAAPI_VIDEO_BUFFER(raw_pic));
+  } else {
+    /* non-VAAPI input: borrow a surface from the context and upload into it */
+    new_surface = gst_vaapi_context_get_surface(context);
+    buffer_surface = new_surface;
+    ENCODER_CHECK_STATUS(buffer_surface, ENCODER_SURFACE_ERR, "base_pop_free_surface failed.\n");
+
+    /* put picture to new surface */
+    va_status = base_put_raw_buffer_to_surface(base_encoder, display, raw_pic, buffer_surface);
+    ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "va put buffer to surface failed.\n");
+  }
+  buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
+  ENCODER_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, ENCODER_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
+
+  /* begin surface*/
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+  va_status = vaBeginPicture(va_dpy, context_id, buffer_surface_id);
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaBeginPicture error.\n");
+
+  /*get valid coded buffer; blocks until one is available*/
+  coded_buf = pop_available_coded_buffer(base_prv);
+  ENCODER_CHECK_STATUS(coded_buf, ENCODER_ENC_RES_ERR, "dequeue_available_coded_buffer error.\n");
+
+  /* prepare frame*/
+  ret = base_class->prepare_frame(base_encoder, display, context,
+      buffer_surface, base_prv->frame_count,
+      *coded_buf, &is_key);
+  /* prepare failed, push back */
+  if (ENCODER_NO_ERROR != ret) {
+    push_available_coded_buffer(base_prv, coded_buf);
+  }
+  ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ENCODER_PICTURE_ERR, "base_prepare_encoding failed.\n");
+
+  /* end picture */
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+  va_status = vaEndPicture(va_dpy, context_id);
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaEndPicture error.\n");
+
+  /*query surface result*/
+  ret = base_query_encoding_status(base_encoder, display, buffer_surface,
+      is_key, GST_BUFFER_TIMESTAMP(raw_pic), GST_BUFFER_DURATION(raw_pic), coded_buf, coded_pics);
+  if (ENCODER_NO_ERROR != ret) {
+    goto end;
+  }
+
+  base_prv->frame_count++;
+
+end:
+  /* NOTE(review): this release presumably no-ops when the lock is not held
+   * (is_locked bookkeeping in the macros) — confirm macro definitions. */
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  if (new_surface) {
+    gst_vaapi_context_put_surface(context, new_surface);
+  }
+  return ret;
+}
+
+static VABufferID *
+pop_available_coded_buffer(GstVaapiBaseEncoderPrivate *base_prv)
+{
+  /* Take a free coded-buffer id from the queue, blocking on the condition
+   * variable until one is available. Returns NULL only if the queue object
+   * itself is missing. */
+  VABufferID *coded_buf = NULL;
+  /* NOTE(review): `ret` is presumably assigned by ENCODER_CHECK_STATUS on
+   * failure but is otherwise unused here — confirm the macro definition. */
+  gboolean ret = TRUE;
+
+  g_mutex_lock(base_prv->code_buffer_lock);
+
+  ENCODER_CHECK_STATUS(base_prv->available_code_buffers, FALSE, "coded buffer not found");
+  while (g_queue_is_empty(base_prv->available_code_buffers)) {
+    g_cond_wait(base_prv->code_buffer_cond, base_prv->code_buffer_lock);
+  }
+  coded_buf = (VABufferID*)g_queue_pop_head (base_prv->available_code_buffers);
+
+end:
+  g_mutex_unlock(base_prv->code_buffer_lock);
+  return coded_buf;
+}
+
+static gboolean
+push_available_coded_buffer(GstVaapiBaseEncoderPrivate *base_prv, VABufferID *buf)
+{
+  /* Return a coded-buffer id to the free queue and wake up one waiter
+   * blocked in pop_available_coded_buffer(). Always returns TRUE. */
+  GMutex *lock = base_prv->code_buffer_lock;
+
+  g_mutex_lock(lock);
+  g_queue_push_head(base_prv->available_code_buffers, buf);
+  g_cond_signal(base_prv->code_buffer_cond);
+  g_mutex_unlock(lock);
+
+  return TRUE;
+}
+
+static EncoderStatus
+base_put_raw_buffer_to_surface(GstVaapiBaseEncoder *base_encoder,
+    GstVaapiDisplay *display,
+    GstBuffer *raw_pic,
+    GstVaapiSurface *surface)
+{
+  /* Copy the raw picture data (I420 or NV12, per base_prv->format) into the
+   * image derived from <surface>. Fixes vs. the previous revision:
+   *  - the input-size assertion ran before y_size/u_size were computed (it
+   *    compared against 0 and was vacuous) — it now runs after;
+   *  - derive/map results are checked, and the mapped image is unmapped
+   *    before being released. */
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VAImage surface_image;
+  VADisplay va_dpy;
+  GstVaapiImage *image = NULL;
+  GstVaapiImageFormat image_format;
+  guint8 *y_src = NULL, *u_src = NULL, *v_src = NULL;
+  guint8 *y_dst = NULL, *u_dst = NULL, *v_dst = NULL;
+  int y_size = 0, u_size = 0;
+  int row = 0, col = 0;
+  guint32 plane_count = 0;
+  guint32 image_width = 0, image_height = 0;
+  guint32 pitchy = 0, pitchu = 0, pitchv = 0;
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+
+  ENCODER_ASSERT(display);
+  va_dpy = gst_vaapi_display_get_display(display);
+  /*map image*/
+  image = gst_vaapi_surface_derive_image(surface);
+  ENCODER_CHECK_STATUS(image, ENCODER_SURFACE_ERR, "gst_vaapi_surface_derive_image failed.\n");
+  if (!gst_vaapi_image_map(image)) {
+    g_object_unref(image);
+    image = NULL;
+    ENCODER_CHECK_STATUS(FALSE, ENCODER_SURFACE_ERR, "gst_vaapi_image_map failed.\n");
+  }
+
+  image_format = gst_vaapi_image_get_format(image);
+  image_width = gst_vaapi_image_get_width(image);
+  image_height = gst_vaapi_image_get_height(image);
+
+  /* plane sizes of the incoming raw picture */
+  y_size = ENCODER_WIDTH(base_encoder) * ENCODER_HEIGHT(base_encoder);
+  u_size = ((ENCODER_WIDTH(base_encoder)+1) >> 1) * ((ENCODER_HEIGHT(base_encoder)+1) >> 1);
+
+  /* the input buffer must hold at least one full 4:2:0 picture
+   * (checked only after the sizes are actually computed) */
+  ENCODER_ASSERT(GST_BUFFER_SIZE(raw_pic) >= y_size + (y_size>>1));
+
+  y_src = GST_BUFFER_DATA(raw_pic);
+  u_src = y_src + y_size;
+  v_src = u_src + u_size;
+
+  plane_count = gst_vaapi_image_get_plane_count(image);
+  y_dst = gst_vaapi_image_get_plane(image, 0);
+  u_dst = gst_vaapi_image_get_plane(image, 1);
+  pitchy = gst_vaapi_image_get_pitch(image, 0);
+  pitchu = gst_vaapi_image_get_pitch(image, 1);
+
+  if (plane_count > 2) {
+    v_dst = gst_vaapi_image_get_plane(image, 2);
+    pitchv = gst_vaapi_image_get_pitch(image, 2);
+  }
+
+  /* copy from avcenc.c*/
+  /* Y plane */
+  for (row = 0; row < image_height; row++) {
+    memcpy(y_dst, y_src, image_width);
+    y_dst += pitchy;
+    y_src += ENCODER_WIDTH(base_encoder);
+  }
+
+  if (GST_VAAPI_IMAGE_NV12 == image_format) { /* UV plane */
+    if (GST_VAAPI_IMAGE_I420 == base_prv->format) {
+      /* interleave the planar U/V source into the NV12 UV plane */
+      for (row = 0; row < image_height / 2; row++) {
+        for (col = 0; col < image_width / 2; col++) {
+          u_dst[col * 2] = u_src[col];
+          u_dst[col * 2 + 1] = v_src[col];
+        }
+
+        u_dst += pitchu;
+        u_src += (ENCODER_WIDTH(base_encoder)>>1);
+        v_src += (ENCODER_WIDTH(base_encoder)>>1);
+      }
+    } else if (GST_VAAPI_IMAGE_NV12 == base_prv->format){
+      for (row = 0; row < image_height / 2; row++) {
+        memcpy(u_dst, u_src, image_width);
+        u_src += ENCODER_WIDTH(base_encoder);
+        u_dst += pitchu;
+      }
+    } else {
+      ENCODER_ASSERT(0);
+    }
+  } else {
+    /* FIXME: fix this later */
+    ENCODER_ASSERT(0);
+  }
+
+  /* unmap, then release the derived image */
+  gst_vaapi_image_unmap(image);
+  g_object_unref(image);
+ end:
+  return ret;
+}
+
+static EncoderStatus
+base_query_encoding_status(GstVaapiBaseEncoder *base_encoder,
+    GstVaapiDisplay *display,
+    GstVaapiSurface *buffer_surface,
+    gboolean is_key,
+    GstClockTime timestamp,
+    GstClockTime duration,
+    VABufferID *coded_buf,
+    GList **coded_pics)
+{
+  /* Synchronize on the encode surface, map the coded buffer, and append the
+   * coded data to *coded_pics as GstBuffers (sharing or copying depending on
+   * SHARE_CODED_BUF / the subclass hook). When the data was copied, the
+   * coded buffer is unmapped and returned to the free queue here; when it is
+   * shared, the share-buffer finalize does that instead.
+   * Fix vs. the previous revision: the entry assertion referenced `context`,
+   * which is not a parameter of this function. */
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  VAStatus va_status = VA_STATUS_SUCCESS;
+  VASurfaceStatus surface_status = 0;
+  VACodedBufferSegment *buf_list = NULL;
+  GstBuffer* ret_buffer = NULL;
+  gboolean has_coded_data = FALSE;
+  gboolean is_locked = FALSE;
+  GstVaapiBaseEncoderClass *base_class = GST_VAAPI_BASE_ENCODER_GET_CLASS(base_encoder);
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+
+  ENCODER_ASSERT(display && buffer_surface);
+  VASurfaceID surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+  ENCODER_ASSERT(coded_pics && *coded_pics == NULL);
+
+  /* lock display */
+  ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+
+  va_status = vaSyncSurface(va_dpy, surface_id);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaSyncSurface failed.\n");
+
+  va_status = vaQuerySurfaceStatus(va_dpy, surface_id, &surface_status);
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaQuerySurfaceStatus failed.\n");
+  if (VASurfaceSkipped&surface_status) {
+    ENCODER_LOG_ERROR("frame skipped, dts:%" GST_TIME_FORMAT ".\n", GST_TIME_ARGS(timestamp));
+  }
+
+  va_status = vaMapBuffer(va_dpy, *coded_buf, (void **)(&buf_list));
+  ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaMapBuffer failed.\n");
+
+  /*unlock display*/
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+
+  while (buf_list != NULL) {
+    if (base_prv->frame_notify_flag && base_class->notify_frame) {
+      base_class->notify_frame(base_encoder, buf_list->buf, buf_list->size);
+    }
+
+    if (base_class->copy_coded_frame) {
+      ret_buffer = base_class->copy_coded_frame(
+          base_encoder, buf_list->buf,
+          buf_list->size, coded_buf);
+    } else {
+      ret_buffer = gst_vaapi_base_encoder_copy_buffer_default(
+          base_encoder, buf_list->buf,
+          buf_list->size, coded_buf);
+    }
+    GST_BUFFER_TIMESTAMP(ret_buffer) = timestamp;
+    GST_BUFFER_DURATION(ret_buffer) = duration;
+    if (!is_key) {
+      GST_BUFFER_FLAG_SET(ret_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+    }
+    *coded_pics = g_list_append(*coded_pics, ret_buffer);
+    buf_list = (VACodedBufferSegment*)buf_list->next;
+    /* single-segment assumption: the driver is expected to return exactly
+     * one coded segment per frame */
+    ENCODER_ASSERT(NULL == buf_list);
+    has_coded_data = TRUE;
+  }
+
+#if SHARE_CODED_BUF
+  if (!has_coded_data)
+#endif
+  { // if non-related, push back to available_code_buffers
+    ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+    vaUnmapBuffer(va_dpy, *coded_buf);
+    ENCODER_RELEASE_DISPLAY_LOCK(display);
+    push_available_coded_buffer(base_prv, coded_buf);
+  }
+
+  return ENCODER_NO_ERROR;
+
+end:
+  /*unlock display*/
+  ENCODER_RELEASE_DISPLAY_LOCK(display);
+  return ret;
+}
+
+static GstBuffer *
+gst_vaapi_base_encoder_copy_buffer_default(GstVaapiBaseEncoder *encoder,
+    guint8 *frame,
+    guint32 frame_size,
+    VABufferID *coded_buf)
+{
+  /* Default <copy_coded_frame> hook: either wrap the mapped coded data in a
+   * zero-copy share buffer (SHARE_CODED_BUF) or duplicate it into a plain
+   * GstBuffer. Returns a new buffer owned by the caller. */
+  GstBuffer *ret_buffer = NULL;
+#if SHARE_CODED_BUF
+  /* GST_BUFFER() cast: the share buffer is a GstBuffer subclass, the
+   * previous implicit pointer conversion was an incompatible-pointer
+   * assignment */
+  ret_buffer = GST_BUFFER(gst_base_encode_share_buffer_new(encoder, coded_buf));
+  ENCODER_ASSERT(ret_buffer);
+  GST_BUFFER_MALLOCDATA(ret_buffer) = NULL;
+  GST_BUFFER_DATA(ret_buffer) = frame;
+  GST_BUFFER_SIZE(ret_buffer) = frame_size;
+#else
+  ret_buffer = gst_buffer_new_and_alloc(frame_size);
+  memcpy(GST_BUFFER_DATA(ret_buffer), frame, frame_size);
+#endif
+  return ret_buffer;
+}
+
+EncoderStatus
+gst_vaapi_base_encoder_flush_default(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+    GstVaapiContext *context, GList **coded_pics)
+{
+  /* Default <flush> vfunc: only resets the frame counter; no pending coded
+   * pictures are produced. */
+  GstVaapiBaseEncoder* base_encoder = GST_VAAPI_BASE_ENCODER(encoder);
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  GstVaapiBaseEncoderPrivate *base_prv = GST_VAAPI_BASE_ENCODER_GET_PRIVATE(base_encoder);
+
+  base_prv->frame_count = 0;
+  /*do we need destroy base_prv->seq_parameter? */
+
+  //end:
+  return ret;
+}
+
+
--- /dev/null
+#ifndef GST_VAAPI_BASE_ENCODER_H
+#define GST_VAAPI_BASE_ENCODER_H
+
+#include "gstvaapiencoder.h"
+
+G_BEGIN_DECLS
+
+typedef struct _GstVaapiBaseEncoder GstVaapiBaseEncoder;
+typedef struct _GstVaapiBaseEncoderPrivate GstVaapiBaseEncoderPrivate;
+typedef struct _GstVaapiBaseEncoderClass GstVaapiBaseEncoderClass;
+
+#define GST_TYPE_VAAPI_BASE_ENCODER (gst_vaapi_base_encoder_get_type())
+#define GST_IS_VAAPI_BASE_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VAAPI_BASE_ENCODER))
+#define GST_IS_VAAPI_BASE_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VAAPI_BASE_ENCODER))
+#define GST_VAAPI_BASE_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VAAPI_BASE_ENCODER, GstVaapiBaseEncoderClass))
+#define GST_VAAPI_BASE_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VAAPI_BASE_ENCODER, GstVaapiBaseEncoder))
+#define GST_VAAPI_BASE_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VAAPI_BASE_ENCODER, GstVaapiBaseEncoderClass))
+#define GST_VAAPI_BASE_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_VAAPI_BASE_ENCODER, GstVaapiBaseEncoderPrivate))
+
+
+/* Base encoder instance: all state lives in the private structure. */
+struct _GstVaapiBaseEncoder {
+  GstVaapiEncoder parent;
+};
+
+/* Subclass hooks invoked by the default vfunc implementations. */
+struct _GstVaapiBaseEncoderClass {
+  GstVaapiEncoderClass parent_class;
+
+  /* in <open> function*/
+  gboolean (*validate_attributes) (GstVaapiBaseEncoder* encoder);
+  gboolean (*pre_alloc_resource) (GstVaapiBaseEncoder *base_encoder, GstVaapiDisplay *display, GstVaapiContext *context);
+
+  /* in <close> function */
+  gboolean (*release_resource) (GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context);
+
+  /* in <encode>: fill picture/slice parameters for one frame; sets *is_key */
+  EncoderStatus (*prepare_frame) (GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+      GstVaapiContext *context, GstVaapiSurface *surface,
+      guint frame_index, VABufferID coded_buf, gboolean *is_key);
+  /* optional: called with the mapped coded data of each frame */
+  void (*notify_frame) (GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size);
+  /* optional: override how coded data is turned into a GstBuffer */
+  GstBuffer *(*copy_coded_frame) (GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size, VABufferID *coded_buf);
+};
+
+/*
+struct _GstVaapiBaseEncoderPrivate {
+ GstVaapiDisplay *display;
+ GstVaapiContext *context;
+ //VAAPI_Encode_State state;
+};
+*/
+
+GType gst_vaapi_base_encoder_get_type(void);
+void gst_vaapi_base_encoder_set_frame_notify(GstVaapiBaseEncoder *encoder, gboolean flag);
+gboolean gst_vaapi_base_encoder_set_va_profile(GstVaapiBaseEncoder *encoder, guint profile);
+void gst_vaapi_base_encoder_set_input_format(GstVaapiBaseEncoder* encoder, guint32 format);
+
+
+G_END_DECLS
+
+#endif
+
--- /dev/null
+#include "gstvaapiencode.h"
+
+#include <string.h>
+#include <X11/Xlib.h>
+
+#include "gst/vaapi/gstvaapivideobuffer.h"
+#include "gst/vaapi/gstvaapisurfacepool.h"
+
+#include "gstvaapih264encode.h"
+#include "gstvaapih263encode.h"
+#include "gstvaapimpeg4encode.h"
+
+/* gst_debug
+ GST_DEBUG_CATEGORY_STATIC (gst_vaapi_encode_debug)
+ #define GST_CAT_DEFAULT gst_vaapi_encode_debug
+ //class_init
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_encode_debug, "vaapiencode", 0,
+ "vaapiencode element");
+*/
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_encode_debug);
+#define GST_CAT_DEFAULT gst_vaapi_encode_debug
+
+#define GST_VAAPI_ENCODE_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VAAPI_ENCODE, GstVaapiEncodePrivate))
+
+typedef struct _GstVaapiEncodePrivate GstVaapiEncodePrivate;
+
+
+GST_BOILERPLATE(
+ GstVaapiEncode,
+ gst_vaapi_encode,
+ GstElement,
+ GST_TYPE_ELEMENT);
+
+enum {
+ PROP_0,
+};
+
+
+/*static extern*/
+static void gst_vaapi_encode_finalize(GObject *object);
+static void gst_vaapi_encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec);
+static void gst_vaapi_encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_vaapi_encode_set_caps(GstPad *sink_pad, GstCaps *caps);
+static GstCaps *gst_vaapi_encode_get_caps(GstPad *sink_pad);
+static GstFlowReturn gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf);
+static GstStateChangeReturn gst_vaapi_encode_change_state(GstElement *element, GstStateChange transition);
+static GstFlowReturn gst_vaapi_encode_buffer_alloc(GstPad * pad, guint64 offset, guint size,
+ GstCaps * caps, GstBuffer ** buf);
+
+static char* _encode_dump_caps(GstCaps *cpas);
+
+/*gst fix functions*/
+
+/* GstElement base_init: intentionally empty.  Pad templates and element
+ * details are registered by each codec subclass (h264/h263/mpeg4); the
+ * #if 0 block is kept only as a reference skeleton. */
+static void
+gst_vaapi_encode_base_init(gpointer klass)
+{
+    #if 0
+    GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+    gst_element_class_set_details(element_class, &gst_vaapi_encode_details);
+
+    /* sink pad */
+    gst_element_class_add_pad_template(
+        element_class,
+        gst_static_pad_template_get(&gst_vaapi_encode_sink_factory)
+    );
+
+    /* src pad */
+    gst_element_class_add_pad_template(
+        element_class,
+        gst_static_pad_template_get(&gst_vaapi_encode_src_factory)
+    );
+    #endif
+}
+
+
+/* Class init: wire GObject vfuncs (finalize/properties), install the
+ * element state-change handler, and set up the debug category. */
+static void
+gst_vaapi_encode_class_init(GstVaapiEncodeClass *klass)
+{
+    GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+    GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+    object_class->finalize = gst_vaapi_encode_finalize;
+    object_class->set_property = gst_vaapi_encode_set_property;
+    object_class->get_property = gst_vaapi_encode_get_property;
+
+    GST_DEBUG_CATEGORY_INIT (gst_vaapi_encode_debug, "vaapiencode", 0,
+        "vaapiencode element");
+
+    element_class->change_state = gst_vaapi_encode_change_state;
+
+    /* Registering debug symbols for function pointers */
+    GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_change_state);
+    GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_get_caps);
+    GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_set_caps);
+    GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_chain);
+    GST_DEBUG_REGISTER_FUNCPTR (gst_vaapi_encode_buffer_alloc);
+}
+
+/* Finalize: drop cached caps and tear the encoder down in state-machine
+ * order (close -> uninitialize -> unref).  The pads themselves are owned
+ * and destroyed by the GstElement base class. */
+static void
+gst_vaapi_encode_finalize(GObject *object)
+{
+    GstVaapiEncode * const encode = GST_VAAPI_ENCODE(object);
+
+    if (encode->sinkpad_caps) {
+        gst_caps_unref(encode->sinkpad_caps);
+        encode->sinkpad_caps = NULL;
+    }
+    encode->sinkpad = NULL;
+
+    if (encode->srcpad_caps) {
+        gst_caps_unref(encode->srcpad_caps);
+        encode->srcpad_caps = NULL;
+    }
+    encode->srcpad = NULL;
+
+    if (encode->encoder) {
+        /* mirrors the encoder lifecycle: release context, then display */
+        gst_vaapi_encoder_close(encode->encoder);
+        gst_vaapi_encoder_uninitialize(encode->encoder);
+        gst_vaapi_encoder_unref(encode->encoder);
+        encode->encoder = NULL;
+    }
+
+    G_OBJECT_CLASS(parent_class)->finalize(object);
+}
+
+/* Instance init: create sink/src pads from the subclass' pad templates
+ * and install the sink-pad callbacks (caps, chain, bufferalloc). */
+static void
+gst_vaapi_encode_init(GstVaapiEncode *encode, GstVaapiEncodeClass *klass)
+{
+    GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+    encode->sinkpad_caps = NULL;
+    encode->srcpad_caps = NULL;
+    /* both flags trigger one-time setup in the chain function */
+    encode->first_sink_frame = TRUE;
+    encode->first_src_frame = TRUE;
+
+    encode->encoder = NULL;
+
+    /*sink pad */
+    encode->sinkpad = gst_pad_new_from_template(
+        gst_element_class_get_pad_template(element_class, "sink"),
+        "sink"
+    );
+    gst_pad_set_getcaps_function(encode->sinkpad, gst_vaapi_encode_get_caps);
+    gst_pad_set_setcaps_function(encode->sinkpad, gst_vaapi_encode_set_caps);
+    gst_pad_set_chain_function(encode->sinkpad, gst_vaapi_encode_chain);
+    gst_pad_set_bufferalloc_function(encode->sinkpad, gst_vaapi_encode_buffer_alloc);
+    /*gst_pad_set_event_function(encode->sinkpad, gst_vaapi_encode_sink_event); */
+    /*gst_pad_use_fixed_caps(encode->sinkpad);*/
+    gst_element_add_pad(GST_ELEMENT(encode), encode->sinkpad);
+
+    /* src pad: caps are fixated once the first frame is seen */
+    encode->srcpad = gst_pad_new_from_template(
+        gst_element_class_get_pad_template(element_class, "src"),
+        "src"
+    );
+    encode->srcpad_caps = NULL;
+
+    gst_pad_use_fixed_caps(encode->srcpad);
+    /*gst_pad_set_event_function(encode->srcpad, gst_vaapi_encode_src_event);*/
+    gst_element_add_pad(GST_ELEMENT(encode), encode->srcpad);
+}
+
+
+/* GObject set_property.  No properties are defined yet (PROP_0 only);
+ * warn on unknown ids instead of silently ignoring them, per GObject
+ * convention (the original empty switch swallowed everything). */
+static void
+gst_vaapi_encode_set_property(GObject *object, guint prop_id,
+                              const GValue *value, GParamSpec *pspec)
+{
+    GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+    ENCODER_ASSERT(encode->encoder);
+    VAAPI_UNUSED_ARG(value);
+
+    switch (prop_id) {
+        default:
+            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
+            break;
+    }
+}
+
+/* GObject get_property.  See set_property: no properties exist yet, so
+ * any id reaching here is invalid and should be reported. */
+static void
+gst_vaapi_encode_get_property (GObject * object, guint prop_id,
+                               GValue * value, GParamSpec * pspec)
+{
+    GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+    ENCODER_ASSERT(encode->encoder);
+    VAAPI_UNUSED_ARG(value);
+
+    switch (prop_id) {
+        default:
+            G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
+            break;
+    }
+}
+
+/* Sink-pad setcaps: cache the negotiated caps and copy width/height and
+ * frame rate into the encoder object. */
+static gboolean
+gst_vaapi_encode_set_caps(GstPad *sink_pad, GstCaps *caps)
+{
+    GstVaapiEncode *encode = GST_VAAPI_ENCODE(GST_OBJECT_PARENT(sink_pad));
+    GstStructure *structure;
+    gint width = 0, height = 0;
+    gint fps_n = 0, fps_d = 0;
+    const GValue *fps_value = NULL;
+
+    /* Replace — not just overwrite — the cached sink caps: unref the old
+     * reference first so repeated renegotiation does not leak caps. */
+    gst_caps_ref(caps);
+    if (encode->sinkpad_caps) {
+        gst_caps_unref(encode->sinkpad_caps);
+    }
+    encode->sinkpad_caps = caps;
+    ENCODER_LOG_INFO("gst_vaapi_encode_set_caps,\n%s", _encode_dump_caps(caps));
+
+    structure = gst_caps_get_structure (caps, 0);
+    if (gst_structure_get_int (structure, "width", &width)) {
+        encode->encoder->width = width;
+    }
+    if (gst_structure_get_int (structure, "height", &height)) {
+        encode->encoder->height = height;
+    }
+    fps_value = gst_structure_get_value (structure, "framerate");
+    if (fps_value) {
+        fps_n = gst_value_get_fraction_numerator (fps_value);
+        fps_d = gst_value_get_fraction_denominator (fps_value);
+        if (fps_d > 0) {    /* guard against division by zero */
+            encode->encoder->frame_rate = fps_n / fps_d;
+        }
+    }
+    return TRUE;
+}
+
+/* Sink-pad getcaps: return a new reference to the already-negotiated
+ * caps when available, otherwise a copy of the pad template caps. */
+static GstCaps *
+gst_vaapi_encode_get_caps(GstPad *sink_pad)
+{
+    GstVaapiEncode * const encode = GST_VAAPI_ENCODE(GST_OBJECT_PARENT(sink_pad));
+
+    if (!encode->sinkpad_caps)
+        return gst_caps_copy(gst_pad_get_pad_template_caps(sink_pad));
+
+    gst_caps_ref(encode->sinkpad_caps);
+    ENCODER_LOG_INFO("get caps,\n%s", _encode_dump_caps(encode->sinkpad_caps));
+    return encode->sinkpad_caps;
+}
+
+/* Element state-change handler: chain up, then perform downward-
+ * transition cleanup.  Only a hard FAILURE aborts early — an ASYNC
+ * return must still run the PAUSED->READY cleanup below (the original
+ * compared against SUCCESS, skipping the encoder close on async
+ * transitions, and always returned SUCCESS at the end). */
+static GstStateChangeReturn
+gst_vaapi_encode_change_state(GstElement *element, GstStateChange transition)
+{
+    GstVaapiEncode * const encode = GST_VAAPI_ENCODE(element);
+    GstStateChangeReturn ret;
+
+    /* nothing to do on upward transitions yet */
+    switch (transition) {
+        case GST_STATE_CHANGE_READY_TO_PAUSED:
+            break;
+        case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+            break;
+        default:
+            break;
+    }
+
+    ret = GST_ELEMENT_CLASS(parent_class)->change_state(element, transition);
+    if (ret == GST_STATE_CHANGE_FAILURE)
+        return ret;
+
+    switch (transition) {
+        case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+            break;
+        case GST_STATE_CHANGE_PAUSED_TO_READY:
+            gst_vaapi_encoder_close(encode->encoder);
+            break;
+        default:
+            break;
+    }
+    return ret;    /* propagate the parent's result (may be ASYNC) */
+}
+
+
+/* Sink-pad chain function.  On the very first buffer it reads the caps,
+ * configures and opens the encoder and builds the src caps; afterwards
+ * every buffer is fed to the encoder and any coded buffers it produced
+ * are pushed downstream.  Consumes the reference to @buf in all paths. */
+static GstFlowReturn
+gst_vaapi_encode_chain(GstPad *sink_pad, GstBuffer *buf)
+{
+    GstFlowReturn ret = GST_FLOW_OK;
+    GstVaapiEncode *encode = GST_VAAPI_ENCODE(GST_OBJECT_PARENT(sink_pad));
+    EncoderStatus encoder_ret = ENCODER_NO_ERROR;
+    GList *out_buffers = NULL;
+    GstBuffer *tmp_buffer = NULL;
+
+    /* NOTE(review): static counters are shared across all element
+     * instances and are not thread-safe; they are debug aids only. */
+    static guint input_count = 0;
+    static guint output_count = 0;
+
+    ENCODER_ASSERT(encode && encode->encoder);
+    if (encode->first_sink_frame) {
+        /* get first buffer caps and set encoder values */
+        GstStructure *recv_struct, *src_struct;
+        GstCaps *recv_caps = GST_BUFFER_CAPS(buf);
+        gint width = 0, height = 0;    /* stay 0 when caps omit the field */
+        GValue const *framerate, *format_value;
+        gint fps_n = 0, fps_d = 1;     /* defaults if "framerate" is absent */
+        guint32 format;
+        GstVaapiSurfacePool *surface_pool = NULL;
+
+        /* validate the caps BEFORE dereferencing them (the original ran
+         * gst_caps_get_structure() first and NULL-checked afterwards) */
+        ENCODER_CHECK_STATUS(NULL != recv_caps, GST_FLOW_ERROR, "gst_vaapi_encode_chain, 1st buffer didn't have detailed caps.\n");
+        ENCODER_LOG_INFO("gst_vaapi_encode_chain 1st recv-buffer caps,\n%s", _encode_dump_caps(recv_caps));
+
+        recv_struct = gst_caps_get_structure (recv_caps, 0);
+        if (gst_structure_get_int (recv_struct, "width", &width)) {
+            encode->encoder->width = width;
+        }
+        if (gst_structure_get_int (recv_struct, "height", &height)) {
+            encode->encoder->height = height;
+        }
+        framerate = gst_structure_get_value (recv_struct, "framerate");
+        if (framerate) {
+            fps_n = gst_value_get_fraction_numerator (framerate);
+            fps_d = gst_value_get_fraction_denominator (framerate);
+            if (fps_d > 0) {    /* avoid division by zero */
+                encode->encoder->frame_rate = fps_n / fps_d;
+            }
+        }
+        format_value = gst_structure_get_value (recv_struct, "format");
+        if (format_value && GST_IS_H264ENCODE(encode)) {
+            ENCODER_CHECK_STATUS(format_value && GST_TYPE_FOURCC == G_VALUE_TYPE(format_value),
+                GST_FLOW_ERROR, "1st buffer caps' format type is not fourcc.\n");
+            format = gst_value_get_fourcc (format_value);
+            if (format) {
+                /* NOTE(review): passes a GstVaapiEncoder* where the base-
+                 * encoder API is expected; relies on the h264 encoder
+                 * deriving from GstVaapiBaseEncoder — confirm. */
+                gst_vaapi_base_encoder_set_input_format(encode->encoder, format);
+            }
+        }
+
+        /* build src pad caps from the template, fixated with the stream
+         * geometry and frame rate (fps defaults to 0/1 if unknown) */
+        if (encode->srcpad_caps) {
+            gst_caps_unref(encode->srcpad_caps);
+        }
+        encode->srcpad_caps = gst_caps_copy(gst_pad_get_pad_template_caps(encode->srcpad));
+        src_struct = gst_caps_get_structure(encode->srcpad_caps, 0);
+        gst_structure_set(src_struct, "width", G_TYPE_INT, width,
+                          "height", G_TYPE_INT, height,
+                          "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
+
+        /* pick up the VA display (and, on Moorestown, the surface pool)
+         * from the incoming VA video buffer, then init/open the encoder */
+        if (GST_VAAPI_IS_VIDEO_BUFFER(buf)) {
+            GstVaapiDisplay *display = NULL;
+            GstVaapiVideoBuffer *video_buffer = GST_VAAPI_VIDEO_BUFFER(buf);
+            ENCODER_ASSERT(video_buffer);
+            display = gst_vaapi_video_buffer_get_display(video_buffer);
+
+            #ifdef _MRST_
+            surface_pool = GST_VAAPI_SURFACE_POOL(gst_vaapi_video_buffer_get_surface_pool(video_buffer));
+            #endif
+            if (display) {
+                ENCODER_CHECK_STATUS(gst_vaapi_encoder_set_display(encode->encoder, display),
+                    GST_FLOW_ERROR, "set display failed in gst_vaapi_encode_chain.\n");
+            }
+        }
+        encoder_ret = gst_vaapi_encoder_initialize(encode->encoder);
+        ENCODER_CHECK_STATUS (ENCODER_NO_ERROR == encoder_ret, GST_FLOW_ERROR, "gst_vaapi_encoder_initialize failed.\n");
+        #ifdef _MRST_
+        encoder_ret = gst_vaapi_encoder_open(encode->encoder, surface_pool);
+        #else
+        encoder_ret = gst_vaapi_encoder_open(encode->encoder, NULL);
+        #endif
+        ENCODER_CHECK_STATUS (ENCODER_NO_ERROR == encoder_ret, GST_FLOW_ERROR, "gst_vaapi_encoder_open failed.\n");
+
+        encode->first_sink_frame = FALSE;
+    }
+
+    /* encode the frame; may emit zero or more coded buffers */
+    ENCODER_ASSERT(gst_vaapi_encoder_get_state(encode->encoder) >= VAAPI_ENC_OPENED);
+    ++input_count;
+    ENCODER_LOG_INFO("input %u\n", input_count);
+    encoder_ret = gst_vaapi_encoder_encode(encode->encoder, buf, &out_buffers);
+    ENCODER_CHECK_STATUS (ENCODER_NO_ERROR == encoder_ret, GST_FLOW_ERROR, "gst_vaapi_encoder_encode failed.\n");
+
+    /* push every coded buffer downstream */
+    while (out_buffers) {
+        tmp_buffer = out_buffers->data;
+        out_buffers = g_list_remove(out_buffers, tmp_buffer);
+        if (encode->first_src_frame) {
+            GstBuffer *codec_data = NULL;
+            ENCODER_ASSERT(encode->srcpad_caps);
+            /* attach codec_data to the src caps when the encoder has some */
+            if (ENCODER_NO_ERROR == gst_vaapi_encoder_get_codec_data(encode->encoder, &codec_data) && codec_data) {
+                gst_caps_set_simple(encode->srcpad_caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
+            }
+            gst_pad_set_caps (encode->srcpad, encode->srcpad_caps);
+            /* gst_buffer_set_caps() takes its own ref and releases any caps
+             * the buffer already carried (the original assigned
+             * GST_BUFFER_CAPS directly, leaking a pre-existing caps ref) */
+            gst_buffer_set_caps(tmp_buffer, encode->srcpad_caps);
+            ENCODER_LOG_INFO("gst_vaapi_encode_chain 1st push-buffer caps,\n%s", _encode_dump_caps(encode->srcpad_caps));
+            encode->first_src_frame = FALSE;
+        }
+        ++output_count;
+        ENCODER_LOG_INFO("output:%u, %" GST_TIME_FORMAT ", 0x%s\n",
+            output_count,
+            GST_TIME_ARGS(GST_BUFFER_TIMESTAMP(tmp_buffer)),
+            vaapi_encoder_dump_bytes(GST_BUFFER_DATA(tmp_buffer),
+                (GST_BUFFER_SIZE(tmp_buffer) > 16 ? 16 : GST_BUFFER_SIZE(tmp_buffer))));
+        gst_pad_push(encode->srcpad, tmp_buffer);
+    }
+
+end:
+    gst_mini_object_unref(GST_MINI_OBJECT(buf));
+    return ret;
+}
+
+/* Sink-pad bufferalloc: hand upstream a VA surface buffer when the caps
+ * are video/x-vaapi-surface (or unknown), otherwise a plain sysmem
+ * buffer of @size bytes. */
+static GstFlowReturn
+gst_vaapi_encode_buffer_alloc(GstPad * pad, guint64 offset, guint size,
+                   GstCaps * caps, GstBuffer ** buf)
+{
+    GstVaapiEncode * const encode = GST_VAAPI_ENCODE(GST_OBJECT_PARENT(pad));
+    GstStructure *structure = NULL;
+    GstBuffer *buffer;
+    GstVaapiDisplay* display = NULL;
+    GstFlowReturn ret = GST_FLOW_ERROR;
+
+    if (caps) {
+        structure = gst_caps_get_structure(caps, 0);
+    }
+    if (!structure || gst_structure_has_name(structure, "video/x-vaapi-surface")) {
+        ENCODER_ASSERT(encode->encoder);
+        /* lazily initialize the encoder so a VA display exists */
+        display = gst_vaapi_encoder_get_display(encode->encoder);
+        if (!display) {
+            gst_vaapi_encoder_initialize(encode->encoder);
+            display = gst_vaapi_encoder_get_display(encode->encoder);
+            ENCODER_CHECK_STATUS(display, GST_FLOW_ERROR, "gst_vaapi_encoder_get_display failed in gst_vaapi_encode_buffer_alloc.\n");
+        }
+        buffer = gst_vaapi_video_buffer_new(display);
+    } else { /* video/x-raw-yuv */
+        buffer = gst_buffer_new_and_alloc(size);
+    }
+
+    ENCODER_CHECK_STATUS(buffer, GST_FLOW_ERROR, "gst_vaapi_encode_buffer_alloc failed.\n");
+
+    GST_BUFFER_OFFSET (buffer) = offset;
+    if (caps) {
+        gst_buffer_set_caps(buffer, caps);
+    }
+    *buf = buffer;
+    ret = GST_FLOW_OK;
+
+end:
+    /* get_display() returned a new reference; drop it on every path */
+    if (display) {
+        g_object_unref(display);
+    }
+    return ret;
+}
+
+
+/* Render @caps as a human-readable multi-line string for debug logging.
+ * Returns a pointer to a static buffer: NOT thread-safe and only valid
+ * until the next call; output is truncated if the caps are huge. */
+static char*
+_encode_dump_caps(GstCaps *caps)
+{
+    guint i = 0, j = 0;
+    GstStructure const *structure;
+    GValue const *value;
+    static char caps_string[4096*5];
+    char *tmp;
+
+    char *cur = caps_string;
+    char * const buf_end = caps_string + sizeof(caps_string);
+    memset(caps_string, 0, sizeof(caps_string));
+    for (i = 0; i < gst_caps_get_size(caps); i++) {
+        structure = gst_caps_get_structure(caps, i);
+        const char* caps_name = gst_structure_get_name (structure);
+        /* snprintf (not sprintf) so oversized caps cannot overflow the
+         * static buffer; %u matches the unsigned index */
+        snprintf(cur, buf_end - cur, "cap_%02u:%s\n", i, caps_name);
+        cur += strlen(cur);
+
+        for (j = 0; j < gst_structure_n_fields(structure); j++) {
+            const char* name = gst_structure_nth_field_name(structure, j);
+            value = gst_structure_get_value(structure, name);
+            tmp = gst_value_serialize(value);
+            snprintf(cur, buf_end - cur, "\t%s:%s(%s)\n", name, tmp, G_VALUE_TYPE_NAME(value));
+            cur += strlen(cur);
+            g_free(tmp);
+        }
+    }
+
+    return caps_string;
+}
+
+/* plugin register*/
+/* Plugin init: register every VA-API encoder element this plugin
+ * provides.  Returns FALSE if any registration failed. */
+static gboolean
+vaapi_encode_sets_init (GstPlugin * plugin)
+{
+    gboolean success = TRUE;
+
+    if (!gst_element_register (plugin, "vah264encode", GST_RANK_PRIMARY,
+                               GST_TYPE_H264ENCODE))
+        success = FALSE;
+    if (!gst_element_register (plugin, "vah263encode", GST_RANK_PRIMARY,
+                               GST_TYPE_H263ENCODE))
+        success = FALSE;
+    if (!gst_element_register (plugin, "vampeg4encode", GST_RANK_PRIMARY,
+                               GST_TYPE_MPEG4ENCODE))
+        success = FALSE;
+    return success;
+}
+
+#define PACKAGE "libvaapiencode"
+#define VERSION "0.1.0"
+
+/* gstreamer looks for this structure to register the vaapiencode plugin */
+GST_PLUGIN_DEFINE (
+    GST_VERSION_MAJOR,
+    GST_VERSION_MINOR,
+    "vaapiencode",
+    "Vaapi Encoder",
+    vaapi_encode_sets_init,
+    VERSION,
+    "LGPL",
+    "gstreamer-vaapi",
+    "http://gstreamer.net/")
+
+
--- /dev/null
+/*
+ * gstvaapiencode.h - VA-API video encode
+ *
+ * gstreamer-vaapi (C) 2010-2011 Splitted-Desktop Systems
+ * Copyright (C) 2011 Intel Corporation
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
+ */
+
+#ifndef GST_VAAPI_ENCODE_H
+#define GST_VAAPI_ENCODE_H
+
+#include <gst/gst.h>
+#include "gstvaapiencoder.h"
+
+G_BEGIN_DECLS
+
+/* Default templates */
+#define GST_CAPS_CODEC(CODEC) \
+ CODEC ", " \
+ "width = (int) [ 1, MAX ], " \
+ "height = (int) [ 1, MAX ]; "
+
+
+#define GST_TYPE_VAAPI_ENCODE (gst_vaapi_encode_get_type())
+#define GST_IS_VAAPI_ENCODE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VAAPI_ENCODE))
+#define GST_IS_VAAPI_ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VAAPI_ENCODE))
+#define GST_VAAPI_ENCODE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VAAPI_ENCODE, GstVaapiEncodeClass))
+#define GST_VAAPI_ENCODE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VAAPI_ENCODE, GstVaapiEncode))
+#define GST_VAAPI_ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VAAPI_ENCODE, GstVaapiEncodeClass))
+
+
+typedef struct _GstVaapiEncode GstVaapiEncode;
+typedef struct _GstVaapiEncodeClass GstVaapiEncodeClass;
+
+/* Abstract base element for the VA-API encoders; codec subclasses add
+ * their pad templates and a concrete GstVaapiEncoder. */
+struct _GstVaapiEncode {
+    GstElement parent_instance;
+
+    GstPad *sinkpad;               /* element sink pad */
+    GstCaps *sinkpad_caps;         /* caps cached at negotiation time */
+
+    GstPad *srcpad;                /* element src pad (coded stream) */
+    GstCaps *srcpad_caps;          /* built from template + stream info */
+
+    GstVaapiEncoder *encoder;      /* codec-specific encoder object */
+    gboolean first_sink_frame;     /* TRUE until first input buffer seen */
+    gboolean first_src_frame;      /* TRUE until first coded buffer pushed */
+};
+
+struct _GstVaapiEncodeClass {
+    GstElementClass parent_class;
+};
+
+GType gst_vaapi_encode_get_type(void);
+
+G_END_DECLS
+
+#endif /* GST_VAAPI_ENCODE_H */
+
--- /dev/null
+#include "gstvaapiencoder.h"
+
+#include <string.h>
+
+#include "gst/vaapi/gstvaapidisplay_x11.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_encoder_debug
+
+
+G_DEFINE_TYPE(GstVaapiEncoder, gst_vaapi_encoder, G_TYPE_OBJECT);
+
+/* forward declarations (fixed: the init prototype was missing the space
+ * between `void' and the function name, which does not compile) */
+static void gst_vaapi_encoder_class_init(GstVaapiEncoderClass *kclass);
+static void gst_vaapi_encoder_init(GstVaapiEncoder *encoder);
+static void gst_vaapi_encoder_finalize(GObject *object);
+
+/* Class init: register the private struct, hook finalize, and leave all
+ * codec vfuncs NULL — subclasses must fill in the ones they support. */
+static void
+gst_vaapi_encoder_class_init(GstVaapiEncoderClass *kclass)
+{
+    GObjectClass * const object_class = G_OBJECT_CLASS(kclass);
+    g_type_class_add_private(kclass, sizeof(GstVaapiEncoderPrivate));
+
+
+    GST_DEBUG_CATEGORY_INIT (gst_vaapi_encoder_debug, "gst_va_encoder", 0,
+        "gst_va_encoder element");
+
+    object_class->finalize = gst_vaapi_encoder_finalize;
+    kclass->initialize = NULL;
+    kclass->uninitialize = NULL;
+    kclass->open = NULL;
+    kclass->close = NULL;
+    kclass->encode = NULL;
+    kclass->flush = NULL;
+    kclass->get_codec_data = NULL;
+}
+
+/* Instance init: the encoder starts with no display/context, in the
+ * NULL lifecycle state, and with unknown video geometry. */
+static void
+gst_vaapi_encoder_init(GstVaapiEncoder *encoder)
+{
+    GstVaapiEncoderPrivate *priv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+    ENCODER_ASSERT(priv);
+
+    priv->display = NULL;
+    priv->context = NULL;
+    priv->state = VAAPI_ENC_NULL;
+
+    /* filled in later from the negotiated caps */
+    encoder->width = 0;
+    encoder->height = 0;
+    encoder->frame_rate = 0;
+}
+
+/* Finalize: wind the state machine back to NULL (closing context and
+ * display via uninitialize) and drop any remaining references. */
+static void
+gst_vaapi_encoder_finalize(GObject *object)
+{
+    GstVaapiEncoder* encoder = GST_VAAPI_ENCODER(object);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(object);
+    if (VAAPI_ENC_NULL != encoder_prv->state) {
+        gst_vaapi_encoder_uninitialize(encoder);
+    }
+
+    /* normally already released by uninitialize; these are safety nets */
+    if (encoder_prv->context) {
+        g_object_unref(encoder_prv->context);
+        encoder_prv->context = NULL;
+    }
+
+    if (encoder_prv->display) {
+        g_object_unref(encoder_prv->display);
+        encoder_prv->display = NULL;
+    }
+
+    G_OBJECT_CLASS (gst_vaapi_encoder_parent_class)->finalize (object);
+}
+
+
+/* Set the VA display to encode with.  Only allowed before the context
+ * is opened (state <= INIT); setting the same display is a no-op. */
+gboolean
+gst_vaapi_encoder_set_display(GstVaapiEncoder* encoder, GstVaapiDisplay *display)
+{
+    GstVaapiEncoderPrivate *priv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (display == priv->display)
+        return TRUE;
+    if (priv->state > VAAPI_ENC_INIT)
+        return FALSE;
+
+    if (priv->display) {
+        g_object_unref(priv->display);
+        priv->display = NULL;
+    }
+    priv->display = g_object_ref(display);
+    return TRUE;
+}
+
+/* Return a new reference to the encoder's display, or NULL if unset. */
+GstVaapiDisplay *
+gst_vaapi_encoder_get_display(GstVaapiEncoder* encoder)
+{
+    GstVaapiEncoderPrivate *priv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (!priv->display)
+        return NULL;
+    return g_object_ref(priv->display);
+}
+
+/* Return a new reference to the encoder's context, or NULL if unset. */
+GstVaapiContext *
+gst_vaapi_encoder_get_context(GstVaapiEncoder* encoder)
+{
+    GstVaapiEncoderPrivate *priv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (!priv->context)
+        return NULL;
+    return g_object_ref(priv->context);
+}
+
+
+/* Return the encoder's lifecycle state (NULL/INIT/OPENED/ENCODING). */
+VAAPI_Encode_State
+gst_vaapi_encoder_get_state(GstVaapiEncoder* encoder)
+{
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+    return encoder_prv->state;
+}
+
+
+/* NULL -> INIT: ensure a VA display exists (an X11 display is created
+ * on demand) and run the subclass <initialize> hook.  Idempotent when
+ * already in INIT; an error if called from OPENED/ENCODING. */
+EncoderStatus
+gst_vaapi_encoder_initialize(GstVaapiEncoder* encoder)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    /* check state */
+    if (VAAPI_ENC_INIT == encoder_prv->state) {
+        return ENCODER_NO_ERROR;
+    }
+    ENCODER_ASSERT(VAAPI_ENC_NULL == encoder_prv->state);
+    if (VAAPI_ENC_NULL != encoder_prv->state) {
+        return ENCODER_STATE_ERR;
+    }
+
+    /* create va_dpy*/
+    if (!encoder_prv->display) {
+        encoder_prv->display = gst_vaapi_display_x11_new(NULL);
+        ENCODER_CHECK_STATUS(encoder_prv->display, ENCODER_DISPLAY_ERR, "gst_vaapi_display_x11_new failed.\n");
+    }
+
+    if (encoder_class->initialize) {
+        ret = encoder_class->initialize(encoder, encoder_prv->display);
+        ENCODER_CHECK_STATUS (ENCODER_NO_ERROR == ret, ret, "encoder <initialize> failed.\n");
+    }
+    encoder_prv->state = VAAPI_ENC_INIT;
+
+end:
+    return ret;
+}
+
+/* INIT -> OPENED: run the subclass <open> hook, which creates the VA
+ * context.  Idempotent when already OPENED; an error in other states.
+ * @private_data is passed through to the subclass (the surface pool on
+ * Moorestown, NULL otherwise). */
+EncoderStatus
+gst_vaapi_encoder_open(GstVaapiEncoder* encoder, void* private_data)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    /* check state */
+    if (VAAPI_ENC_OPENED == encoder_prv->state) {
+        return ENCODER_NO_ERROR;
+    }
+    ENCODER_ASSERT(VAAPI_ENC_INIT == encoder_prv->state);
+    if (VAAPI_ENC_INIT != encoder_prv->state) {
+        return ENCODER_STATE_ERR;
+    }
+    /* the context is created by the subclass <open> below, so it must
+     * not exist yet (the original asserted the opposite condition) */
+    ENCODER_ASSERT(NULL == encoder_prv->context);
+
+    ENCODER_CHECK_STATUS(encoder_class->open, ENCODER_FUNC_PTR_ERR, "encoder <open> function pointer empty.\n");
+    ret = encoder_class->open(encoder, encoder_prv->display, private_data, &encoder_prv->context);
+    ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ret, "encoder <open> failed.\n");
+    ENCODER_CHECK_STATUS(encoder_prv->context, ENCODER_CONTEXT_ERR, "encoder <open> context failed.\n");
+
+    encoder_prv->state = VAAPI_ENC_OPENED;
+
+end:
+    return ret;
+}
+
+/* Encode one raw picture via the subclass <encode> hook.  Coded output
+ * buffers (zero or more) are returned through @coded_pics.  Moves the
+ * state to ENCODING on first use; requires at least OPENED. */
+EncoderStatus
+gst_vaapi_encoder_encode(GstVaapiEncoder* encoder, GstBuffer *raw_pic, GList **coded_pics)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    ENCODER_CHECK_STATUS(encoder_prv->state >= VAAPI_ENC_OPENED, ENCODER_STATE_ERR, "encoder was not opened before <encode>.\n");
+    ENCODER_CHECK_STATUS(encoder_class->encode, ENCODER_FUNC_PTR_ERR, "encoder <encode> function pointer empty.\n");
+    ret = encoder_class->encode(encoder, encoder_prv->display, encoder_prv->context, raw_pic, coded_pics);
+    ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ret, "encoder <encode> failed.\n");
+    if (encoder_prv->state < VAAPI_ENC_ENCODING) {
+        encoder_prv->state = VAAPI_ENC_ENCODING;
+    }
+end:
+    return ret;
+}
+
+/* Fetch codec-specific out-of-band data (e.g. for the "codec_data" caps
+ * field) via the optional subclass hook.  *codec_data is set to NULL
+ * when the subclass provides none; requires at least OPENED. */
+EncoderStatus gst_vaapi_encoder_get_codec_data(GstVaapiEncoder* encoder, GstBuffer **codec_data)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    ENCODER_CHECK_STATUS(encoder_prv->state >= VAAPI_ENC_OPENED, ENCODER_STATE_ERR, "encoder was not opened before <get_codec_data>.\n");
+    if (!encoder_class->get_codec_data) {
+        *codec_data = NULL;
+        ENCODER_LOG_INFO("There's no codec_data");
+        return ret;
+    }
+    ret = encoder_class->get_codec_data(encoder, codec_data);
+
+end:
+    return ret;
+}
+
+/* Drain all pending frames via the subclass <flush> hook, returning any
+ * remaining coded buffers through @coded_pics.  Drops ENCODING back to
+ * OPENED; an error if the encoder was never opened. */
+EncoderStatus
+gst_vaapi_encoder_flush(GstVaapiEncoder* encoder, GList **coded_pics)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (encoder_prv->state < VAAPI_ENC_OPENED) {
+        return ENCODER_STATE_ERR;
+    }
+    ENCODER_CHECK_STATUS(encoder_class->flush, ENCODER_FUNC_PTR_ERR, "encoder <flush> function pointer empty.\n");
+    ret = encoder_class->flush(encoder, encoder_prv->display, encoder_prv->context, coded_pics);
+    ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ret, "encoder <flush> failed.\n");
+    if (encoder_prv->state > VAAPI_ENC_OPENED) {
+        encoder_prv->state = VAAPI_ENC_OPENED;
+    }
+end:
+    return ret;
+}
+
+/* OPENED/ENCODING -> INIT: run the subclass <close> hook and release
+ * the VA context.  The context is unreffed and the state reset to INIT
+ * even if the subclass hook fails (note the cleanup after `end:'). */
+EncoderStatus
+gst_vaapi_encoder_close(GstVaapiEncoder* encoder)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (VAAPI_ENC_INIT >= encoder_prv->state) {
+        return ENCODER_NO_ERROR;
+    }
+    ENCODER_CHECK_STATUS(encoder_class->close, ENCODER_FUNC_PTR_ERR, "encoder <close> function pointers empty.\n");
+    ret = encoder_class->close(encoder, encoder_prv->display, encoder_prv->context);
+    ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ret, "encoder <close> failed.\n");
+end:
+    if (encoder_prv->context) {
+        g_object_unref(encoder_prv->context);
+        encoder_prv->context = NULL;
+    }
+
+    encoder_prv->state = VAAPI_ENC_INIT;
+    return ret;
+}
+
+/* Any state -> NULL: close the context if needed, run the optional
+ * subclass <uninitialize> hook, then release the display.  The display
+ * is unreffed and the state reset even on hook failure (see `end:'). */
+EncoderStatus
+gst_vaapi_encoder_uninitialize(GstVaapiEncoder* encoder)
+{
+    EncoderStatus ret = ENCODER_NO_ERROR;
+    GstVaapiEncoderClass *encoder_class = GST_VAAPI_ENCODER_GET_CLASS(encoder);
+    GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+
+    if (VAAPI_ENC_NULL == encoder_prv->state) {
+        return ENCODER_NO_ERROR;
+    }
+
+    if (VAAPI_ENC_INIT < encoder_prv->state) {
+        ret = gst_vaapi_encoder_close(encoder);
+    }
+    ENCODER_ASSERT(VAAPI_ENC_INIT == encoder_prv->state);
+    if (encoder_class->uninitialize) {
+        ret = encoder_class->uninitialize(encoder, encoder_prv->display);
+        ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ret, "encoder <uninitialize> failed.\n");
+    }
+end:
+    if (encoder_prv->display) {
+        g_object_unref(encoder_prv->display);
+        encoder_prv->display = NULL;
+    }
+    encoder_prv->state = VAAPI_ENC_NULL;
+    return ret;
+
+}
+
+/* Format the first @num bytes of @buf as a hex string for debug logs.
+ * Returns a static buffer — NOT thread-safe, valid until the next call;
+ * output stops silently once the buffer is full. */
+char *vaapi_encoder_dump_bytes(const guint8 *buf, guint32 num)
+{
+    static char tmp[1024];
+    guint32 i = 0;
+    memset(tmp, 0, sizeof(tmp));
+
+    char *p = tmp;
+    for (i = 0; i < num; i++) {
+        /* need room for two hex digits plus the terminating NUL */
+        if ((size_t)(p - tmp) + 3 > sizeof(tmp))
+            break;
+        /* sizeof(tmp) instead of a magic 1024, so the bound tracks the
+         * buffer if its size ever changes */
+        snprintf(p, sizeof(tmp) - (p - tmp), "%02x", (guint8)buf[i]);
+        p += strlen(p);
+    }
+    return tmp;
+}
+
--- /dev/null
+#ifndef GST_VAAPI_ENCODER_H
+#define GST_VAAPI_ENCODER_H
+
+#include <stdio.h>
+#include <stdint.h>
+
+#include "gst/gstbuffer.h"
+#include "gst/vaapi/gstvaapidisplay.h"
+#include "gst/vaapi/gstvaapicontext.h"
+
+G_BEGIN_DECLS
+
+#define ENCODER_NO_ERROR 0
+#define ENCODER_MEM_ERR -1
+#define ENCODER_DISPLAY_ERR -2
+#define ENCODER_CONFIG_ERR -3
+#define ENCODER_CONTEXT_ERR -3
+#define ENCODER_STATE_ERR -4
+#define ENCODER_ENC_RES_ERR -5
+#define ENCODER_PICTURE_ERR -6
+#define ENCODER_SURFACE_ERR -7
+#define ENCODER_QUERY_STATUS_ERR -8
+#define ENCODER_DATA_NOT_READY -9
+#define ENCODER_DATA_ERR -10
+#define ENCODER_PROFILE_ERR -11
+#define ENCODER_PARAMETER_ERR -12
+#define ENCODER_FUNC_PTR_ERR -13
+
+#define ENCODER_LOG_ERROR(...) fprintf(stdout, ## __VA_ARGS__)
+#define ENCODER_LOG_DEBUG(...) fprintf(stdout, ## __VA_ARGS__)
+#define ENCODER_LOG_INFO(...) fprintf(stdout, ## __VA_ARGS__)
+
+#define VAAPI_UNUSED_ARG(arg) (void)(arg)
+
+#ifdef DEBUG
+#include <assert.h>
+#define ENCODER_ASSERT(exp) assert(exp)
+#else
+#define ENCODER_ASSERT(exp)
+#endif
+
+#define ENCODER_CHECK_STATUS(exp, err_num, err_reason, ...) \
+ ENCODER_ASSERT(exp); \
+ if (!(exp)) { \
+ ret = err_num; \
+ ENCODER_LOG_ERROR(err_reason, ## __VA_ARGS__); \
+ goto end; \
+ }
+
+/* must have <gboolean is_locked = FALSE;> declared first*/
+#define ENCODER_ACQUIRE_DISPLAY_LOCK(display) \
+ if (!is_locked) { \
+ GST_VAAPI_DISPLAY_LOCK(display); \
+ is_locked = TRUE; \
+ }
+
+#define ENCODER_RELEASE_DISPLAY_LOCK(display) \
+ if (is_locked) { \
+ GST_VAAPI_DISPLAY_UNLOCK(display); \
+ is_locked = FALSE; \
+ }
+
+
+typedef enum {
+ VAAPI_ENC_NULL,
+ VAAPI_ENC_INIT,
+ VAAPI_ENC_OPENED,
+ VAAPI_ENC_ENCODING,
+} VAAPI_Encode_State;
+
+typedef int EncoderStatus;
+typedef struct _GstVaapiEncoder GstVaapiEncoder;
+typedef struct _GstVaapiEncoderPrivate GstVaapiEncoderPrivate;
+typedef struct _GstVaapiEncoderClass GstVaapiEncoderClass;
+
+#define GST_TYPE_VAAPI_ENCODER (gst_vaapi_encoder_get_type())
+#define GST_IS_VAAPI_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VAAPI_ENCODER))
+#define GST_IS_VAAPI_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VAAPI_ENCODER))
+#define GST_VAAPI_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VAAPI_ENCODER, GstVaapiEncoderClass))
+#define GST_VAAPI_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VAAPI_ENCODER, GstVaapiEncoder))
+#define GST_VAAPI_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VAAPI_ENCODER, GstVaapiEncoderClass))
+#define GST_VAAPI_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_VAAPI_ENCODER, GstVaapiEncoderPrivate))
+
+#define ENCODER_DISPLAY(encoder) (GST_VAAPI_ENCODER_GET_PRIVATE(encoder)->display)
+#define ENCODER_CONTEXT(encoder) (GST_VAAPI_ENCODER_GET_PRIVATE(encoder)->context)
+#define ENCODER_WIDTH(encoder) (((GstVaapiEncoder*)(encoder))->width)
+#define ENCODER_HEIGHT(encoder) (((GstVaapiEncoder*)(encoder))->height)
+#define ENCODER_FPS(encoder) (((GstVaapiEncoder*)(encoder))->frame_rate)
+
+/* Abstract encoder base object.  Lifecycle (see VAAPI_Encode_State):
+ * initialize -> open -> encode/flush -> close -> uninitialize. */
+struct _GstVaapiEncoder {
+    GObject parent;
+
+    guint32 width;       /* input frame width in pixels */
+    guint32 height;      /* input frame height in pixels */
+    guint32 frame_rate;  /* integer frames per second */
+};
+
+/* Subclass hooks; display/context are managed by the base object and
+ * passed into each hook. */
+struct _GstVaapiEncoderClass {
+    GObjectClass parent_class;
+
+    EncoderStatus (*initialize) (GstVaapiEncoder* encoder, GstVaapiDisplay *display);  /* can be NULL */
+    EncoderStatus (*uninitialize) (GstVaapiEncoder* encoder, GstVaapiDisplay *display); /* can be NULL */
+
+    /* context [out] */
+    EncoderStatus (*open) (GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+                           void* private_data, GstVaapiContext **context);
+
+    EncoderStatus (*close) (GstVaapiEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context);
+    /* coded_pics [out] */
+    EncoderStatus (*encode) (GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+                             GstVaapiContext *context, GstBuffer *raw_pic, GList **coded_pics);
+    /* coded_pics [out] */
+    EncoderStatus (*flush) (GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+                            GstVaapiContext *context, GList **coded_pics);
+
+    EncoderStatus (*get_codec_data)(GstVaapiEncoder* encoder, GstBuffer **codec_data); /* can be NULL */
+};
+
+/* Private state owned by the base object (never touched by subclasses
+ * directly; see the ENCODER_DISPLAY/ENCODER_CONTEXT accessor macros). */
+struct _GstVaapiEncoderPrivate {
+    GstVaapiDisplay *display;   /* VA display, created/adopted at initialize */
+    GstVaapiContext *context;   /* VA context, created by the <open> hook */
+    VAAPI_Encode_State state;   /* current lifecycle state */
+};
+
+GType gst_vaapi_encoder_get_type(void);
+
+/* set/get display */
+gboolean gst_vaapi_encoder_set_display(GstVaapiEncoder* encoder, GstVaapiDisplay *display);
+GstVaapiDisplay *gst_vaapi_encoder_get_display(GstVaapiEncoder* encoder);
+
+/* get context */
+GstVaapiContext *gst_vaapi_encoder_get_context(GstVaapiEncoder* encoder);
+
+/* get encoding state */
+VAAPI_Encode_State gst_vaapi_encoder_get_state(GstVaapiEncoder* encoder);
+
+/* check/open display */
+EncoderStatus gst_vaapi_encoder_initialize(GstVaapiEncoder* encoder);
+
+/* check/open context */
+EncoderStatus gst_vaapi_encoder_open(GstVaapiEncoder* encoder, void* private_data);
+
+/* encode one frame */
+EncoderStatus gst_vaapi_encoder_encode(GstVaapiEncoder* encoder, GstBuffer *raw_pic, GList **coded_pics);
+
+EncoderStatus gst_vaapi_encoder_get_codec_data(GstVaapiEncoder* encoder, GstBuffer **codec_data);
+
+/* flush all frames */
+EncoderStatus gst_vaapi_encoder_flush(GstVaapiEncoder* encoder, GList **coded_pics);
+
+/* close context */
+EncoderStatus gst_vaapi_encoder_close(GstVaapiEncoder* encoder);
+
+/* close display */
+EncoderStatus gst_vaapi_encoder_uninitialize(GstVaapiEncoder* encoder);
+
+/* Convenience: drop a reference to @encoder (wraps g_object_unref). */
+static inline void gst_vaapi_encoder_unref (GstVaapiEncoder *encoder)
+{
+    g_object_unref (encoder);
+}
+
+/* other functions */
+char *vaapi_encoder_dump_bytes(const guint8 *buf, guint32 num);
+
+
+G_END_DECLS
+
+#endif
+
--- /dev/null
+#include "gstvaapih263encode.h"
+#include "gstvaapih263encoder.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h263_encode_debug);
+#define GST_CAT_DEFAULT gst_vaapi_h263_encode_debug
+
+#define GST_VAAPI_ENCODE_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VAAPI_ENCODE, GstVaapiEncodePrivate))
+
+static const char gst_h263encode_sink_caps_str[] =
+ GST_CAPS_CODEC("video/x-vaapi-surface ")
+ ;
+
+static const GstElementDetails gst_h263encode_details =
+ GST_ELEMENT_DETAILS(
+ "VA-API h263 encoder",
+ "Codec/Encoder/Video",
+ "A VA-API based h263 encoder",
+ "Feng Yuan <feng.yuan@intel.com>");
+
+
+static const char gst_h263encode_src_caps_str[] =
+ GST_CAPS_CODEC("video/x-h263");
+
+static GstStaticPadTemplate gst_h263encode_sink_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_h263encode_sink_caps_str));
+
+static GstStaticPadTemplate gst_h263encode_src_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_h263encode_src_caps_str));
+
+static void gst_h263encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec);
+static void gst_h263encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+/* h263 encode */
+GST_BOILERPLATE(
+ GstH263Encode,
+ gst_h263encode,
+ GstVaapiEncode,
+ GST_TYPE_VAAPI_ENCODE);
+
+/* GObject property IDs for the vaapih263encode element. */
+enum {
+ H263_PROP_0,
+ H263_PROP_BITRATE,
+ H263_PROP_INTRA_PERIOD,
+ H263_PROP_INIT_QP,
+ H263_PROP_MIN_QP,
+};
+
+
+static void
+gst_h263encode_base_init(gpointer klass)
+{
+ GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+ /* Install the element metadata and the static sink/src pad templates. */
+ gst_element_class_set_details(element_class, &gst_h263encode_details);
+
+ gst_element_class_add_pad_template(element_class,
+ gst_static_pad_template_get(&gst_h263encode_sink_factory));
+ gst_element_class_add_pad_template(element_class,
+ gst_static_pad_template_get(&gst_h263encode_src_factory));
+}
+
+static void
+gst_h263encode_class_init(GstH263EncodeClass *klass)
+{
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_h263_encode_debug, "vaapih263encode", 0,
+ "vaapih263encode element");
+
+ /* object_class->finalize = gst_h263encode_finalize; */
+ object_class->set_property = gst_h263encode_set_property;
+ object_class->get_property = gst_h263encode_get_property;
+
+ g_object_class_install_property (object_class, H263_PROP_BITRATE,
+ g_param_spec_uint ("bitrate",
+ "H263 encoding bitrate",
+ "H263 encoding bitrate, 10k~100M, (0, auto-calculate)",
+ 0,
+ 100*1000*1000,
+ 0,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H263_PROP_INTRA_PERIOD,
+ g_param_spec_uint ("intra-period",
+ "H263 encoding intra-period",
+ "H263 encoding intra-period",
+ 1,
+ 300,
+ H263_DEFAULT_INTRA_PERIOD,
+ G_PARAM_READWRITE));
+ /* H.263 quantiser values are 5-bit, i.e. range 1..31 (see the init_qp/
+ * min_qp field comments in GstH263Encoder). The previous 1..51 bound was
+ * copied from the H.264 element and allowed out-of-range QP values. */
+ g_object_class_install_property (object_class, H263_PROP_INIT_QP,
+ g_param_spec_uint ("init-qp",
+ "H263 init-qp",
+ "H263 init-qp (1~31)",
+ 1,
+ 31,
+ H263_DEFAULT_INIT_QP,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H263_PROP_MIN_QP,
+ g_param_spec_uint ("min-qp",
+ "H263 min-qp",
+ "H263 min-qp (1~31)",
+ 1,
+ 31,
+ H263_DEFAULT_MIN_QP,
+ G_PARAM_READWRITE));
+}
+
+static void
+gst_h263encode_init(GstH263Encode *h263_encode, GstH263EncodeClass *klass)
+{
+ /* Create the wrapped H.263 encoder object. The unused element_class
+ * local has been dropped. */
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(h263_encode);
+ encode->encoder = GST_VAAPI_ENCODER(gst_h263_encoder_new());
+ ENCODER_ASSERT(encode->encoder);
+}
+
+/* GObject property setter: forwards the element properties to the
+ * underlying GstH263Encoder object. */
+static void
+gst_h263encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstH263Encoder *encoder = GST_H263_ENCODER(encode->encoder);
+
+ ENCODER_ASSERT(encoder);
+
+ switch (prop_id) {
+ case H263_PROP_BITRATE:
+ encoder->bitrate = g_value_get_uint(value);
+ break;
+ case H263_PROP_INTRA_PERIOD:
+ encoder->intra_period = g_value_get_uint(value);
+ break;
+ case H263_PROP_INIT_QP:
+ encoder->init_qp = g_value_get_uint(value);
+ break;
+ case H263_PROP_MIN_QP:
+ encoder->min_qp = g_value_get_uint(value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter: reads the current values back from the
+ * underlying GstH263Encoder object. */
+static void
+gst_h263encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstH263Encoder *encoder = GST_H263_ENCODER(encode->encoder);
+
+ ENCODER_ASSERT(encoder);
+
+ switch (prop_id) {
+ case H263_PROP_BITRATE:
+ g_value_set_uint (value, encoder->bitrate);
+ break;
+ case H263_PROP_INTRA_PERIOD:
+ g_value_set_uint (value, encoder->intra_period);
+ break;
+ case H263_PROP_INIT_QP:
+ g_value_set_uint (value, encoder->init_qp);
+ break;
+ case H263_PROP_MIN_QP:
+ g_value_set_uint (value, encoder->min_qp);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
--- /dev/null
+#ifndef GST_VAAPI_H263_ENCODE_H
+#define GST_VAAPI_H263_ENCODE_H
+
+#include <gst/gst.h>
+#include "gstvaapiencode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_H263ENCODE (gst_h263encode_get_type())
+#define GST_IS_H263ENCODE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H263ENCODE))
+#define GST_IS_H263ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H263ENCODE))
+#define GST_H263ENCODE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H263ENCODE, GstH263EncodeClass))
+#define GST_H263ENCODE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H263ENCODE, GstH263Encode))
+#define GST_H263ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H263ENCODE, GstH263EncodeClass))
+
+
+typedef struct _GstH263Encode GstH263Encode;
+typedef struct _GstH263EncodeClass GstH263EncodeClass;
+
+struct _GstH263Encode {
+ GstVaapiEncode parent;
+};
+
+struct _GstH263EncodeClass {
+ GstVaapiEncodeClass parent_class;
+};
+
+GType gst_h263encode_get_type(void);
+
+
+G_END_DECLS
+
+#endif /* GST_VAAPI_H263_ENCODE_H */
+
--- /dev/null
+#include "gstvaapih263encoder.h"
+
+#include <string.h>
+
+#include "gst/gstclock.h"
+
+#include "gst/vaapi/gstvaapiobject.h"
+#include "gst/vaapi/gstvaapiobject_priv.h"
+#include "gst/vaapi/gstvaapicontext.h"
+#include "gst/vaapi/gstvaapisurface.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
+#include "gst/vaapi/gstvaapidisplay_priv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h263_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_h263_encoder_debug
+
+/* Private per-instance state of GstH263Encoder. */
+struct _GstH263EncoderPrivate {
+ GstVaapiSurface *ref_surface; /* reference buffer*/
+ GstVaapiSurface *recon_surface; /* reconstruct buffer*/
+
+ /* Cached VA parameter buffer IDs; VA_INVALID_ID when not allocated. */
+ VABufferID seq_parameter;
+ VABufferID pic_parameter;
+ VABufferID slice_parameter;
+};
+
+G_DEFINE_TYPE(GstH263Encoder, gst_h263_encoder, GST_TYPE_VAAPI_BASE_ENCODER);
+/*
+static EncoderStatus gst_h263_encoder_flush(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context,
+ GList **coded_pics);
+*/
+
+static void gst_h263_encoder_class_init(GstH263EncoderClass *klass);
+static void gst_h263_encoder_init(GstH263Encoder *encoder);
+static void gst_h263_encoder_finalize(GObject *object);
+
+static gboolean gst_h263_validate_parameters(GstVaapiBaseEncoder* encoder);
+static gboolean gst_h263_encoder_release_resource(
+ GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context);
+static EncoderStatus gst_h263_prepare_encoding(GstVaapiBaseEncoder *encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context,
+ GstVaapiSurface *surface, guint frame_index,
+ VABufferID coded_buf, gboolean *is_key);
+
+static void h263_release_buffers(GstH263Encoder *h263_encoder,
+ GstVaapiDisplay *display);
+
+/* Create a new H.263 encoder object; defaults are applied in instance init. */
+GstH263Encoder *
+gst_h263_encoder_new(void)
+{
+ gpointer obj = g_object_new(GST_TYPE_H263_ENCODER, NULL);
+ return GST_H263_ENCODER(obj);
+}
+
+
+/* Class init: registers private data and wires the base-encoder vfuncs.
+ * H.263 needs no pre-allocated resources, per-frame notification or
+ * coded-frame copying, hence the NULL hooks. */
+static void
+gst_h263_encoder_class_init(GstH263EncoderClass *klass)
+{
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiBaseEncoderClass * const base_class = GST_VAAPI_BASE_ENCODER_CLASS(klass);
+ GstVaapiEncoderClass * const encoder_class = GST_VAAPI_ENCODER_CLASS(klass);
+ g_type_class_add_private(klass, sizeof(GstH263EncoderPrivate));
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_h263_encoder_debug, "gst_va_h263_encoder", 0,
+ "gst_va_h263_encoder element");
+
+ object_class->finalize = gst_h263_encoder_finalize;
+
+ base_class->validate_attributes = gst_h263_validate_parameters;
+ base_class->pre_alloc_resource = NULL;
+ base_class->release_resource = gst_h263_encoder_release_resource;
+ base_class->prepare_frame = gst_h263_prepare_encoding;
+ base_class->notify_frame = NULL;
+ base_class->copy_coded_frame = NULL;
+
+ /*
+ encoder_class->flush = gst_h263_encoder_flush;
+ */
+ /* No codec_data for H.263: the stream is self-contained. */
+ encoder_class->get_codec_data = NULL;
+
+}
+
+static void
+gst_h263_encoder_init(GstH263Encoder *h263_encoder)
+{
+ /* The unused GstVaapiEncoderPrivate lookup has been removed; only the
+ * H.263 private struct is needed here. */
+ GstH263EncoderPrivate *h263_prv = GST_H263_ENCODER_GET_PRIVATE(h263_encoder);
+ ENCODER_ASSERT(h263_prv);
+
+ /* init public: bitrate 0 means "auto-calculate" (see validate_parameters) */
+ h263_encoder->bitrate = 0;
+ h263_encoder->intra_period = H263_DEFAULT_INTRA_PERIOD;
+ h263_encoder->init_qp = H263_DEFAULT_INIT_QP;
+ h263_encoder->min_qp = H263_DEFAULT_MIN_QP;
+
+ /* init private: surfaces/buffers are lazily allocated at encode time */
+ h263_prv->ref_surface = NULL;
+ h263_prv->recon_surface = NULL;
+
+ h263_prv->seq_parameter = VA_INVALID_ID;
+ h263_prv->pic_parameter = VA_INVALID_ID;
+ h263_prv->slice_parameter = VA_INVALID_ID;
+}
+
+static void
+gst_h263_encoder_finalize(GObject *object)
+{
+ /* The unused h263_prv lookup has been removed; private buffers are
+ * released through gst_vaapi_encoder_uninitialize(). */
+ GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
+
+ /* Tear down the VA context/display before GObject disposal. */
+ if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
+ gst_vaapi_encoder_uninitialize(encoder);
+ }
+ G_OBJECT_CLASS(gst_h263_encoder_parent_class)->finalize(object);
+}
+
+/* Validate and normalize the encoder attributes before opening the VA
+ * context; returns FALSE if width/height/fps are unset. */
+static gboolean
+gst_h263_validate_parameters(GstVaapiBaseEncoder* encoder)
+{
+ GstH263Encoder *h263_encoder = GST_H263_ENCODER(encoder);
+ if (!ENCODER_WIDTH(h263_encoder) || !ENCODER_HEIGHT(h263_encoder) || !ENCODER_FPS(h263_encoder)) {
+ return FALSE;
+ }
+ if (!h263_encoder->intra_period) {
+ h263_encoder->intra_period = H263_DEFAULT_INTRA_PERIOD;
+ }
+ /* NOTE(review): init_qp/min_qp are guint32, so "-1 ==" compares against
+ * G_MAXUINT32 via the usual conversions — presumably a sentinel for
+ * "unset"; confirm callers ever store -1 here. */
+ if (-1 == h263_encoder->init_qp) {
+ h263_encoder->init_qp = H263_DEFAULT_INIT_QP;
+ }
+ if (-1 == h263_encoder->min_qp) {
+ h263_encoder->min_qp = H263_DEFAULT_MIN_QP;
+ }
+
+ /* default compress ratio 1: (4*8*1.5) */
+ if (!h263_encoder->bitrate) {
+ h263_encoder->bitrate = ENCODER_WIDTH(h263_encoder)*ENCODER_HEIGHT(h263_encoder)*ENCODER_FPS(h263_encoder)/4;
+ }
+ gst_vaapi_base_encoder_set_va_profile(GST_VAAPI_BASE_ENCODER(h263_encoder), VAProfileH263Baseline);
+ return TRUE;
+
+}
+
+
+/* Destroy the cached VA parameter buffers and reset their IDs.
+ * The previously stored-but-never-read va_status local has been removed;
+ * vaDestroyBuffer() failures are deliberately ignored since there is no
+ * recovery path during teardown. */
+static void
+h263_release_parameters(GstH263Encoder *h263_encoder, GstVaapiDisplay *display)
+{
+ GstH263EncoderPrivate *h263_prv = GST_H263_ENCODER_GET_PRIVATE(h263_encoder);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+ if (VA_INVALID_ID != h263_prv->seq_parameter) {
+ vaDestroyBuffer(va_dpy, h263_prv->seq_parameter);
+ h263_prv->seq_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != h263_prv->pic_parameter) {
+ vaDestroyBuffer(va_dpy, h263_prv->pic_parameter);
+ h263_prv->pic_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != h263_prv->slice_parameter) {
+ vaDestroyBuffer(va_dpy, h263_prv->slice_parameter);
+ h263_prv->slice_parameter = VA_INVALID_ID;
+ }
+}
+
+/* Release per-stream resources: parameter buffers plus the reference and
+ * reconstructed surfaces. Surfaces go back to @context when it is still
+ * alive, otherwise our reference is dropped directly. Always TRUE. */
+static gboolean
+gst_h263_encoder_release_resource(GstVaapiBaseEncoder* encoder,
+ GstVaapiDisplay *display,
+ GstVaapiContext *context)
+{
+ GstH263Encoder *self = GST_H263_ENCODER(encoder);
+ GstH263EncoderPrivate *priv = GST_H263_ENCODER_GET_PRIVATE(self);
+ GstVaapiSurface **slots[2];
+ guint i;
+
+ h263_release_parameters(self, display);
+
+ slots[0] = &priv->ref_surface;
+ slots[1] = &priv->recon_surface;
+ for (i = 0; i < 2; i++) {
+ GstVaapiSurface *surface = *slots[i];
+ if (!surface)
+ continue;
+ if (context)
+ gst_vaapi_context_put_surface(context, surface);
+ else
+ g_object_unref(surface);
+ *slots[i] = NULL;
+ }
+
+ return TRUE;
+}
+
+/* Queue the VA buffers needed to encode one frame: sequence parameters
+ * (first frame only), picture parameters and a single slice. The base
+ * encoder is expected to bracket this with vaBeginPicture/vaEndPicture.
+ * @is_key [out]: TRUE when frame_index falls on an intra-period boundary.
+ * NOTE(review): ENCODER_CHECK_STATUS presumably sets 'ret' and jumps to
+ * the 'end' label on failure — it is defined elsewhere; confirm. */
+static EncoderStatus
+gst_h263_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key)
+
+{
+ GstH263Encoder *h263_encoder = GST_H263_ENCODER(encoder);
+ GstH263EncoderPrivate *h263_prv = GST_H263_ENCODER_GET_PRIVATE(h263_encoder);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+ VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ EncoderStatus ret = ENCODER_NO_ERROR;
+
+ *is_key = (frame_index % h263_encoder->intra_period == 0);
+
+ /* initialize sequence parameter set, only first time */
+ if (VA_INVALID_ID == h263_prv->seq_parameter) { /*only the first time*/
+ VAEncSequenceParameterBufferH263 seq_h263 = {0};
+
+ seq_h263.intra_period = h263_encoder->intra_period;
+ seq_h263.bits_per_second = h263_encoder->bitrate;
+ seq_h263.frame_rate = ENCODER_FPS(h263_encoder);
+ seq_h263.initial_qp = h263_encoder->init_qp;
+ seq_h263.min_qp = h263_encoder->min_qp;
+
+ va_status = vaCreateBuffer(va_dpy, context_id,
+ VAEncSequenceParameterBufferType,
+ sizeof(seq_h263), 1, &seq_h263, &h263_prv->seq_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_ENC_RES_ERR, "h263 alloc seq-buffer failed.\n");
+ va_status = vaRenderPicture(va_dpy, context_id, &h263_prv->seq_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "h263 vaRenderPicture seq-parameters failed.\n");
+ }
+
+ /* set reference and reconstructed surfaces */
+ if (!h263_prv->ref_surface) {
+ h263_prv->ref_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h263_prv->ref_surface, ENCODER_SURFACE_ERR, "h263 reference surface, h263_pop_free_surface failed.\n");
+ }
+ if (!h263_prv->recon_surface) {
+ h263_prv->recon_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h263_prv->recon_surface, ENCODER_SURFACE_ERR, "h263 reconstructed surface, h263_pop_free_surface failed.\n");
+ }
+
+ /* initialize picture, every time, every frame */
+ VAEncPictureParameterBufferH263 pic_h263 = {0};
+ pic_h263.reference_picture = GST_VAAPI_OBJECT_ID(h263_prv->ref_surface);
+ pic_h263.reconstructed_picture = GST_VAAPI_OBJECT_ID(h263_prv->recon_surface);
+ pic_h263.coded_buf = coded_buf;
+ pic_h263.picture_width = ENCODER_WIDTH(h263_encoder);
+ pic_h263.picture_height = ENCODER_HEIGHT(h263_encoder);
+ pic_h263.picture_type = (*is_key) ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+ if (VA_INVALID_ID != h263_prv->pic_parameter) { /* destroy first*/
+ va_status = vaDestroyBuffer(va_dpy, h263_prv->pic_parameter);
+ h263_prv->pic_parameter = VA_INVALID_ID;
+ }
+
+ va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+ sizeof(pic_h263), 1, &pic_h263, &h263_prv->pic_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_ENC_RES_ERR, "h263 creating pic-param buffer failed.\n");
+ va_status = vaRenderPicture(va_dpy, context_id, &h263_prv->pic_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "h263 rendering pic-param buffer failed.\n");
+ /*initialize slice parameters, only ONE slice for h263*/
+ VAEncSliceParameterBuffer slice_h263 = { 0 };
+ slice_h263.start_row_number = 0;
+ /* slice height is expressed in 16-pixel macroblock rows, rounded up */
+ slice_h263.slice_height = (ENCODER_HEIGHT(h263_encoder)+15)/16; /*MB?*/
+ slice_h263.slice_flags.bits.is_intra = *is_key;
+ slice_h263.slice_flags.bits.disable_deblocking_filter_idc = 0;
+ if (VA_INVALID_ID != h263_prv->slice_parameter) {
+ vaDestroyBuffer(va_dpy, h263_prv->slice_parameter);
+ h263_prv->slice_parameter = VA_INVALID_ID;
+ }
+
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncSliceParameterBufferType,
+ sizeof(slice_h263),
+ 1,
+ &slice_h263,
+ &h263_prv->slice_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_ENC_RES_ERR,
+ "h263 creating slice-parameters buffer failed.\n");
+
+ va_status = vaRenderPicture(va_dpy, context_id, &h263_prv->slice_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_PICTURE_ERR,
+ "h263 rendering slice-parameters buffer failed.\n");
+
+ /*swap ref_surface and recon_surface */
+ GstVaapiSurface *swap = h263_prv->ref_surface;
+ h263_prv->ref_surface = h263_prv->recon_surface;
+ h263_prv->recon_surface = swap;
+
+end:
+ return ret;
+}
+
--- /dev/null
+
+#ifndef _GST_VAAPI_H263_ENCODER_H_
+#define _GST_VAAPI_H263_ENCODER_H_
+
+
+#include "gst/vaapi/gstvaapisurfacepool.h"
+
+#include "gstvaapibaseencoder.h"
+
+G_BEGIN_DECLS
+
+#define H263_DEFAULT_INTRA_PERIOD 30
+#define H263_DEFAULT_INIT_QP 15
+#define H263_DEFAULT_MIN_QP 1
+
+
+typedef struct _GstH263Encoder GstH263Encoder;
+typedef struct _GstH263EncoderPrivate GstH263EncoderPrivate;
+typedef struct _GstH263EncoderClass GstH263EncoderClass;
+
+
+#define GST_TYPE_H263_ENCODER (gst_h263_encoder_get_type())
+#define GST_IS_H263_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H263_ENCODER))
+#define GST_IS_H263_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H263_ENCODER))
+#define GST_H263_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H263_ENCODER, GstH263EncoderClass))
+#define GST_H263_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H263_ENCODER, GstH263Encoder))
+#define GST_H263_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H263_ENCODER, GstH263EncoderClass))
+#define GST_H263_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_H263_ENCODER,GstH263EncoderPrivate))
+
+/* Public H.263 encoder object; tunables are plain fields written by the
+ * element's property handlers before encoding starts. */
+struct _GstH263Encoder {
+ GstVaapiBaseEncoder parent; /*based on gobject*/
+
+ guint32 bitrate; /* bits per second; 0 = auto-calculate */
+ guint32 intra_period; /* key-frame interval in frames */
+ guint32 init_qp; /*default 15, 1~31*/
+ guint32 min_qp; /*default 1, 1~31*/
+};
+
+struct _GstH263EncoderClass {
+ GstVaapiBaseEncoderClass parent_class;
+};
+
+
+GType gst_h263_encoder_get_type(void);
+
+GstH263Encoder *gst_h263_encoder_new(void);
+/* Drop one reference on @encoder (thin wrapper around g_object_unref()). */
+static inline void gst_h263_encoder_unref (GstH263Encoder * encoder)
+{
+ g_object_unref (encoder);
+}
+
+
+G_END_DECLS
+
+#endif /* _GST_VAAPI_H263_ENCODER_H_ */
+
--- /dev/null
+#include "gstvaapih264encode.h"
+#include "gstvaapih264encoder.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h264_encode_debug);
+#define GST_CAT_DEFAULT gst_vaapi_h264_encode_debug
+
+static const char gst_h264encode_sink_caps_str[] =
+ GST_CAPS_CODEC("video/x-raw-yuv, " "format = (fourcc) { I420 } ")
+ GST_CAPS_CODEC("video/x-raw-yuv, " "format = (fourcc) { NV12 } ")
+ GST_CAPS_CODEC("video/x-vaapi-surface ")
+ ;
+
+static const GstElementDetails gst_h264encode_details =
+ GST_ELEMENT_DETAILS(
+ "VA-API h264 encoder",
+ "Codec/Encoder/Video",
+ "A VA-API based h264 encoder",
+ "Feng Yuan<feng.yuan@intel.com>");
+
+
+static const char gst_h264encode_src_caps_str[] =
+ GST_CAPS_CODEC("video/x-h264");
+
+static GstStaticPadTemplate gst_h264encode_sink_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_h264encode_sink_caps_str));
+
+static GstStaticPadTemplate gst_h264encode_src_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_h264encode_src_caps_str));
+
+static void gst_h264encode_finalize(GObject *object);
+static void gst_h264encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec);
+static void gst_h264encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+static gboolean _h264_check_valid_profile(guint profile);
+static gboolean _h264_check_valid_level(guint level);
+
+
+/* h264 encode */
+GST_BOILERPLATE(
+ GstH264Encode,
+ gst_h264encode,
+ GstVaapiEncode,
+ GST_TYPE_VAAPI_ENCODE);
+
+/* GObject property IDs for the vaapih264encode element. */
+enum {
+ H264_PROP_0,
+ H264_PROP_PROFILE,
+ H264_PROP_LEVEL,
+ H264_PROP_BITRATE,
+ H264_PROP_INTRA_PERIOD,
+ H264_PROP_INIT_QP,
+ H264_PROP_MIN_QP,
+ H264_PROP_SLICE_NUM,
+};
+
+
+static void
+gst_h264encode_base_init(gpointer klass)
+{
+ GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+ /* Install the element metadata and the static sink/src pad templates. */
+ gst_element_class_set_details(element_class, &gst_h264encode_details);
+
+ gst_element_class_add_pad_template(element_class,
+ gst_static_pad_template_get(&gst_h264encode_sink_factory));
+ gst_element_class_add_pad_template(element_class,
+ gst_static_pad_template_get(&gst_h264encode_src_factory));
+}
+
+/* Class init: wires GObject vfuncs and installs the element properties. */
+static void
+gst_h264encode_class_init(GstH264EncodeClass *klass)
+{
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiEncodeClass * const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_h264_encode_debug, "vaapih264encode", 0,
+ "vaapih264encode element");
+
+ object_class->finalize = gst_h264encode_finalize;
+ object_class->set_property = gst_h264encode_set_property;
+ object_class->get_property = gst_h264encode_get_property;
+
+
+ /* NOTE(review): the range allows up to H264_PROFILE_HIGH10 but the
+ * blurb and _h264_check_valid_profile() only accept 66/77/100 —
+ * confirm whether High-10 is really supported. */
+ g_object_class_install_property (object_class, H264_PROP_PROFILE,
+ g_param_spec_uint ("profile",
+ "H264 Profile",
+ "Profile supports: 66(Baseline), 77(Main), 100(High)",
+ H264_PROFILE_BASELINE,
+ H264_PROFILE_HIGH10,
+ H264_DEFAULT_PROFILE,
+ G_PARAM_READWRITE));
+ /* NOTE(review): the range caps at level 4.1 while
+ * _h264_check_valid_level() accepts values up to 5.1 — confirm the
+ * intended maximum. */
+ g_object_class_install_property (object_class, H264_PROP_LEVEL,
+ g_param_spec_uint ("level",
+ "H264 level idc",
+ "Level idc supports: 10, 11, 12, 13, 20, 21, 22, 30, 31, 32, 40, 41",
+ H264_LEVEL_10,
+ H264_LEVEL_41,
+ H264_DEFAULT_LEVEL,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H264_PROP_BITRATE,
+ g_param_spec_uint ("bitrate",
+ "H264 encoding bitrate",
+ "H264 encoding bitrate, 10k~100M, (0, auto-calculate)",
+ 0,
+ 100*1000*1000,
+ 0,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H264_PROP_INTRA_PERIOD,
+ g_param_spec_uint ("intra-period",
+ "H264 encoding intra-period",
+ "H264 encoding intra-period",
+ 1,
+ 300,
+ H264_DEFAULT_INTRA_PERIOD,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H264_PROP_INIT_QP,
+ g_param_spec_uint ("init-qp",
+ "H264 init-qp",
+ "H264 init-qp",
+ 1,
+ 51,
+ H264_DEFAULT_INIT_QP,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H264_PROP_MIN_QP,
+ g_param_spec_uint ("min-qp",
+ "H264 min-qp",
+ "H264 min-qp",
+ 1,
+ 51,
+ H264_DEFAULT_MIN_QP,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, H264_PROP_SLICE_NUM,
+ g_param_spec_uint ("slice-num",
+ "H264 slice num",
+ "H264 slice num",
+ 1,
+ 200,
+ 1,
+ G_PARAM_READWRITE));
+
+}
+
+static void
+gst_h264encode_init(GstH264Encode *h264_encode, GstH264EncodeClass *klass)
+{
+ /* Create the wrapped H.264 encoder object. The unused element_class
+ * local has been dropped. */
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(h264_encode);
+ encode->encoder = GST_VAAPI_ENCODER(gst_h264_encoder_new());
+ ENCODER_ASSERT(encode->encoder);
+}
+
+/* Finalize: nothing element-specific to free (the wrapped encoder is
+ * owned by the GstVaapiEncode base class); just chain up. */
+static void
+gst_h264encode_finalize(GObject *object)
+{
+ //GstH264Encode * const h264_encode = GST_H264ENCODE(object);
+ G_OBJECT_CLASS(parent_class)->finalize(object);
+}
+
+/* GObject property setter: forwards element properties to the underlying
+ * GstH264Encoder; profile/level values are validated before being stored. */
+static void
+gst_h264encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstH264Encoder *encoder = GST_H264_ENCODER(encode->encoder);
+ guint v;
+
+ ENCODER_ASSERT(encoder);
+
+ switch (prop_id) {
+ case H264_PROP_PROFILE:
+ v = g_value_get_uint(value);
+ if (_h264_check_valid_profile(v)) {
+ encoder->profile = v;
+ } else {
+ ENCODER_LOG_ERROR("h264encode set property <profile> failed.\n");
+ }
+ break;
+ case H264_PROP_LEVEL:
+ v = g_value_get_uint(value);
+ if (_h264_check_valid_level(v)) {
+ encoder->level = v;
+ } else {
+ ENCODER_LOG_ERROR("h264encode set property <level> failed.\n");
+ }
+ break;
+ case H264_PROP_BITRATE:
+ encoder->bitrate = g_value_get_uint(value);
+ break;
+ case H264_PROP_INTRA_PERIOD:
+ encoder->intra_period = g_value_get_uint(value);
+ break;
+ case H264_PROP_INIT_QP:
+ encoder->init_qp = g_value_get_uint(value);
+ break;
+ case H264_PROP_MIN_QP:
+ encoder->min_qp = g_value_get_uint(value);
+ break;
+ case H264_PROP_SLICE_NUM:
+ encoder->slice_num = g_value_get_uint(value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* GObject property getter: reads the current values back from the
+ * underlying GstH264Encoder object. */
+static void
+gst_h264encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstH264Encoder *encoder = GST_H264_ENCODER(encode->encoder);
+
+ ENCODER_ASSERT(encoder);
+
+ switch (prop_id) {
+ case H264_PROP_PROFILE:
+ g_value_set_uint (value, encoder->profile);
+ break;
+ case H264_PROP_LEVEL:
+ g_value_set_uint (value, encoder->level);
+ break;
+ case H264_PROP_BITRATE:
+ g_value_set_uint (value, encoder->bitrate);
+ break;
+ case H264_PROP_INTRA_PERIOD:
+ g_value_set_uint (value, encoder->intra_period);
+ break;
+ case H264_PROP_INIT_QP:
+ g_value_set_uint (value, encoder->init_qp);
+ break;
+ case H264_PROP_MIN_QP:
+ g_value_set_uint (value, encoder->min_qp);
+ break;
+ case H264_PROP_SLICE_NUM:
+ g_value_set_uint (value, encoder->slice_num);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+
+/* Return TRUE if @profile is one of the profile_idc values the encoder
+ * actually supports (Baseline/Main/High). Fixed: the array declaration
+ * was missing its type specifier (implicit int is invalid since C99). */
+static gboolean
+_h264_check_valid_profile(guint profile)
+{
+  static const guint limit_profiles[] = {
+         H264_PROFILE_BASELINE,
+         H264_PROFILE_MAIN,
+         H264_PROFILE_HIGH
+        };
+  guint n_profiles = sizeof(limit_profiles)/sizeof(limit_profiles[0]);
+  guint i;
+  for (i = 0; i < n_profiles; ++i) {
+    if (limit_profiles[i] == profile)
+      return TRUE;
+  }
+  return FALSE;
+}
+
+/* Return TRUE if @level is a recognized level_idc value. Fixed: the array
+ * declaration was missing its type specifier (implicit int is invalid
+ * since C99). NOTE(review): this accepts up to level 5.1 while the
+ * "level" property caps at H264_LEVEL_41 — confirm intended maximum. */
+static gboolean
+_h264_check_valid_level(guint level)
+{
+  static const guint limit_levels[] = {
+        H264_LEVEL_10,
+        H264_LEVEL_11,
+        H264_LEVEL_12,
+        H264_LEVEL_13,
+        H264_LEVEL_20,
+        H264_LEVEL_21,
+        H264_LEVEL_22,
+        H264_LEVEL_30,
+        H264_LEVEL_31,
+        H264_LEVEL_32,
+        H264_LEVEL_40,
+        H264_LEVEL_41,
+        H264_LEVEL_42,
+        H264_LEVEL_50,
+        H264_LEVEL_51
+       };
+  guint n_levels = sizeof(limit_levels)/sizeof(limit_levels[0]);
+  guint i;
+  for (i = 0; i < n_levels; ++i) {
+    if (limit_levels[i] == level)
+      return TRUE;
+  }
+  return FALSE;
+}
+
+
--- /dev/null
+#ifndef GST_VAAPI_H264_ENCODE_H
+#define GST_VAAPI_H264_ENCODE_H
+
+#include <gst/gst.h>
+#include "gstvaapiencode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_H264ENCODE (gst_h264encode_get_type())
+#define GST_IS_H264ENCODE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264ENCODE))
+#define GST_IS_H264ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264ENCODE))
+#define GST_H264ENCODE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264ENCODE, GstH264EncodeClass))
+#define GST_H264ENCODE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264ENCODE, GstH264Encode))
+#define GST_H264ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264ENCODE, GstH264EncodeClass))
+
+
+typedef struct _GstH264Encode GstH264Encode;
+typedef struct _GstH264EncodeClass GstH264EncodeClass;
+
+struct _GstH264Encode {
+ GstVaapiEncode parent;
+};
+
+struct _GstH264EncodeClass {
+ GstVaapiEncodeClass parent_class;
+};
+
+GType gst_h264encode_get_type(void);
+
+
+G_END_DECLS
+
+#endif /* GST_VAAPI_H264_ENCODE_H */
+
--- /dev/null
+
+#include "gstvaapih264encoder.h"
+
+#include <string.h>
+#include <stdlib.h>
+#include <va/va.h>
+#include "va/va_x11.h"
+#include <X11/Xlib.h>
+#include <glib.h>
+
+#include "gst/gstclock.h"
+#include "gst/gstvalue.h"
+
+#include "gst/vaapi/gstvaapiobject.h"
+#include "gst/vaapi/gstvaapiobject_priv.h"
+#include "gst/vaapi/gstvaapicontext.h"
+#include "gst/vaapi/gstvaapisurface.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
+#include "gst/vaapi/gstvaapidisplay_priv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h264_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_h264_encoder_debug
+
+#define SHARE_CODED_BUF 0
+
+#define DEFAULT_SURFACE_NUMBER 3
+#define DEFAULT_CODEDBUF_NUM 5
+#define DEFAULT_SID_INPUT 0 // suface_ids[0]
+
+#define REF_RECON_SURFACE_NUM 2
+
+/* H.264 NAL unit type codes (Annex B nal_unit_type values). */
+typedef enum {
+  NAL_UNKNOWN     = 0,
+  NAL_NON_IDR     = 1,
+  NAL_IDR         = 5,    /* ref_idc != 0 */
+  NAL_SEI         = 6,    /* ref_idc == 0 */
+  NAL_SPS         = 7,
+  NAL_PPS         = 8,
+  NAL_AUD         = 9,
+  NAL_FILLER      = 12,
+}H264_NAL_TYPE;
+
+/* GstBuffer subclass tying a coded-output buffer to the VA coded buffer
+ * it was mapped from, so the VA buffer can be released on finalize. */
+struct _GstH264EncodeBuffer {
+  GstBuffer            buffer;
+  VABufferID          *coded_id;  /* VA coded buffer backing this GstBuffer */
+  GstH264EncoderPrivate *encoder; /* back-pointer to the owning encoder's private data */
+};
+
+/* Private per-instance state of GstH264Encoder. */
+struct _GstH264EncoderPrivate {
+  GstH264Encoder *public;
+  guint32         format;   /*NV12, I420,*/
+  gboolean        es_flag;  /*elementary flag*/
+
+  /* private data*/
+  GQueue         *video_buffer_caches; /*not used for baseline*/
+
+  GstVaapiSurface  *ref_surface;  /* reference buffer*/
+  GstVaapiSurface  *recon_surface; /* reconstruct buffer*/
+
+  /* Cached VA parameter buffer IDs. */
+  VABufferID        seq_parameter;
+  VABufferID        pic_parameter;
+  VABufferID        slice_parameter;
+  VAEncSliceParameterBuffer *slice_param_buffers; /* one entry per slice */
+  guint32           default_slice_height; /* MB rows per slice */
+  guint32           slice_mod_mb_num;     /* leftover MB rows spread over first slices */
+
+  /* Parsed parameter sets, kept for codec_data generation. */
+  GstBuffer        *sps_data;
+  GstBuffer        *pps_data;
+
+};
+
+G_DEFINE_TYPE(GstH264Encoder, gst_h264_encoder, GST_TYPE_VAAPI_BASE_ENCODER);
+
+
+// 4096-1
+#define H264_BITSTREAM_ALLOC_ALIGN_MASK 0x0FFF
+
+#define BIT_STREAM_BUFFER(stream) ((stream)->buffer)
+#define BIT_STREAM_BIT_SIZE(stream) ((stream)->bit_size)
+
+/* Growable bit-level writer used to build SPS/PPS NAL units. */
+struct _H264Bitstream {
+  guint8  *buffer;             /* backing storage; grown on demand */
+  guint32  bit_size;           /* bits written so far */
+  guint32  max_bit_capability; /* current capacity in bits */
+};
+
+typedef struct _H264Bitstream H264Bitstream;
+
+static const guint8 h264_bit_mask[9] = {0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF};
+
+static EncoderStatus gst_h264_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GList **coded_pics);
+
+/*other functions*/
+static EncoderStatus gst_h264_encoder_get_avcC_codec_data(
+ GstVaapiEncoder* encoder, GstBuffer **buffer);
+static EncoderStatus gst_h264_encoder_get_nal_codec_data(GstVaapiEncoder* encoder, GstBuffer **buffer);
+
+static gboolean gst_h264_validate_parameters(GstVaapiBaseEncoder *encoder);
+static void gst_h264_encoder_finalize(GObject *object);
+static void gst_h264_encoder_init_public_values(GstH264Encoder* encoder);
+
+static gboolean gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static gboolean gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+
+static EncoderStatus gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key);
+static void gst_h264_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size);
+//static EncoderStatus h264_encoder_read_sps_pps(
+// GstH264EncoderPrivate *h264_prv, const guint8 *buf, guint32 size);
+static GstBuffer *gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
+ guint8 *frame, guint32 frame_size, VABufferID *coded_buf);
+
+/* h264 bitstream functions */
+static void h264_bitstream_init(H264Bitstream *bitstream, guint32 bit_capability);
+static gboolean h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_size);
+static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value);
+static gboolean h264_bitstream_write_ue(H264Bitstream *bitstream, guint32 value);
+static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value);
+static gboolean h264_bitstream_write_trailing_bits(H264Bitstream *bitstream);
+
+static gboolean h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, guint32 byte_size);
+static void h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag);
+static gboolean h264_bitstream_auto_grow(H264Bitstream *bitstream, guint32 extra_bit_size);
+static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static const guint8 *h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size);
+static gboolean h264_read_sps_attributes(const guint8 *sps_data, guint32 sps_size,
+ guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc);
+
+static void
+gst_h264_encoder_class_init(GstH264EncoderClass *klass)
+{
+ /* GObject class-init: register the private struct, set up the debug
+ category, and wire the base-encoder / encoder virtual methods to the
+ H.264 implementations in this file. */
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiEncoderClass * const encoder_class = GST_VAAPI_ENCODER_CLASS(klass);
+ GstVaapiBaseEncoderClass * const base_class = GST_VAAPI_BASE_ENCODER_CLASS(klass);
+
+ g_type_class_add_private(klass, sizeof(GstH264EncoderPrivate));
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_h264_encoder_debug, "gst_va_h264_encoder", 0,
+ "gst_va_h264_encoder element");
+
+ object_class->finalize = gst_h264_encoder_finalize;
+
+ /* hooks invoked by the base encoder during the encode pipeline */
+ base_class->validate_attributes = gst_h264_validate_parameters;
+ base_class->pre_alloc_resource = gst_h264_encoder_alloc_slices;
+ base_class->release_resource = gst_h264_encoder_release_resource;
+ base_class->prepare_frame = gst_h264_prepare_encoding;
+ base_class->notify_frame = gst_h264_notify_frame;
+ base_class->copy_coded_frame = gst_h264_encoder_copy_coded_buffer;
+
+ encoder_class->flush = gst_h264_encoder_flush;
+
+ /* codec_data is produced in avcC form; the NAL variant is available
+ but disabled */
+ encoder_class->get_codec_data = gst_h264_encoder_get_avcC_codec_data;
+ /* encoder_class->get_codec_data = gst_h264_encoder_get_nal_codec_data; */
+
+ /*
+ object_class->set_property = gst_h264_encoder_set_property;
+ object_class->get_property = gst_h264_encoder_get_property;
+ */
+}
+
+
+static void
+gst_h264_encode_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+ /* GstMiniObject class-init for the coded-buffer subclass: remember the
+ parent class for chain-up and install our finalize hook. */
+ GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS(g_class);
+
+ h264_encode_buffer_parent_class = g_type_class_peek_parent(g_class);
+ ENCODER_ASSERT(h264_encode_buffer_parent_class);
+
+ mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+ gst_h264_encode_buffer_finalize;
+}
+
+
+static GType
+gst_h264_encode_buffer_get_type (void)
+{
+ /* Lazily register the GstH264EncodeBuffer GType (a GstBuffer subclass
+ that returns its VA coded buffer to the encoder on finalize).
+ NOTE(review): the check-then-register below is not thread-safe; OK
+ if first use happens before threads are spawned — confirm. */
+ static GType s_h264_encode_buffer_type = 0;
+ if (G_UNLIKELY (s_h264_encode_buffer_type == 0)) {
+ static const GTypeInfo s_h264_encode_buffer_info = {
+ sizeof(GstBufferClass),
+ NULL, /* base_init */
+ NULL, /* base_finalize */
+ gst_h264_encode_buffer_class_init,
+ NULL, /* class_finalize */
+ NULL, /* class_data */
+ sizeof(GstH264EncodeBuffer),
+ 0, /* n_preallocs */
+ NULL, /* instance_init */
+ NULL /* value_table */
+ };
+ s_h264_encode_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+ "GstH264EncodeBuffer", &s_h264_encode_buffer_info, 0);
+ }
+ return s_h264_encode_buffer_type;
+}
+
+static void
+gst_h264_encode_buffer_finalize (GstH264EncodeBuffer *h264_buffer)
+{
+  /* Finalize hook for the coded-buffer subclass: unmap the VA coded
+   * buffer and return its id to the encoder's free pool, then chain up
+   * to GstBuffer's finalize.
+   * BUGFIX: the original read h264_prv->public before asserting that
+   * h264_prv was non-NULL; validate before dereferencing. */
+  GstH264EncoderPrivate *h264_prv = NULL;
+  VABufferID* coded_id = NULL;
+  GstVaapiDisplay *display = NULL;
+
+  gboolean is_locked = FALSE;
+
+  h264_prv = h264_buffer->encoder;
+  coded_id = h264_buffer->coded_id;
+
+  ENCODER_ASSERT(h264_prv);
+  ENCODER_ASSERT(coded_id && VA_INVALID_ID!= *coded_id);
+
+  display = ENCODER_DISPLAY(h264_prv->public);
+  ENCODER_ASSERT(display);
+  VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+  /*if (--(*h264_buffer->ref_coded_id) == 0) */
+  {
+    /*g_free(h264_buffer->ref_coded_id);*/
+    /* unmap under the display lock, then recycle the buffer id */
+    ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+    vaUnmapBuffer(va_dpy, *coded_id);
+    ENCODER_RELEASE_DISPLAY_LOCK(display);
+    push_available_coded_buffer(h264_prv, coded_id);
+  }
+
+  /* chain up to the parent (GstBuffer) finalize */
+  if (GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize) {
+    GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize(GST_MINI_OBJECT(h264_buffer));
+  }
+}
+
+static GstH264EncodeBuffer *
+gst_h264_encode_buffer_new(GstH264EncoderPrivate *h264_prv,
+ VABufferID *coded_id)
+{
+ /* Wrap a VA coded-buffer id in a GstBuffer subclass. The id is handed
+ back to the encoder's pool when the buffer is finalized; ownership of
+ the pointed-to id stays with the encoder private struct. */
+ GstH264EncodeBuffer *buf = (GstH264EncodeBuffer*)gst_mini_object_new(GST_TYPE_H264_ENCODE_BUFFER);
+ buf->coded_id = coded_id;
+ buf->encoder = h264_prv;
+ return buf;
+}
+
+
+static GstVaapiSurface *
+h264_get_video_surface(GstH264EncoderPrivate *h264_prv, GstVaapiVideoBuffer *video_buffer)
+{
+ /* Fetch the VA surface backing a vaapi video buffer. Returns NULL (via
+ the `end` label taken by ENCODER_CHECK_STATUS) when the buffer has no
+ surface. h264_prv is currently unused — the buffer-caching code that
+ would use it is disabled below. */
+ //ref_surface
+ GstVaapiSurface *ret = gst_vaapi_video_buffer_get_surface(video_buffer);
+
+ ENCODER_CHECK_STATUS(ret, NULL, "video buffer doesn't have a surface");
+#if 0
+ g_queue_push_tail(h264_prv->video_buffer_caches,video_buffer);
+ gst_buffer_ref(GST_BUFFER(video_buffer));
+#endif
+ return ret;
+
+ end:
+ return NULL;
+}
+
+static void
+h264_release_video_surface(GstH264EncoderPrivate *h264_prv, VASurfaceID surface)
+{
+ /* Intentionally empty: surface caching/release is disabled (see the
+ #if 0 sketch); kept as a hook for a future video-buffer cache. */
+#if 0
+ ENCODER_ASSERT(h264_prv->video_buffer_caches);
+ g_queue_find_custom(h264_prv->video_buffer_caches,xx, compare_func);
+ for (h264_prv->video_buffer_caches) {
+ }
+#endif
+}
+
+static VAProfile
+h264_get_va_profile(guint32 profile)
+{
+  /* Map the public H264_PROFILE_* value onto the libva profile enum.
+   * Unknown profiles yield -1 so the caller can reject the setting. */
+  if (H264_PROFILE_BASELINE == profile)
+    return VAProfileH264Baseline;
+  if (H264_PROFILE_MAIN == profile)
+    return VAProfileH264Main;
+  if (H264_PROFILE_HIGH == profile)
+    return VAProfileH264High;
+
+  return (-1);
+}
+
+GstH264Encoder *
+gst_h264_encoder_new(void)
+{
+ /* Allocate a new H.264 encoder instance; the caller owns the returned
+ reference. */
+ return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
+}
+
+
+static void
+gst_h264_encoder_init(GstH264Encoder *encoder)
+{
+  /* GObject instance-init: reset public fields to their "unset"
+   * markers, enable per-frame notification (needed until SPS/PPS have
+   * been captured from the stream), and clear all private state.
+   * The unused `encoder_prv` local from the original was dropped. */
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+  ENCODER_ASSERT(h264_prv);
+  h264_prv->public = encoder;
+
+  /* init public attributes */
+  gst_h264_encoder_init_public_values(encoder);
+  gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(encoder), TRUE);
+
+  /* init private values */
+  h264_prv->format = GST_MAKE_FOURCC('N','V','1','2');
+  h264_prv->es_flag = TRUE;
+
+  h264_prv->ref_surface = NULL;
+  h264_prv->recon_surface = NULL;
+
+  /* VA parameter buffers are created lazily on the first frame */
+  h264_prv->seq_parameter = VA_INVALID_ID;
+  h264_prv->pic_parameter = VA_INVALID_ID;
+  h264_prv->slice_parameter = VA_INVALID_ID;
+  h264_prv->slice_param_buffers = NULL;
+  h264_prv->default_slice_height = 0;
+  h264_prv->slice_mod_mb_num = 0;
+
+  h264_prv->sps_data = NULL;
+  h264_prv->pps_data = NULL;
+}
+
+static void
+gst_h264_encoder_finalize(GObject *object)
+{
+ /* GObject finalize: tear the encoder down if still initialized, free
+ the cached SPS/PPS buffers and the slice parameter array, then chain
+ up to the parent class. */
+ /*free private buffers*/
+ GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(object);
+
+ if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
+ gst_vaapi_encoder_uninitialize(encoder);
+ }
+
+ if (h264_prv->sps_data) {
+ gst_buffer_unref(h264_prv->sps_data);
+ h264_prv->sps_data = NULL;
+ }
+ if (h264_prv->pps_data) {
+ gst_buffer_unref(h264_prv->pps_data);
+ h264_prv->pps_data = NULL;
+ }
+ if (h264_prv->slice_param_buffers) {
+ g_free(h264_prv->slice_param_buffers);
+ h264_prv->slice_param_buffers = NULL;
+ }
+
+ G_OBJECT_CLASS(gst_h264_encoder_parent_class)->finalize(object);
+}
+
+
+static void
+gst_h264_encoder_init_public_values(GstH264Encoder* encoder)
+{
+ /* Reset all user-tunable fields to "unset" markers; the real defaults
+ are filled in later by gst_h264_validate_parameters(). */
+ encoder->profile = 0;
+ encoder->level = 0;
+ encoder->bitrate = 0;
+ encoder->intra_period = 0;
+ encoder->init_qp = -1; /* -1 means "use H264_DEFAULT_INIT_QP" */
+ encoder->min_qp = -1; /* -1 means "use H264_DEFAULT_MIN_QP" */
+ encoder->slice_num = 0;
+}
+
+void
+gst_h264_encoder_set_es_flag(GstH264Encoder* encoder, gboolean es)
+{
+ /* Select the output framing used by copy_coded_buffer(): when TRUE
+ each NAL is re-wrapped with a 32-bit length prefix; when FALSE the
+ coded frame is copied verbatim. */
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+ h264_prv->es_flag = es;
+}
+
+
+gboolean
+gst_h264_validate_parameters(GstVaapiBaseEncoder *base_encoder)
+{
+  /* Validate mandatory settings (width/height/fps) and fill in defaults
+   * for every field the caller left at its "unset" marker. Also derives
+   * the per-slice height used later when building slice parameters.
+   * Returns FALSE when width, height or fps is missing.
+   * The repeated (height+15)/16 expression is now computed once. */
+  GstH264Encoder *encoder = GST_H264_ENCODER(base_encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+  guint32 mb_height;
+
+  if (!ENCODER_WIDTH(encoder) || !ENCODER_HEIGHT(encoder) || !ENCODER_FPS(encoder)) {
+    return FALSE;
+  }
+  if (!encoder->profile) {
+    encoder->profile = H264_DEFAULT_PROFILE;
+  }
+  gst_vaapi_base_encoder_set_va_profile(base_encoder, h264_get_va_profile(encoder->profile));
+  if (!encoder->level) {
+    encoder->level = H264_DEFAULT_LEVEL;
+  }
+  if (!encoder->intra_period) {
+    encoder->intra_period = H264_DEFAULT_INTRA_PERIOD;
+  }
+  if (-1 == encoder->init_qp) {
+    encoder->init_qp = H264_DEFAULT_INIT_QP;
+  }
+  if (-1 == encoder->min_qp) {
+    encoder->min_qp = H264_DEFAULT_MIN_QP;
+  }
+
+  /* min_qp must never exceed the initial QP */
+  if (encoder->min_qp > encoder->init_qp) {
+    encoder->min_qp = encoder->init_qp;
+  }
+
+  /* default compress ratio 1: (4*8*1.5) */
+  if (!encoder->bitrate) {
+    encoder->bitrate = ENCODER_WIDTH(encoder)*ENCODER_HEIGHT(encoder)*ENCODER_FPS(encoder)/4;
+  }
+
+  if (!encoder->slice_num) {
+    encoder->slice_num = H264_DEFAULT_SLICE_NUM;
+  }
+
+  /* Distribute macroblock rows across slices; the first
+     `slice_mod_mb_num` slices receive one extra row.
+     e.g. 13 MB rows over 8 slices -> heights 2,2,2,2,2,1,1,1 */
+  mb_height = (ENCODER_HEIGHT(encoder)+15)/16; /* picture height in MBs */
+  h264_prv->default_slice_height = mb_height/encoder->slice_num;
+  if (0 == h264_prv->default_slice_height) { /* more slices than MB rows */
+    h264_prv->default_slice_height = 1;
+    h264_prv->slice_mod_mb_num = 0;
+    encoder->slice_num = mb_height;
+  } else {
+    h264_prv->slice_mod_mb_num = mb_height%encoder->slice_num;
+  }
+  return TRUE;
+}
+
+
+static gboolean
+h264_encoder_release_parameters(GstH264Encoder *h264_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+ /* Destroy the VA sequence/picture/slice parameter buffers (under the
+ display lock) and free the slice parameter array. Always returns
+ TRUE. NOTE(review): the vaDestroyBuffer() status values are stored
+ in va_status but never checked — confirm this is intentional. */
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ guint32 i;
+
+ gboolean is_locked = FALSE;
+
+ ENCODER_ASSERT(display);
+ ENCODER_ASSERT(context);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+
+ ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+ if (VA_INVALID_ID != h264_prv->seq_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->seq_parameter);
+ h264_prv->seq_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != h264_prv->pic_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+ h264_prv->pic_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != h264_prv->slice_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+ h264_prv->slice_parameter = VA_INVALID_ID;
+ }
+
+ ENCODER_RELEASE_DISPLAY_LOCK(display);
+
+ if (h264_prv->slice_param_buffers) {
+ g_free(h264_prv->slice_param_buffers);
+ h264_prv->slice_param_buffers = NULL;
+ }
+
+ return TRUE;
+}
+
+
+static gboolean
+gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+  /* release_resource vmethod: drop every per-session resource — the VA
+   * parameter buffers, both working surfaces and the cached SPS/PPS
+   * copies. Always reports success. */
+  GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+
+  /* VA parameter buffers go first */
+  h264_encoder_release_parameters(h264_encoder, display, context);
+
+  /* Reference surface: hand it back to the context when one is still
+   * around, otherwise drop our own reference. */
+  if (h264_prv->ref_surface) {
+    if (context)
+      gst_vaapi_context_put_surface(context, h264_prv->ref_surface);
+    else
+      g_object_unref(h264_prv->ref_surface);
+    h264_prv->ref_surface = NULL;
+  }
+
+  /* reconstructed surface: same policy */
+  if (h264_prv->recon_surface) {
+    if (context)
+      gst_vaapi_context_put_surface(context, h264_prv->recon_surface);
+    else
+      g_object_unref(h264_prv->recon_surface);
+    h264_prv->recon_surface = NULL;
+  }
+
+  /* cached codec headers */
+  if (h264_prv->sps_data) {
+    gst_buffer_unref(h264_prv->sps_data);
+    h264_prv->sps_data = NULL;
+  }
+  if (h264_prv->pps_data) {
+    gst_buffer_unref(h264_prv->pps_data);
+    h264_prv->pps_data = NULL;
+  }
+
+  return TRUE;
+}
+
+static gboolean
+gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
+  /* pre_alloc_resource vmethod: pre-allocate one zeroed
+   * VAEncSliceParameterBuffer per configured slice. display/context are
+   * unused here but belong to the vmethod signature. The unused
+   * `ret`/`va_status` locals from the original were removed. */
+  GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+
+  h264_prv->slice_param_buffers = (VAEncSliceParameterBuffer*)g_malloc0_n(h264_encoder->slice_num,
+                                      sizeof(h264_prv->slice_param_buffers[0]));
+  /* g_malloc0_n() aborts on OOM, so the allocation cannot fail here */
+  return TRUE;
+}
+
+
+
+static EncoderStatus
+gst_h264_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key)
+{
+ /* prepare_frame vmethod: build and render the VA parameter buffers
+ for one frame — the sequence parameters (first frame only), the
+ picture parameters, and one slice parameter per configured slice.
+ Runs entirely under the display lock; errors jump to `end` via
+ ENCODER_CHECK_STATUS, which releases the lock. Sets *is_key for
+ frames at intra_period boundaries. */
+ EncoderStatus ret = ENCODER_NO_ERROR;
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+#ifdef _MRST_
+ VAEncPictureParameterBufferH264 pic_h264;
+#else
+ VAEncPictureParameterBufferH264Baseline pic_h264;
+#endif
+ /* NOTE(review): pic_h264 is not zero-initialized; the fields used
+ below are all assigned, but any other fields of the libva struct
+ keep stack garbage — confirm against the struct definition. */
+ VAEncSliceParameterBuffer *slice_h264 = NULL;
+
+ gboolean is_locked = FALSE;
+
+ ENCODER_ASSERT(display && context);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+ VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+
+ *is_key = ((frame_index % h264_encoder->intra_period) == 0);
+
+ /* lock display */
+ ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+ /*handle first surface_index*/
+ /*only need first frame*/
+ if (VA_INVALID_ID == h264_prv->seq_parameter) { /*first time*/
+ #ifdef _MRST_
+ VAEncSequenceParameterBufferH264 seq_h264 = {0};
+ #else
+ VAEncSequenceParameterBufferH264Baseline seq_h264 = {0};
+ #endif
+
+ seq_h264.level_idc = h264_encoder->level; /* 3.0 */
+ seq_h264.max_num_ref_frames = 1; /*Only I, P frames*/
+ seq_h264.picture_width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
+ seq_h264.picture_height_in_mbs = (ENCODER_HEIGHT(h264_encoder)+15)/16;
+
+ seq_h264.bits_per_second = h264_encoder->bitrate;
+ seq_h264.frame_rate = ENCODER_FPS(h264_encoder);
+ seq_h264.initial_qp = h264_encoder->init_qp; /*qp_value; 15, 24, 26?*/
+ seq_h264.min_qp = h264_encoder->min_qp; /*1, 6, 10*/
+ seq_h264.basic_unit_size = 0;
+ seq_h264.intra_period = h264_encoder->intra_period;
+ seq_h264.intra_idr_period = h264_encoder->intra_period;
+
+ va_status = vaCreateBuffer(va_dpy, context_id,
+ VAEncSequenceParameterBufferType,
+ sizeof(seq_h264), 1, &seq_h264, &h264_prv->seq_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_ENC_RES_ERR, "alloc seq-buffer failed.\n");
+ va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->seq_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaRenderPicture seq-parameters failed.\n");
+ }
+
+ /* set pic_parameters*/
+ /* the two working surfaces are lazily acquired from the context and
+ swapped after every frame (see the end of this function) */
+ if (!h264_prv->ref_surface) {
+ h264_prv->ref_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->ref_surface, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+ }
+ if (!h264_prv->recon_surface) {
+ h264_prv->recon_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->recon_surface, ENCODER_SURFACE_ERR, "reconstructed surface, h264_pop_free_surface failed.\n");
+ }
+
+ pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface);
+ pic_h264.reconstructed_picture = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
+ pic_h264.coded_buf = coded_buf;
+ pic_h264.picture_width = ENCODER_WIDTH(h264_encoder);
+ pic_h264.picture_height = ENCODER_HEIGHT(h264_encoder);
+ pic_h264.last_picture = 0; // last pic or not
+
+ if (VA_INVALID_ID != h264_prv->pic_parameter) { /* share the same pic_parameter*/
+ vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+ h264_prv->pic_parameter = VA_INVALID_ID;
+ }
+ va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+ sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
+
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "creating pic-param buffer failed.\n");
+
+ va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->pic_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "rendering pic-param buffer failed.\n");
+
+ /* set slice parameters, support multiple slices */
+ /* the first slice_mod_mb_num slices carry one extra MB row, matching
+ the split computed in gst_h264_validate_parameters() */
+ int i = 0;
+ guint32 last_row_num = 0;
+ guint32 slice_mod_num = h264_prv->slice_mod_mb_num;
+
+ memset(h264_prv->slice_param_buffers, 0, h264_encoder->slice_num*sizeof(h264_prv->slice_param_buffers[0]));
+ for (i = 0; i < h264_encoder->slice_num; ++i) {
+ slice_h264 = &h264_prv->slice_param_buffers[i];
+ slice_h264->start_row_number = last_row_num; /* unit MB*/
+ slice_h264->slice_height = h264_prv->default_slice_height; /* unit MB */
+ if (slice_mod_num) {
+ ++slice_h264->slice_height;
+ --slice_mod_num;
+ }
+ last_row_num += slice_h264->slice_height;
+ slice_h264->slice_flags.bits.is_intra = *is_key;
+ slice_h264->slice_flags.bits.disable_deblocking_filter_idc = 0;
+
+ }
+ ENCODER_ASSERT(last_row_num == (ENCODER_HEIGHT(h264_encoder)+15)/16);
+
+ if (VA_INVALID_ID != h264_prv->slice_parameter) {
+ vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+ h264_prv->slice_parameter = VA_INVALID_ID;
+ }
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncSliceParameterBufferType,
+ sizeof(h264_prv->slice_param_buffers[0]),
+ h264_encoder->slice_num,
+ h264_prv->slice_param_buffers,
+ &h264_prv->slice_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "creating slice-parameters buffer failed.\n");
+
+ va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->slice_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "rendering slice-parameters buffer failed.\n");
+
+ /*after finished, set ref_surface_index, recon_surface_index */
+ /* this frame's reconstruction becomes the next frame's reference */
+ GstVaapiSurface *swap = h264_prv->ref_surface;
+ h264_prv->ref_surface = h264_prv->recon_surface;
+ h264_prv->recon_surface = swap;
+
+ end:
+ ENCODER_RELEASE_DISPLAY_LOCK(display);
+ return ret;
+}
+
+
+static GstBuffer *
+gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
+ guint8 *frame,
+ guint32 frame_size,
+ VABufferID *coded_buf)
+{
+ /* copy_coded_frame vmethod: repackage one coded frame into a fresh
+ GstBuffer. With es_flag unset the frame bytes are copied verbatim;
+ with es_flag set every NAL is re-emitted as a 32-bit big-endian
+ length followed by its payload. The bitstream's backing buffer is
+ handed to the GstBuffer (destroy called with free_flag FALSE). */
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+ GstBuffer *ret_buffer;
+ guint32 nal_size;
+ const guint8 *nal_start;
+ guint8 *frame_end;
+
+ ret_buffer = gst_buffer_new();
+ ENCODER_ASSERT(ret_buffer);
+ H264Bitstream bitstream;
+ h264_bitstream_init(&bitstream, (frame_size+32)*8);
+ h264_bitstream_align(&bitstream, 0);
+ ENCODER_ASSERT(bitstream.bit_size == 0);
+
+ if (!h264_prv->es_flag) { /*nal format*/
+ h264_bitstream_write_byte_array(&bitstream, frame, frame_size);
+ ENCODER_ASSERT(bitstream.bit_size == frame_size*8);
+ } else { /* elementary format */
+ frame_end = frame + frame_size;
+ nal_start = frame;
+ nal_size = 0;
+ while((nal_start = h264_next_nal(nal_start, frame_end-nal_start, &nal_size)) != NULL) {
+ ENCODER_ASSERT(nal_size);
+ /* NOTE(review): when nal_size is 0 this adds 0 and rescans from
+ the same position — confirm h264_next_nal() cannot report a
+ zero size here, otherwise this loop never advances. */
+ if (!nal_size) {
+ nal_start += nal_size;
+ continue;
+ }
+ h264_bitstream_write_uint(&bitstream, nal_size, 32);
+ h264_bitstream_write_byte_array(&bitstream, nal_start, nal_size);
+ nal_start += nal_size;
+ }
+ }
+ h264_bitstream_align(&bitstream, 0);
+
+ /* transfer ownership of the bitstream buffer to the GstBuffer */
+ GST_BUFFER_MALLOCDATA(ret_buffer) =
+ GST_BUFFER_DATA(ret_buffer) = BIT_STREAM_BUFFER(&bitstream);
+ GST_BUFFER_SIZE(ret_buffer) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+ h264_bitstream_destroy(&bitstream, FALSE);
+
+ return ret_buffer;
+}
+
+static EncoderStatus
+h264_encoder_read_sps_pps(GstH264EncoderPrivate *h264_prv, const guint8 *buf, guint32 size)
+{
+ /* Scan a coded frame for SPS and PPS NAL units and cache copies of
+ each in h264_prv. Stops once both are found. Returns
+ ENCODER_DATA_NOT_READY if either is still missing. */
+ const guint8 *end = buf + size;
+ const guint8 *nal_start = buf;
+ guint32 nal_size = 0;
+ guint8 nal_type;
+ GstBuffer *sps = NULL, *pps = NULL;
+
+ while((!sps || !pps) && (nal_start = h264_next_nal(nal_start, end-nal_start, &nal_size)) != NULL) {
+ /* NOTE(review): a zero nal_size advances by 0 bytes and rescans the
+ same position — confirm h264_next_nal() never reports 0 here. */
+ if (!nal_size) {
+ nal_start += nal_size;
+ continue;
+ }
+
+ /* nal_unit_type is the low 5 bits of the first NAL byte */
+ nal_type = (*nal_start)&0x1F;
+ switch (nal_type) {
+ case NAL_SPS: {
+ sps = gst_buffer_new_and_alloc(nal_size);
+ memcpy(GST_BUFFER_DATA(sps), nal_start, nal_size);
+ gst_buffer_replace(&h264_prv->sps_data, sps);
+ gst_buffer_unref(sps); /*don't set to NULL*/
+ break;
+ }
+
+ case NAL_PPS: {
+ pps = gst_buffer_new_and_alloc(nal_size);
+ memcpy(GST_BUFFER_DATA(pps), nal_start, nal_size);
+ gst_buffer_replace(&h264_prv->pps_data, pps);
+ gst_buffer_unref(pps);
+ break;
+ }
+
+ default:
+ break;
+ }
+ nal_start += nal_size;
+
+ }
+ if (!sps || !pps) {
+ return ENCODER_DATA_NOT_READY;
+ }
+ return ENCODER_NO_ERROR;
+}
+
+static void
+gst_h264_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size)
+{
+  /* notify_frame vmethod: scan each coded frame for SPS/PPS until both
+   * have been cached, then switch frame notification off. */
+  GstH264EncoderPrivate *priv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+
+  if (NULL == priv->sps_data || NULL == priv->pps_data)
+    h264_encoder_read_sps_pps(priv, buf, size);
+
+  if (NULL != priv->sps_data && NULL != priv->pps_data)
+    gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(encoder), FALSE);
+}
+
+
+static gboolean
+h264_read_sps_attributes(const guint8 *sps_data, guint32 sps_size,
+                         guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc)
+{
+  /* Pull profile_idc, profile_compatibility and level_idc out of a raw
+   * SPS NAL: they occupy the three bytes right after the NAL header.
+   * Returns FALSE when the SPS is too short to contain them. */
+  ENCODER_ASSERT(profile_idc && profile_comp && level_idc);
+  ENCODER_ASSERT(sps_size >= 4);
+  if (sps_size < 4)
+    return FALSE;
+
+  /* sps_data[0] is the NAL header byte and is skipped */
+  *profile_idc = sps_data[1];
+  *profile_comp = sps_data[2];
+  *level_idc = sps_data[3];
+  return TRUE;
+}
+
+
+static EncoderStatus
+gst_h264_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GList **coded_pics)
+{
+ /* flush vmethod. Currently a stub: nothing is drained and no state is
+ reset (the frame-count/notify reset sketched below is disabled).
+ Always reports ENCODER_NO_ERROR. */
+ GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
+ EncoderStatus ret = ENCODER_NO_ERROR;
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+
+ //h264_prv->frame_count = 0;
+ //gst_vaapi_base_encoder_set_frame_notify((GST_VAAPI_BASE_ENCODER)encoder, TRUE);
+
+ //end:
+ return ret;
+}
+
+/*test*/
+static int draw_picture(int width, int height,
+ unsigned char *Y_start,
+ unsigned char *U_start,
+ unsigned char *V_start,
+ int UV_interleave, int box_width, int row_shift);
+
+int main_test(int argc, char* argv[])
+{
+  /* Standalone smoke test: encodes 50 frames of a synthetic moving-box
+   * pattern at 1280x720 and writes the stream to tmp.h264.
+   * BUGFIX: buffer_size used width*width*3/2 instead of
+   * width*height*3/2 — it under-allocates whenever height > width
+   * (and over-allocated for this 1280x720 setup). The unused
+   * y_width/u_width/v_width locals were also removed. */
+  EncoderStatus ret = ENCODER_NO_ERROR;
+  GstVaapiEncoder *encoder = NULL;
+
+  GList *coded_pics = NULL;
+  GstBuffer **raw_buffer = NULL;
+  const guint32 raw_buffer_num = 20;
+
+  GstBuffer *tmp_buffer;
+
+  guint32 i = 0, k = 0;
+
+  gst_init (&argc, &argv);
+
+  g_type_init();
+  if (!g_thread_supported ())
+    g_thread_init (NULL);
+
+  GstH264Encoder *h264_encoder = gst_h264_encoder_new();
+  encoder = GST_VAAPI_ENCODER(h264_encoder);
+  ENCODER_ASSERT(encoder);
+
+  h264_encoder->profile = 64;
+  h264_encoder->level = 30;
+  encoder->width = 1280;
+  encoder->height = 720;
+  encoder->frame_rate = 10;
+  h264_encoder->bitrate = 512*1000;
+  h264_encoder->intra_period = 30;
+  ret = gst_vaapi_encoder_initialize(encoder);
+  ENCODER_ASSERT(ret == ENCODER_NO_ERROR);
+  ret = gst_vaapi_encoder_open(encoder, NULL);
+  ENCODER_ASSERT(ret == ENCODER_NO_ERROR);
+
+  /* planar 4:2:0 layout: full-size Y plane, quarter-size U and V */
+  guint32 buffer_size = encoder->width * encoder->height *3 /2;
+  guint32 y_size = encoder->width * encoder->height;
+  guint32 u_size = (encoder->width/2) * (encoder->height/2);
+  guint8 *y_src, *u_src, *v_src;
+
+  /* pre-draw a ring of raw frames with a moving box pattern */
+  int box_width=8;
+  int row_shift=0;
+  raw_buffer = (GstBuffer**)g_malloc0(raw_buffer_num*sizeof(GstBuffer*));
+  for (i = 0; i < raw_buffer_num; i++) {
+    raw_buffer[i] = gst_buffer_new_and_alloc(buffer_size);
+    y_src = GST_BUFFER_DATA(raw_buffer[i]);
+    u_src = y_src + y_size;
+    v_src = u_src + u_size;
+
+    draw_picture(encoder->width, encoder->height, y_src, u_src, v_src, 0, box_width, row_shift);
+    row_shift++;
+    if (row_shift==(2*box_width)) row_shift= 0;
+  }
+
+  FILE *fp = fopen("tmp.h264", "wb");
+  ENCODER_ASSERT(fp);
+
+  k = 0;
+  /* encode 50 frames, cycling through the pre-drawn ring */
+  for (i = 0; i < 50; i++) {
+    coded_pics = NULL;
+    ret = gst_vaapi_encoder_encode(encoder, raw_buffer[k], &coded_pics);
+    ENCODER_ASSERT(ENCODER_NO_ERROR == ret);
+    ++k;
+    if (k >= raw_buffer_num) k = 0;
+
+    /* drain every coded picture produced for this input frame */
+    while (coded_pics) {
+      tmp_buffer = coded_pics->data;
+      coded_pics = g_list_remove(coded_pics, tmp_buffer);
+      fwrite(GST_BUFFER_DATA(tmp_buffer), GST_BUFFER_SIZE(tmp_buffer), 1, fp);
+      printf("F:%d, S:%d, %s\n", i, GST_BUFFER_SIZE(tmp_buffer), vaapi_encoder_dump_bytes(GST_BUFFER_DATA(tmp_buffer)+4, 8));
+      gst_buffer_unref(tmp_buffer);
+    }
+  }
+  fclose(fp);
+
+  ret = gst_vaapi_encoder_close(encoder);
+  ENCODER_ASSERT(ENCODER_NO_ERROR == ret);
+
+  for (i = 0; i < raw_buffer_num; i++) {
+    gst_buffer_unref(raw_buffer[i]);
+  }
+  g_free(raw_buffer);
+  gst_vaapi_encoder_unref(encoder);
+
+  return 0;
+}
+
+EncoderStatus
+gst_h264_encoder_get_avcC_codec_data(GstVaapiEncoder *encoder, GstBuffer **buffer)
+{
+ /* Build the avcC-style codec_data (AVCDecoderConfigurationRecord)
+ from the cached SPS and PPS. Returns ENCODER_DATA_NOT_READY until
+ both headers have been captured from the stream. On success *buffer
+ owns the serialized record. */
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ GstBuffer *avc_codec;
+ const guint32 configuration_version = 0x01;
+ const guint32 length_size_minus_one = 0x03; /* 4-byte NAL length prefixes */
+ guint32 profile, profile_comp, level_idc;
+
+ ENCODER_ASSERT(buffer);
+ if (!h264_prv->sps_data || !h264_prv->pps_data) {
+ return ENCODER_DATA_NOT_READY;
+ }
+
+ /* profile/compat/level come straight from the SPS bytes */
+ if (FALSE == h264_read_sps_attributes(GST_BUFFER_DATA(h264_prv->sps_data),
+ GST_BUFFER_SIZE(h264_prv->sps_data),
+ &profile, &profile_comp, &level_idc))
+ {
+ ENCODER_ASSERT(0);
+ return ENCODER_DATA_ERR;
+ }
+
+ H264Bitstream bitstream;
+ h264_bitstream_init(&bitstream,
+ (GST_BUFFER_SIZE(h264_prv->sps_data)+GST_BUFFER_SIZE(h264_prv->pps_data) + 32)*8);
+
+ /*codec_data*/
+ h264_bitstream_write_uint(&bitstream, configuration_version, 8);
+ h264_bitstream_write_uint(&bitstream, profile, 8);
+ h264_bitstream_write_uint(&bitstream, profile_comp, 8);
+ h264_bitstream_write_uint(&bitstream, level_idc, 8);
+ h264_bitstream_write_uint(&bitstream, h264_bit_mask[6], 6); /*111111*/
+ h264_bitstream_write_uint(&bitstream, length_size_minus_one, 2);
+ h264_bitstream_write_uint(&bitstream, h264_bit_mask[3], 3); /*111*/
+
+ /*write sps*/
+ h264_bitstream_write_uint(&bitstream, 1, 5); /* sps count = 1*/
+ ENCODER_ASSERT( BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+ /* each parameter set is a 16-bit length followed by its raw bytes */
+ h264_bitstream_write_uint(&bitstream, GST_BUFFER_SIZE(h264_prv->sps_data), 16);
+ h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->sps_data),
+ GST_BUFFER_SIZE(h264_prv->sps_data));
+
+ /*write pps*/
+ h264_bitstream_write_uint(&bitstream, 1, 8); /*pps count = 1*/
+ h264_bitstream_write_uint(&bitstream, GST_BUFFER_SIZE(h264_prv->pps_data), 16);
+ h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->pps_data),
+ GST_BUFFER_SIZE(h264_prv->pps_data));
+
+ /* hand the bitstream's backing buffer over to the GstBuffer */
+ avc_codec = gst_buffer_new();
+ GST_BUFFER_MALLOCDATA(avc_codec) =
+ GST_BUFFER_DATA(avc_codec) =
+ BIT_STREAM_BUFFER(&bitstream);
+ GST_BUFFER_SIZE(avc_codec) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+ h264_bitstream_destroy(&bitstream, FALSE);
+ *buffer = avc_codec;
+
+ return ENCODER_NO_ERROR;
+}
+
+EncoderStatus
+gst_h264_encoder_get_nal_codec_data(GstVaapiEncoder *encoder, GstBuffer **buffer)
+{
+ /* Alternative codec_data builder (currently unused, see class_init):
+ emits the cached SPS and PPS as Annex-B NALs, each preceded by a
+ 3-byte 0x000001 start code. Returns ENCODER_DATA_NOT_READY until
+ both headers are available. */
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ GstBuffer *nal_sps_pps;
+
+ ENCODER_ASSERT(buffer);
+ if (!h264_prv->sps_data || !h264_prv->pps_data) {
+ return ENCODER_DATA_NOT_READY;
+ }
+
+ H264Bitstream bitstream;
+ h264_bitstream_init(&bitstream,
+ (GST_BUFFER_SIZE(h264_prv->sps_data)+GST_BUFFER_SIZE(h264_prv->pps_data) + 8)*8);
+
+ /*0x000001 start code*/
+ h264_bitstream_write_uint(&bitstream, 0x000001, 24);
+ h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->sps_data),
+ GST_BUFFER_SIZE(h264_prv->sps_data));
+ h264_bitstream_write_uint(&bitstream, 0x000001, 24);
+ h264_bitstream_write_byte_array(&bitstream, GST_BUFFER_DATA(h264_prv->pps_data),
+ GST_BUFFER_SIZE(h264_prv->pps_data));
+
+ /* hand the bitstream's backing buffer over to the GstBuffer */
+ nal_sps_pps = gst_buffer_new();
+ GST_BUFFER_MALLOCDATA(nal_sps_pps) =
+ GST_BUFFER_DATA(nal_sps_pps) =
+ BIT_STREAM_BUFFER(&bitstream);
+ GST_BUFFER_SIZE(nal_sps_pps) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
+ h264_bitstream_destroy(&bitstream, FALSE);
+ *buffer = nal_sps_pps;
+ return ENCODER_NO_ERROR;
+}
+
+static void
+h264_bitstream_init(H264Bitstream *bitstream, guint32 bit_capability)
+{
+  /* Start from an empty stream; optionally reserve bit_capability bits
+   * up front so early writes avoid a realloc. */
+  bitstream->buffer = NULL;
+  bitstream->bit_size = 0;
+  bitstream->max_bit_capability = 0;
+
+  if (bit_capability > 0)
+    h264_bitstream_auto_grow(bitstream, bit_capability);
+}
+
+static gboolean
+h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_size)
+{
+ /* Append the low `bit_size` bits of `value`, MSB first, growing the
+ backing buffer as needed. Bits of `value` above bit_size are masked
+ off via h264_bit_mask. Returns TRUE on success, FALSE when the
+ stream fails to grow (via the `end` label). */
+ gboolean ret = TRUE; /* unused; error path returns via `end` */
+ guint32 byte_pos, bit_offset;
+ guint8 *cur_byte;
+ guint32 fill_bits;
+
+ if(!bit_size) {
+ return TRUE;
+ }
+
+ ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, bit_size), FALSE, "h264_bitstream_auto_grow failed.\n");
+ byte_pos = (bitstream->bit_size>>3);
+ bit_offset = (bitstream->bit_size&0x07);
+ cur_byte = bitstream->buffer + byte_pos;
+ ENCODER_ASSERT(bit_offset < 8 && bitstream->bit_size <= bitstream->max_bit_capability);
+
+ while (bit_size) {
+ /* take as many bits as still fit into the current byte */
+ fill_bits = ((8-bit_offset) < bit_size ? (8-bit_offset) : bit_size);
+ bit_size -= fill_bits;
+ bitstream->bit_size += fill_bits;
+
+ *cur_byte |= ((value>>bit_size) & h264_bit_mask[fill_bits])<<(8-bit_offset-fill_bits);
+ ++cur_byte;
+ bit_offset = 0; /* subsequent bytes are filled from their MSB */
+ }
+ ENCODER_ASSERT(cur_byte <= bitstream->buffer + bitstream->max_bit_capability/8);
+ return TRUE;
+
+ end:
+ return FALSE;
+}
+
+static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value)
+{
+  /* Pad the stream up to the next byte boundary. A zero `value` pads
+   * with 0-bits; any non-zero value pads with 1-bits. No-op (TRUE)
+   * when the stream is already aligned. */
+  guint32 used_bits = (bitstream->bit_size&0x07);
+  guint32 pad_bits;
+
+  if (used_bits == 0)
+    return TRUE;
+
+  pad_bits = 8 - used_bits;
+  return h264_bitstream_write_uint(bitstream,
+                                   value ? h264_bit_mask[pad_bits] : 0,
+                                   pad_bits);
+}
+
+
+static gboolean
+h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, guint32 byte_size)
+{
+  /* Append byte_size raw bytes. Byte-aligned streams take the fast
+   * memcpy path; otherwise each byte is written bit-by-bit.
+   * Returns TRUE on success, FALSE when the stream cannot grow.
+   * BUGFIX: an empty array is a successful no-op; the original
+   * returned 0 (FALSE), reporting success as failure. */
+  if (!byte_size) {
+    return TRUE;
+  }
+  ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, byte_size<<3), FALSE, "h264_bitstream_auto_grow failed.\n");
+  if (0 == (bitstream->bit_size&0x07)) {
+    memcpy(&bitstream->buffer[bitstream->bit_size>>3], buf, byte_size);
+    bitstream->bit_size += (byte_size<<3);
+  } else {
+    /* unaligned slow path: not expected in practice, hence the assert */
+    ENCODER_ASSERT(0);
+    while(byte_size) {
+      h264_bitstream_write_uint(bitstream, *buf, 8);
+      --byte_size;
+      ++buf;
+    }
+  }
+  return TRUE;
+
+end:
+  return FALSE;
+}
+
+static gboolean
+h264_bitstream_write_ue(H264Bitstream *bitstream, guint32 value)
+{
+ /* Unsigned Exp-Golomb coding: emit (bitlen(value+1)-1) zero bits
+ followed by value+1 in bitlen bits. Returns FALSE when the stream
+ fails to grow.
+ NOTE(review): value == G_MAXUINT32 makes ++value wrap to 0, leaving
+ size_in_bits at 0 — callers must not pass that value; confirm. */
+ gboolean ret = TRUE; /* unused; error path returns via `end` */
+ guint32 size_in_bits = 0;
+ guint32 tmp_value = ++value;
+ while (tmp_value) {
+ ++size_in_bits;
+ tmp_value >>= 1;
+ }
+ ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, 0, size_in_bits-1), FALSE, "h264_bitstream_write_ue failed.\n");
+ ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, value, size_in_bits), FALSE, "h264_bitstream_write_ue failed.\n");
+ return TRUE;
+
+end:
+ return FALSE;
+}
+
+static gboolean
+h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value)
+{
+  /* Signed Exp-Golomb coding: v > 0 maps to 2v-1, v <= 0 maps to -2v.
+   * The prototype keeps the historical guint32 parameter, but the bits
+   * carry a signed quantity, so it is reinterpreted as gint32 here.
+   * BUGFIX: the original compared the unsigned parameter against <= 0,
+   * which made the negative branch unreachable for every value but 0.
+   * Returns FALSE when the underlying ue() write fails. */
+  guint32 new_val;
+  gint32 signed_value = (gint32)value;
+
+  if (signed_value <= 0) {
+    /* negate in unsigned arithmetic to avoid shifting a negative */
+    new_val = ((guint32)(-signed_value)) << 1;
+  } else {
+    new_val = (((guint32)signed_value) << 1) - 1;
+  }
+
+  ENCODER_CHECK_STATUS(h264_bitstream_write_ue(bitstream, new_val), FALSE, "h264_bitstream_write_se failed.\n");
+  return TRUE;
+
+end:
+  return FALSE;
+}
+
+static gboolean
+h264_bitstream_write_trailing_bits(H264Bitstream *bitstream)
+{
+ /* rbsp_trailing_bits(): a single stop bit followed by zero padding up
+ to the next byte boundary. Always returns TRUE. */
+ h264_bitstream_write_uint(bitstream, 1, 1);
+ h264_bitstream_align(bitstream, 0);
+ return TRUE;
+}
+
+static void
+h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag)
+{
+  /* Reset the stream to its empty state. The backing buffer is only
+   * released when free_flag is set; callers that handed the buffer off
+   * (e.g. into a GstBuffer) pass FALSE to keep it alive. */
+  if (free_flag && bitstream->buffer) {
+    free (bitstream->buffer);
+  }
+
+  bitstream->buffer = NULL;
+  bitstream->max_bit_capability = 0;
+  bitstream->bit_size = 0;
+}
+
+static gboolean
+h264_bitstream_auto_grow(H264Bitstream *bitstream, guint32 extra_bit_size)
+{
+  /* Ensure capacity for extra_bit_size more bits. Grows in
+   * (H264_BITSTREAM_ALLOC_ALIGN_MASK+1)-bit steps and zero-fills the
+   * newly added bytes. Returns FALSE when reallocation fails.
+   * BUGFIX: the original wrote realloc()'s result straight into
+   * bitstream->buffer — on failure that leaks the old buffer and the
+   * following memset dereferences NULL. */
+  guint32 new_bit_size = extra_bit_size + bitstream->bit_size;
+  guint32 clear_pos;
+  guint8 *new_buffer;
+
+  ENCODER_ASSERT(bitstream->bit_size <= bitstream->max_bit_capability);
+  if (new_bit_size <= bitstream->max_bit_capability) {
+    return TRUE;
+  }
+
+  new_bit_size = ((new_bit_size + H264_BITSTREAM_ALLOC_ALIGN_MASK)
+                  &(~H264_BITSTREAM_ALLOC_ALIGN_MASK));
+  ENCODER_ASSERT(new_bit_size%(H264_BITSTREAM_ALLOC_ALIGN_MASK+1) == 0);
+  clear_pos = ((bitstream->bit_size+7)>>3);
+  /* keep the old buffer valid until the reallocation has succeeded */
+  new_buffer = realloc(bitstream->buffer, new_bit_size>>3);
+  if (!new_buffer) {
+    return FALSE;
+  }
+  bitstream->buffer = new_buffer;
+  memset(bitstream->buffer+clear_pos, 0, (new_bit_size>>3)-clear_pos);
+  bitstream->max_bit_capability = new_bit_size;
+  return TRUE;
+}
+
+/* Serializes a baseline/main-profile H.264 sequence parameter set (SPS)
+ * into the bitstream, using the encoder's profile, level, width, height
+ * and the fixed choices below (pic_order_cnt_type 0, frame_mbs_only,
+ * single reference frame, no VUI). Frame cropping is emitted when the
+ * height is not a multiple of 16. Always returns TRUE; the individual
+ * write results are not checked (the error path is commented out).
+ * NOTE(review): high profile, interlaced coding and non-zero
+ * pic_order_cnt_type are unimplemented and guarded by ENCODER_ASSERT(0). */
+static gboolean
+h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+{
+ guint32 constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
+ guint32 seq_parameter_set_id = 0;
+
+ /*need to set the values*/
+ guint32 log2_max_frame_num_minus4 = 0; // 1? 3?
+ guint32 pic_order_cnt_type = 0; // Must be 0
+ guint32 log2_max_pic_order_cnt_lsb_minus4 = 0; // 2 ? 4?
+ guint32 num_ref_frames = 1; // only P frames
+ guint32 gaps_in_frame_num_value_allowed_flag = 0; // ??
+ guint32 mb_width = (ENCODER_WIDTH(h264_prv->public)+15)/16; // mb_width
+ guint32 mb_height = (ENCODER_HEIGHT(h264_prv->public)+15)/16; // mb_height
+ guint32 frame_mbs_only_flag = 1; // only mbs
+ guint32 frame_cropping_flag = 0;
+ guint32 frame_crop_bottom_offset = 0;
+ guint32 vui_present_flag = 0; // no vui flags
+
+
+ constraint_set0_flag = h264_prv->public->profile == H264_PROFILE_BASELINE;
+ constraint_set1_flag = h264_prv->public->profile <= H264_PROFILE_MAIN;
+ constraint_set2_flag = 0;
+ constraint_set3_flag = 0;
+
+ /* crop the bottom rows that pad the height up to a macroblock multiple */
+ if (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) {
+ frame_cropping_flag = 1;
+ frame_crop_bottom_offset =
+ (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) / (2 * (!frame_mbs_only_flag + 1));
+ }
+
+ h264_bitstream_write_uint(bitstream, h264_prv->public->profile, 8); /* profile_idc */
+ h264_bitstream_write_uint(bitstream, constraint_set0_flag, 1); /* constraint_set0_flag */
+ h264_bitstream_write_uint(bitstream, constraint_set1_flag, 1); /* constraint_set1_flag */
+ h264_bitstream_write_uint(bitstream, constraint_set2_flag, 1); /* constraint_set2_flag */
+ h264_bitstream_write_uint(bitstream, constraint_set3_flag, 1); /* constraint_set3_flag */
+ h264_bitstream_write_uint(bitstream, 0, 4); /* reserved_zero_4bits */
+ h264_bitstream_write_uint(bitstream, h264_prv->public->level, 8); /* level_idc */
+ h264_bitstream_write_ue(bitstream, seq_parameter_set_id); /* seq_parameter_set_id */
+
+ if (h264_prv->public->profile >= H264_PROFILE_HIGH) {
+ /* FIXME: fix for high profile */
+ ENCODER_ASSERT(0);
+ }
+
+ h264_bitstream_write_ue(bitstream, log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
+ h264_bitstream_write_ue(bitstream, pic_order_cnt_type); /* pic_order_cnt_type */
+
+ if (pic_order_cnt_type == 0)
+ h264_bitstream_write_ue(bitstream, log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
+ else {
+ ENCODER_ASSERT(0);
+ }
+
+ h264_bitstream_write_ue(bitstream, num_ref_frames); /* num_ref_frames */
+ h264_bitstream_write_uint(bitstream, gaps_in_frame_num_value_allowed_flag, 1); /* gaps_in_frame_num_value_allowed_flag */
+
+ h264_bitstream_write_ue(bitstream, mb_width - 1); /* pic_width_in_mbs_minus1 */
+ h264_bitstream_write_ue(bitstream, mb_height - 1); /* pic_height_in_map_units_minus1 */
+ h264_bitstream_write_uint(bitstream, frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
+
+ if (!frame_mbs_only_flag) { //ONLY mbs
+ ENCODER_ASSERT(0);
+ }
+
+ h264_bitstream_write_uint(bitstream, 0, 1); /* direct_8x8_inference_flag */
+ h264_bitstream_write_uint(bitstream, frame_cropping_flag, 1); /* frame_cropping_flag */
+
+ if (frame_cropping_flag) {
+ h264_bitstream_write_ue(bitstream, 0); /* frame_crop_left_offset */
+ h264_bitstream_write_ue(bitstream, 0); /* frame_crop_right_offset */
+ h264_bitstream_write_ue(bitstream, 0); /* frame_crop_top_offset */
+ h264_bitstream_write_ue(bitstream, frame_crop_bottom_offset); /* frame_crop_bottom_offset */
+ }
+
+ h264_bitstream_write_uint(bitstream, vui_present_flag, 1); /* vui_parameters_present_flag */
+ h264_bitstream_write_trailing_bits(bitstream); /* rbsp_trailing_bits */
+ return TRUE;
+
+ //end:
+ //return FALSE;
+
+}
+
+/* Scans an Annex-B byte stream for the next NAL unit. Skips a leading
+ * 3- or 4-byte start code (0x000001 / 0x00000001) if present, then scans
+ * forward for the following start code; *nal_size receives the payload
+ * length up to (but excluding) that next start code, or up to the end of
+ * the buffer. Returns a pointer just past the leading start code, or
+ * NULL when there is no payload.
+ * NOTE(review): 'len >= 0' in the assert is always true since len is
+ * unsigned -- presumably 'len > 0' was intended. */
+static const guint8 *
+h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size)
+{
+ const guint8 *cur = buffer;
+ const guint8 *end = buffer + len;
+ const guint8 *nal_start = NULL;
+ guint32 flag = 0xFFFFFFFF;
+ guint32 nal_start_len = 0;
+
+ ENCODER_ASSERT(len >= 0 && buffer && nal_size);
+ if (len < 3) {
+ *nal_size = len;
+ nal_start = (len ? buffer : NULL);
+ return nal_start;
+ }
+
+ /*locate head postion*/
+ if (!buffer[0] && !buffer[1]) {
+ if (buffer[2] == 1) { // 0x000001
+ nal_start_len = 3;
+ } else if (!buffer[2] && len >=4 && buffer[3] == 1) { //0x00000001
+ nal_start_len = 4;
+ }
+ }
+ nal_start = buffer + nal_start_len;
+ cur = nal_start;
+
+ /*find next nal start position*/
+ while (cur < end) {
+ flag = ((flag<<8) | ((*cur++)&0xFF));
+ if (flag == 0x00000001) {
+ *nal_size = cur - 4 - nal_start;
+ break;
+ } else if ((flag&0x00FFFFFF) == 0x00000001) {
+ *nal_size = cur - 3 - nal_start;
+ break;
+ }
+ }
+ /* ran off the end: the remainder of the buffer is the last NAL */
+ if (cur >= end) {
+ *nal_size = end - nal_start;
+ if (nal_start >= end) {
+ nal_start = NULL;
+ }
+ }
+ return nal_start;
+}
+
+
+/* Picture parameter set serialization -- unimplemented stub; the assert
+ * fires if it is ever reached. */
+static gboolean
+h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+{
+ ENCODER_ASSERT(0);
+ return TRUE;
+}
+
+/* Test-pattern generator: fills the Y plane with a moving checkerboard
+ * (0xeb/0x10 squares of box_width pixels, scrolled horizontally by
+ * row_shift) and the chroma plane(s) with a flat mid-gray (0x80).
+ * UV_interleave selects NV12-style interleaved UV (written through
+ * U_start) versus separate U/V planes. 'field' is hard-coded to 0, so
+ * the garbage-fill branches for single-field output are currently dead.
+ * Always returns 0.
+ * NOTE(review): in the interleaved branch U_pitch is width/2 but
+ * memset() writes 'width' bytes per row -- rows overlap by half. That
+ * happens to produce the same flat gray here, but confirm the intended
+ * pitch before reusing this for non-constant chroma. */
+static int draw_picture(int width, int height,
+ unsigned char *Y_start,
+ unsigned char *U_start,
+ unsigned char *V_start,
+ int UV_interleave, int box_width, int row_shift)
+{
+ int row;
+ int field = 0;
+ int Y_pitch = width;
+ int U_pitch = width/2;
+ int V_pitch = width/2;
+
+ /* copy Y plane */
+ for (row=0;row<height;row++) {
+ unsigned char *Y_row = Y_start + row * Y_pitch;
+ int jj, xpos, ypos;
+
+ ypos = (row / box_width) & 0x1;
+
+ /* fill garbage data into the other field */
+ if (((field == 1) && (row &1))
+ || ((field == 2) && ((row &1)==0))) {
+ memset(Y_row, 0xff, width);
+ continue;
+ }
+
+ for (jj=0; jj<width; jj++) {
+ xpos = ((row_shift + jj) / box_width) & 0x1;
+
+ if ((xpos == 0) && (ypos == 0))
+ Y_row[jj] = 0xeb;
+ if ((xpos == 1) && (ypos == 1))
+ Y_row[jj] = 0xeb;
+
+ if ((xpos == 1) && (ypos == 0))
+ Y_row[jj] = 0x10;
+ if ((xpos == 0) && (ypos == 1))
+ Y_row[jj] = 0x10;
+ }
+ }
+
+ /* copy UV data */
+ for( row =0; row < height/2; row++) {
+ unsigned short value = 0x80;
+
+ /* fill garbage data into the other field */
+ if (((field == 1) && (row &1))
+ || ((field == 2) && ((row &1)==0))) {
+ value = 0xff;
+ }
+
+ if (UV_interleave) {
+ unsigned short *UV_row = (unsigned short *)(U_start + row * U_pitch);
+
+ memset(UV_row, value, width);
+ } else {
+ unsigned char *U_row = U_start + row * U_pitch;
+ unsigned char *V_row = V_start + row * V_pitch;
+
+ memset (U_row,value,width/2);
+ memset (V_row,value,width/2);
+ }
+ }
+ return 0;
+}
+
+
+
--- /dev/null
+/* gstvaapih264encoder.h -- public interface of the VA-API H.264 encoder
+ * object: GObject boilerplate macros, H.264 profile/level enumerations
+ * (values match the profile_idc/level_idc codes of the H.264 spec),
+ * encoder defaults, and the GstH264Encoder instance/class structs. */
+
+#ifndef _GST_H264_ENCODER_H_
+#define _GST_H264_ENCODER_H_
+
+
+#include "gst/vaapi/gstvaapisurfacepool.h"
+
+#include "gstvaapibaseencoder.h"
+
+G_BEGIN_DECLS
+
+typedef struct _GstH264Encoder GstH264Encoder;
+typedef struct _GstH264EncoderPrivate GstH264EncoderPrivate;
+typedef struct _GstH264EncoderClass GstH264EncoderClass;
+
+
+#define GST_TYPE_H264_ENCODER (gst_h264_encoder_get_type())
+#define GST_IS_H264_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_H264_ENCODER))
+#define GST_IS_H264_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_H264_ENCODER))
+#define GST_H264_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_H264_ENCODER, GstH264EncoderClass))
+#define GST_H264_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_H264_ENCODER, GstH264Encoder))
+#define GST_H264_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_H264_ENCODER, GstH264EncoderClass))
+#define GST_H264_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_H264_ENCODER,GstH264EncoderPrivate))
+
+/* Values are the profile_idc codes used in the SPS. */
+typedef enum {
+ H264_PROFILE_BASELINE = 66,
+ H264_PROFILE_MAIN = 77,
+ H264_PROFILE_EXTENDED = 88,
+ H264_PROFILE_HIGH = 100,
+ H264_PROFILE_HIGH10 = 110,
+ H264_PROFILE_HIGH422 = 122,
+ H264_PROFILE_HIGH444 = 144,
+ H264_PROFILE_HIGH444_PREDICTIVE = 244,
+} H264_Profile;
+
+/* Values are the level_idc codes used in the SPS (level x10). */
+typedef enum {
+ H264_LEVEL_10 = 10, /* QCIF format, < 380160 samples/sec */
+ H264_LEVEL_11 = 11, /* CIF format, < 768000 samples/sec */
+ H264_LEVEL_12 = 12, /* CIF format, < 1536000 samples/sec */
+ H264_LEVEL_13 = 13, /* CIF format, < 3041280 samples/sec */
+ H264_LEVEL_20 = 20, /* CIF format, < 3041280 samples/sec */
+ H264_LEVEL_21 = 21, /* HHR format, < 5068800 samples/sec */
+ H264_LEVEL_22 = 22, /* SD/4CIF format, < 5184000 samples/sec */
+ H264_LEVEL_30 = 30, /* SD/4CIF format, < 10368000 samples/sec */
+ H264_LEVEL_31 = 31, /* 720pHD format, < 27648000 samples/sec */
+ H264_LEVEL_32 = 32, /* SXGA format, < 55296000 samples/sec */
+ H264_LEVEL_40 = 40, /* 2Kx1K format, < 62914560 samples/sec */
+ H264_LEVEL_41 = 41, /* 2Kx1K format, < 62914560 samples/sec */
+ H264_LEVEL_42 = 42, /* 2Kx1K format, < 125829120 samples/sec */
+ H264_LEVEL_50 = 50, /* 3672x1536 format, < 150994944 samples/sec */
+ H264_LEVEL_51 = 51, /* 4096x2304 format, < 251658240 samples/sec */
+} H264_Level;
+
+#define H264_DEFAULT_PROFILE H264_PROFILE_BASELINE
+#define H264_DEFAULT_LEVEL H264_LEVEL_30
+#define H264_DEFAULT_INIT_QP 24
+#define H264_DEFAULT_MIN_QP 1
+#define H264_DEFAULT_INTRA_PERIOD 30
+#define H264_DEFAULT_FPS 30
+#define H264_DEFAULT_SLICE_NUM 1
+
+/* Public encoder settings; read by the encoder implementation when the
+ * pipeline starts. */
+struct _GstH264Encoder {
+ GstVaapiBaseEncoder parent; /*based on gobject*/
+
+ guint32 profile;
+ guint32 level;
+ guint32 bitrate;
+ guint32 intra_period;
+ guint32 init_qp; /*default 24*/
+ guint32 min_qp; /*default 1*/
+ guint32 slice_num;
+};
+
+struct _GstH264EncoderClass {
+ GstVaapiBaseEncoderClass parent_class;
+};
+
+
+GType gst_h264_encoder_get_type(void);
+
+GstH264Encoder *gst_h264_encoder_new(void);
+/* Convenience unref wrapper for a GstH264Encoder instance. */
+static inline void gst_h264_encoder_unref (GstH264Encoder * encoder)
+{
+ g_object_unref (encoder);
+}
+
+void gst_h264_encoder_set_es_flag(GstH264Encoder* encoder, gboolean es);
+
+
+G_END_DECLS
+
+#endif /*_GST_H264_ENCODER_H_ */
+
--- /dev/null
+/* gstvaapimpeg4encode.c -- GStreamer element glue for the VA-API MPEG-4
+ * encoder: element details, static pad templates (VA-API surfaces in,
+ * MPEG-4 elementary stream out), property plumbing, and GObject
+ * boilerplate. */
+#include "gstvaapimpeg4encode.h"
+#include "gstvaapimpeg4encoder.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_mpeg4_encode_debug);
+#define GST_CAT_DEFAULT gst_vaapi_mpeg4_encode_debug
+
+
+/* NOTE(review): the caps string contains a trailing space after
+ * "video/x-vaapi-surface " -- presumably a typo; confirm it still parses
+ * as intended by gst_caps_from_string(). */
+static const char gst_mpeg4encode_sink_caps_str[] =
+ GST_CAPS_CODEC("video/x-vaapi-surface ")
+ ;
+
+static const GstElementDetails gst_mpeg4encode_details =
+ GST_ELEMENT_DETAILS(
+ "VA-API mpeg4 encoder",
+ "Codec/Encoder/Video",
+ "A VA-API based mpeg4 encoder",
+ "Feng Yuan<feng.yuan@intel.com>");
+
+
+static const char gst_mpeg4encode_src_caps_str[] =
+ GST_CAPS_CODEC("video/mpeg, mpegversion=4");
+
+static GstStaticPadTemplate gst_mpeg4encode_sink_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_mpeg4encode_sink_caps_str));
+
+static GstStaticPadTemplate gst_mpeg4encode_src_factory =
+ GST_STATIC_PAD_TEMPLATE(
+ "src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS(gst_mpeg4encode_src_caps_str));
+
+static void gst_mpeg4encode_finalize(GObject *object);
+static void gst_mpeg4encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec);
+static void gst_mpeg4encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+/* NOTE(review): the two _check_valid_* prototypes below have no visible
+ * definitions in this patch -- confirm they exist or remove them to avoid
+ * unused-declaration warnings. */
+static gboolean _mpeg4_check_valid_profile(guint profile);
+static gboolean _mpeg4_check_valid_level(guint level);
+
+
+/* mpeg4 encode */
+GST_BOILERPLATE(
+ GstMPEG4Encode,
+ gst_mpeg4encode,
+ GstVaapiEncode,
+ GST_TYPE_VAAPI_ENCODE);
+
+enum {
+ MPEG4_PROP_0,
+ MPEG4_PROP_PROFILE,
+ MPEG4_PROP_BITRATE,
+ MPEG4_PROP_INTRA_PERIOD,
+ MPEG4_PROP_INIT_QP,
+ MPEG4_PROP_MIN_QP,
+};
+
+
+/* base_init: registers element details and the static sink/src pad
+ * templates on the element class. */
+static void
+gst_mpeg4encode_base_init(gpointer klass)
+{
+ GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+
+ gst_element_class_set_details(element_class, &gst_mpeg4encode_details);
+
+ /* sink pad */
+ gst_element_class_add_pad_template(
+ element_class,
+ gst_static_pad_template_get(&gst_mpeg4encode_sink_factory)
+ );
+
+ /* src pad */
+ gst_element_class_add_pad_template(
+ element_class,
+ gst_static_pad_template_get(&gst_mpeg4encode_src_factory)
+ );
+}
+
+/* class_init: wires finalize/set_property/get_property and installs the
+ * element properties (profile, bitrate, intra-period, init-qp, min-qp).
+ * NOTE(review): 'encode_class' is fetched but never used here -- confirm
+ * whether a vfunc assignment was dropped or remove the local. */
+static void
+gst_mpeg4encode_class_init(GstMPEG4EncodeClass *klass)
+{
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiEncodeClass * const encode_class = GST_VAAPI_ENCODE_CLASS(klass);
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_mpeg4_encode_debug, "vaapimpeg4encode", 0,
+ "vaapimpeg4encode element");
+
+ object_class->finalize = gst_mpeg4encode_finalize;
+ object_class->set_property = gst_mpeg4encode_set_property;
+ object_class->get_property = gst_mpeg4encode_get_property;
+
+
+ g_object_class_install_property (object_class, MPEG4_PROP_PROFILE,
+ g_param_spec_uint ("profile",
+ "MPEG4 Profile",
+ "Profile supports: 2(Baseline), 3(ASP)",
+ 2,
+ 3,
+ 2,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, MPEG4_PROP_BITRATE,
+ g_param_spec_uint ("bitrate",
+ "MPEG4 encoding bitrate",
+ "MPEG4 encoding bitrate, 10k~100M, (0, auto-calculate)",
+ 0,
+ 100*1000*1000,
+ 0,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, MPEG4_PROP_INTRA_PERIOD,
+ g_param_spec_uint ("intra-period",
+ "MPEG4 encoding intra-period",
+ "MPEG4 encoding intra-period",
+ 1,
+ 300,
+ MPEG4_DEFAULT_INTRA_PERIOD,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, MPEG4_PROP_INIT_QP,
+ g_param_spec_uint ("init-qp",
+ "MPEG4 init-qp",
+ "MPEG4 init-qp",
+ 1,
+ 51,
+ MPEG4_DEFAULT_INIT_QP,
+ G_PARAM_READWRITE));
+ g_object_class_install_property (object_class, MPEG4_PROP_MIN_QP,
+ g_param_spec_uint ("min-qp",
+ "MPEG4 min-qp",
+ "MPEG4 min-qp",
+ 1,
+ 51,
+ MPEG4_DEFAULT_MIN_QP,
+ G_PARAM_READWRITE));
+
+}
+
+/* instance init: creates the underlying GstMPEG4Encoder and attaches it
+ * to the base element.
+ * NOTE(review): 'element_class' is unused -- candidate for removal. */
+static void
+gst_mpeg4encode_init(GstMPEG4Encode *mpeg4_encode, GstMPEG4EncodeClass *klass)
+{
+ GstElementClass * const element_class = GST_ELEMENT_CLASS(klass);
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(mpeg4_encode);
+ encode->encoder = GST_VAAPI_ENCODER(gst_mpeg4_encoder_new());
+ ENCODER_ASSERT(encode->encoder);
+}
+
+/* finalize: nothing element-specific to free; chains up to the parent. */
+static void
+gst_mpeg4encode_finalize(GObject *object)
+{
+ //GstMPEG4Encode * const mpeg4_encode = GST_MPEG4ENCODE(object);
+ G_OBJECT_CLASS(parent_class)->finalize(object);
+}
+
+/* set_property: forwards each element property straight into the public
+ * fields of the wrapped GstMPEG4Encoder. No revalidation happens here;
+ * the encoder validates on start (see gst_mpeg4_validate_parameters). */
+static void
+gst_mpeg4encode_set_property(GObject *object, guint prop_id,
+ const GValue *value, GParamSpec *pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstMPEG4Encoder *mpeg4encoder = GST_MPEG4_ENCODER(encode->encoder);
+
+ ENCODER_ASSERT(mpeg4encoder);
+
+ switch (prop_id) {
+ case MPEG4_PROP_PROFILE: {
+ mpeg4encoder->profile = g_value_get_uint(value);
+ }
+ break;
+
+ case MPEG4_PROP_BITRATE: {
+ mpeg4encoder->bitrate = g_value_get_uint(value);
+ }
+ break;
+
+ case MPEG4_PROP_INTRA_PERIOD: {
+ mpeg4encoder->intra_period = g_value_get_uint(value);
+ }
+ break;
+
+ case MPEG4_PROP_INIT_QP: {
+ mpeg4encoder->init_qp = g_value_get_uint(value);
+ }
+ break;
+
+ case MPEG4_PROP_MIN_QP: {
+ mpeg4encoder->min_qp = g_value_get_uint(value);
+ }
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+/* get_property: mirrors set_property, reading back the encoder fields. */
+static void
+gst_mpeg4encode_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVaapiEncode *encode = GST_VAAPI_ENCODE(object);
+ GstMPEG4Encoder *mpeg4encoder = GST_MPEG4_ENCODER(encode->encoder);
+ ENCODER_ASSERT(mpeg4encoder);
+
+ switch (prop_id) {
+ case MPEG4_PROP_PROFILE:
+ g_value_set_uint (value, mpeg4encoder->profile);
+ break;
+
+ case MPEG4_PROP_BITRATE:
+ g_value_set_uint (value, mpeg4encoder->bitrate);
+ break;
+
+ case MPEG4_PROP_INTRA_PERIOD:
+ g_value_set_uint (value, mpeg4encoder->intra_period);
+ break;
+
+ case MPEG4_PROP_INIT_QP:
+ g_value_set_uint (value, mpeg4encoder->init_qp);
+ break;
+
+ case MPEG4_PROP_MIN_QP:
+ g_value_set_uint (value, mpeg4encoder->min_qp);
+ break;
+
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
--- /dev/null
+/* gstvaapimpeg4encode.h -- declarations for the vaapimpeg4encode
+ * GStreamer element: GObject cast/check macros and the thin
+ * GstMPEG4Encode instance/class structs wrapping GstVaapiEncode. */
+#ifndef GST_VAAPI_MPEG4_ENCODE_H
+#define GST_VAAPI_MPEG4_ENCODE_H
+
+#include <gst/gst.h>
+#include "gstvaapiencode.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_MPEG4ENCODE (gst_mpeg4encode_get_type())
+#define GST_IS_MPEG4ENCODE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MPEG4ENCODE))
+#define GST_IS_MPEG4ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MPEG4ENCODE))
+#define GST_MPEG4ENCODE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_MPEG4ENCODE, GstMPEG4EncodeClass))
+#define GST_MPEG4ENCODE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MPEG4ENCODE, GstMPEG4Encode))
+#define GST_MPEG4ENCODE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MPEG4ENCODE, GstMPEG4EncodeClass))
+
+
+typedef struct _GstMPEG4Encode GstMPEG4Encode;
+typedef struct _GstMPEG4EncodeClass GstMPEG4EncodeClass;
+
+/* No element-specific state; the wrapped encoder lives in the parent. */
+struct _GstMPEG4Encode {
+ GstVaapiEncode parent;
+};
+
+struct _GstMPEG4EncodeClass {
+ GstVaapiEncodeClass parent_class;
+};
+
+GType gst_mpeg4encode_get_type(void);
+
+
+G_END_DECLS
+
+#endif /* GST_VAAPI_MPEG4_ENCODE_H */
+
--- /dev/null
+/* gstvaapimpeg4encoder.c -- VA-API MPEG-4 encoder implementation built on
+ * GstVaapiBaseEncoder: owns the VA sequence/picture/slice parameter
+ * buffers, the reference/reconstructed surface pair, and the extracted
+ * codec_data (decoder configuration) buffer. */
+#include "gstvaapimpeg4encoder.h"
+
+#include <string.h>
+
+#include "gst/gstclock.h"
+
+#include "gst/vaapi/gstvaapiobject.h"
+#include "gst/vaapi/gstvaapiobject_priv.h"
+#include "gst/vaapi/gstvaapicontext.h"
+#include "gst/vaapi/gstvaapisurface.h"
+#include "gst/vaapi/gstvaapivideobuffer.h"
+#include "gst/vaapi/gstvaapidisplay_priv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_mpeg4_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_mpeg4_encoder_debug
+
+
+/* Per-instance private state; all VA buffer IDs start as VA_INVALID_ID
+ * and are lazily created in gst_mpeg4_prepare_encoding(). */
+struct _GstMPEG4EncoderPrivate {
+ GstVaapiSurface *ref_surface; /* reference buffer*/
+ GstVaapiSurface *recon_surface; /* reconstruct buffer*/
+
+ VABufferID seq_parameter;
+ VABufferID pic_parameter;
+ VABufferID slice_parameter;
+ //VABufferID coded_buffer;
+
+ /*total encoded frames*/
+ //guint32 frame_count;
+ GstBuffer *codec_data;
+};
+
+G_DEFINE_TYPE(GstMPEG4Encoder, gst_mpeg4_encoder, GST_TYPE_VAAPI_BASE_ENCODER);
+
+/*
+static EncoderStatus gst_mpeg4_encoder_flush(GstVaapiEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context,
+ GList **coded_pics);
+*/
+static EncoderStatus gst_mpeg4_encoder_get_codec_data(
+ GstVaapiEncoder *encoder, GstBuffer **buffer);
+static gboolean gst_mpeg4_validate_parameters(GstVaapiBaseEncoder *encoder);
+static gboolean gst_mpeg4_encoder_release_resource(
+ GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context);
+static void gst_mpeg4_notify_frame(GstVaapiBaseEncoder *encoder,
+ guint8 *buf, guint32 size);
+
+static EncoderStatus gst_mpeg4_prepare_encoding(GstVaapiBaseEncoder *encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context,
+ GstVaapiSurface *surface, guint frame_index,
+ VABufferID coded_buf, gboolean *is_key);
+
+
+static void gst_mpeg4_encoder_class_init(GstMPEG4EncoderClass *klass);
+static void gst_mpeg4_encoder_init(GstMPEG4Encoder *encoder);
+static void gst_mpeg4_encoder_finalize(GObject *object);
+
+static gboolean mpeg4_encoder_generate_codec_data(const guint8 *in_buffer,
+ guint32 in_size, GstBuffer **out_buffer);
+
+/* Constructor: plain g_object_new wrapper. Caller owns the reference. */
+GstMPEG4Encoder *
+gst_mpeg4_encoder_new(void)
+{
+ return GST_MPEG4_ENCODER(g_object_new(GST_TYPE_MPEG4_ENCODER, NULL));
+}
+
+
+/* class_init: registers the private struct and wires the base-encoder
+ * vfuncs (validate/release/prepare/notify); pre_alloc_resource and
+ * copy_coded_frame are intentionally left NULL, and the flush vfunc
+ * assignment is commented out. */
+static void
+gst_mpeg4_encoder_class_init(GstMPEG4EncoderClass *klass)
+{
+ GObjectClass * const object_class = G_OBJECT_CLASS(klass);
+ GstVaapiEncoderClass * const encoder_class = GST_VAAPI_ENCODER_CLASS(klass);
+ GstVaapiBaseEncoderClass * const base_class = GST_VAAPI_BASE_ENCODER_CLASS(klass);
+
+ g_type_class_add_private(klass, sizeof(GstMPEG4EncoderPrivate));
+
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_mpeg4_encoder_debug, "gst_va_mpeg4_encoder", 0,
+ "gst_va_mpeg4_encoder element");
+
+ object_class->finalize = gst_mpeg4_encoder_finalize;
+
+ base_class->validate_attributes = gst_mpeg4_validate_parameters;
+ base_class->pre_alloc_resource = NULL;
+ base_class->release_resource = gst_mpeg4_encoder_release_resource;
+ base_class->prepare_frame = gst_mpeg4_prepare_encoding;
+ base_class->notify_frame = gst_mpeg4_notify_frame;
+ base_class->copy_coded_frame = NULL;
+
+ /*
+ encoder_class->flush = gst_mpeg4_encoder_flush;
+ */
+ encoder_class->get_codec_data = gst_mpeg4_encoder_get_codec_data;
+}
+
+/* instance init: sets public defaults (simple profile, auto bitrate,
+ * default intra period and QPs), enables frame notification so the
+ * first coded frame can be mined for codec_data, and clears all private
+ * handles to their invalid/NULL states. */
+static void
+gst_mpeg4_encoder_init(GstMPEG4Encoder *mpeg4_encoder)
+{
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+ GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(mpeg4_encoder);
+ ENCODER_ASSERT(mpeg4_prv);
+
+ /* init public */
+ mpeg4_encoder->profile = VAProfileMPEG4Simple;
+ mpeg4_encoder->bitrate = 0;
+ mpeg4_encoder->intra_period = MPEG4_DEFAULT_INTRA_PERIOD;
+ mpeg4_encoder->init_qp = MPEG4_DEFAULT_INIT_QP;
+ mpeg4_encoder->min_qp = MPEG4_DEFAULT_MIN_QP;
+
+ gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(mpeg4_encoder), TRUE);
+ /* init private */
+ mpeg4_prv->ref_surface = NULL;
+ mpeg4_prv->recon_surface = NULL;
+
+ mpeg4_prv->seq_parameter = VA_INVALID_ID;
+ mpeg4_prv->pic_parameter = VA_INVALID_ID;
+ mpeg4_prv->slice_parameter = VA_INVALID_ID;
+
+ mpeg4_prv->codec_data = NULL;
+}
+
+/* finalize: forces an uninitialize if the encoder is still live, then
+ * chains up. NOTE(review): 'mpeg4_prv' is fetched but unused here. */
+static void
+gst_mpeg4_encoder_finalize(GObject *object)
+{
+ /*free private buffers*/
+ GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(object);
+
+ if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
+ gst_vaapi_encoder_uninitialize(encoder);
+ }
+ G_OBJECT_CLASS(gst_mpeg4_encoder_parent_class)->finalize(object);
+}
+
+/* validate_attributes vfunc: rejects zero width/height/fps and profiles
+ * other than Simple/Advanced-Simple, pushes the VA profile down to the
+ * base encoder, fills in defaulted intra period / QPs, and derives an
+ * automatic bitrate (~1:48 compression) when none was set.
+ * NOTE(review): declared 'static' in the prototype above but defined
+ * without 'static' here; also 'init_qp'/'min_qp' are unsigned, so the
+ * '-1 ==' comparisons only match UINT_MAX -- presumably these fields were
+ * once signed sentinels; confirm intent. */
+gboolean
+gst_mpeg4_validate_parameters(GstVaapiBaseEncoder *encoder)
+{
+ GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
+ if (!ENCODER_WIDTH(mpeg4_encoder) || !ENCODER_HEIGHT(mpeg4_encoder) || !ENCODER_FPS(mpeg4_encoder)) {
+ return FALSE;
+ }
+ if (VAProfileMPEG4Simple != mpeg4_encoder->profile && VAProfileMPEG4AdvancedSimple != mpeg4_encoder->profile) {
+ return FALSE;
+ }
+ gst_vaapi_base_encoder_set_va_profile(encoder, mpeg4_encoder->profile);
+
+ if (!mpeg4_encoder->intra_period) {
+ mpeg4_encoder->intra_period = MPEG4_DEFAULT_INTRA_PERIOD;
+ }
+ if (-1 == mpeg4_encoder->init_qp) {
+ mpeg4_encoder->init_qp = MPEG4_DEFAULT_INIT_QP;
+ }
+ if (-1 == mpeg4_encoder->min_qp) {
+ mpeg4_encoder->min_qp = MPEG4_DEFAULT_MIN_QP;
+ }
+
+ /* default compress ratio 1: (4*8*1.5) */
+ if (!mpeg4_encoder->bitrate) {
+ mpeg4_encoder->bitrate = ENCODER_WIDTH(mpeg4_encoder)*ENCODER_HEIGHT(mpeg4_encoder)*ENCODER_FPS(mpeg4_encoder)/4;
+ }
+ return TRUE;
+
+}
+
+/* Destroys whichever VA parameter buffers are currently allocated and
+ * resets their IDs to VA_INVALID_ID.
+ * NOTE(review): 'va_status' is assigned but never inspected -- destroy
+ * failures go unreported. */
+static void
+mpeg4_release_parameters(GstMPEG4Encoder *mpeg4_encoder, GstVaapiDisplay *display)
+{
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+ VAStatus va_status = VA_STATUS_SUCCESS;
+
+ if (VA_INVALID_ID != mpeg4_prv->seq_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, mpeg4_prv->seq_parameter);
+ mpeg4_prv->seq_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != mpeg4_prv->pic_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, mpeg4_prv->pic_parameter);
+ mpeg4_prv->pic_parameter = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != mpeg4_prv->slice_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, mpeg4_prv->slice_parameter);
+ mpeg4_prv->slice_parameter = VA_INVALID_ID;
+ }
+}
+
+/* release_resource vfunc: frees the VA parameter buffers, returns the
+ * reference/reconstructed surfaces to the context (or unrefs them when
+ * no context is available), and drops the cached codec_data buffer.
+ * Always returns TRUE. */
+static gboolean
+gst_mpeg4_encoder_release_resource(GstVaapiBaseEncoder* encoder,
+ GstVaapiDisplay *display,
+ GstVaapiContext *context)
+{
+ GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+
+ mpeg4_release_parameters(mpeg4_encoder, display);
+
+ /*remove ref_surface*/
+ if (mpeg4_prv->ref_surface) {
+ if (context) {
+ gst_vaapi_context_put_surface(context, mpeg4_prv->ref_surface);
+ } else {
+ g_object_unref(mpeg4_prv->ref_surface);
+ }
+ mpeg4_prv->ref_surface = NULL;
+ }
+
+ /*remove recon_surface*/
+ if (mpeg4_prv->recon_surface) {
+ if (context) {
+ gst_vaapi_context_put_surface(context, mpeg4_prv->recon_surface);
+ } else {
+ g_object_unref(mpeg4_prv->recon_surface);
+ }
+ mpeg4_prv->recon_surface = NULL;
+ }
+
+ if (mpeg4_prv->codec_data) {
+ gst_buffer_unref(mpeg4_prv->codec_data);
+ mpeg4_prv->codec_data = NULL;
+ }
+
+ return TRUE;
+}
+
+/* Maps a VA MPEG-4 profile to the profile_and_level_indication byte
+ * written in the visual object sequence header; returns 0 for
+ * unsupported profiles. */
+static guint32
+mpeg4_get_profile_level_indication(guint32 profile)
+{
+ switch(profile) {
+ case VAProfileMPEG4Simple:
+ return MPEG4_DEFAULT_SIMPLE_PROFILE_AND_LEVEL;
+ case VAProfileMPEG4AdvancedSimple:
+ return MPEG4_DEFAULT_ADVANCED_SIMPLE_PROFILE_AND_LEVEL;
+ default:
+ return 0;
+ }
+ return 0;
+}
+
+
+/* prepare_frame vfunc: queues the VA parameter buffers for one frame.
+ * A frame is a keyframe when frame_index is a multiple of intra_period.
+ * On the first call only, the sequence parameter buffer is created and
+ * rendered; on every call the picture and slice parameter buffers are
+ * (re)created -- destroying the previous ones first -- and rendered.
+ * Afterwards ref/recon surfaces are swapped so the just-reconstructed
+ * frame becomes the next reference. Returns ENCODER_NO_ERROR on success
+ * or the corresponding error code from ENCODER_CHECK_STATUS.
+ * NOTE(review): the 'surface' parameter (the input frame) is unused here
+ * -- presumably the base class renders the source surface itself;
+ * confirm. */
+static EncoderStatus
+gst_mpeg4_prepare_encoding(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key)
+{
+ GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
+ VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+
+ VAStatus va_status = VA_STATUS_SUCCESS;
+ EncoderStatus ret = ENCODER_NO_ERROR;
+
+ *is_key = (frame_index % mpeg4_encoder->intra_period == 0);
+
+ /* initialize sequence parameter set, only first time */
+ if (VA_INVALID_ID == mpeg4_prv->seq_parameter) { /*only the first time*/
+ VAEncSequenceParameterBufferMPEG4 seq_mpeg4 = {0};
+
+ seq_mpeg4.profile_and_level_indication = mpeg4_get_profile_level_indication(mpeg4_encoder->profile);
+ seq_mpeg4.intra_period = mpeg4_encoder->intra_period;
+ seq_mpeg4.video_object_layer_width = ENCODER_WIDTH(mpeg4_encoder);
+ seq_mpeg4.video_object_layer_height = ENCODER_HEIGHT(mpeg4_encoder);
+ seq_mpeg4.vop_time_increment_resolution = ENCODER_FPS(mpeg4_encoder);
+ seq_mpeg4.fixed_vop_rate = MPEG4_DEFAULT_FIXED_VOP_RATE;
+ if (seq_mpeg4.fixed_vop_rate) {
+ seq_mpeg4.fixed_vop_time_increment = 1;
+ }
+ seq_mpeg4.bits_per_second = mpeg4_encoder->bitrate;
+ seq_mpeg4.frame_rate = ENCODER_FPS(mpeg4_encoder);
+ seq_mpeg4.initial_qp = mpeg4_encoder->init_qp;
+ seq_mpeg4.min_qp = mpeg4_encoder->min_qp; //mpeg4_encoder->min_qp;
+
+ va_status = vaCreateBuffer(va_dpy, context_id,
+ VAEncSequenceParameterBufferType,
+ sizeof(seq_mpeg4), 1, &seq_mpeg4, &mpeg4_prv->seq_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_ENC_RES_ERR, "mpeg4 alloc seq-buffer failed.\n");
+ va_status = vaRenderPicture(va_dpy, context_id, &mpeg4_prv->seq_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "mpeg4 vaRenderPicture seq-parameters failed.\n");
+ }
+
+ /* set reference and reconstructed surfaces */
+ if (!mpeg4_prv->ref_surface) {
+ mpeg4_prv->ref_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(mpeg4_prv->ref_surface, ENCODER_SURFACE_ERR, "mpeg4 reference surface, mpeg4_pop_free_surface failed.\n");
+ }
+ if (!mpeg4_prv->recon_surface) {
+ mpeg4_prv->recon_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(mpeg4_prv->recon_surface, ENCODER_SURFACE_ERR, "mpeg4 reconstructed surface, mpeg4_pop_free_surface failed.\n");
+ }
+
+ /* initialize picture, every time, every frame */
+ VAEncPictureParameterBufferMPEG4 pic_mpeg4 = {0};
+ pic_mpeg4.reference_picture = GST_VAAPI_OBJECT_ID(mpeg4_prv->ref_surface);
+ pic_mpeg4.reconstructed_picture = GST_VAAPI_OBJECT_ID(mpeg4_prv->recon_surface);
+ pic_mpeg4.coded_buf = coded_buf;
+ pic_mpeg4.picture_width = ENCODER_WIDTH(mpeg4_encoder);
+ pic_mpeg4.picture_height = ENCODER_HEIGHT(mpeg4_encoder);
+ /* modulo_time_base ticks once per second of frames */
+ if (0 == frame_index) {
+ pic_mpeg4.modulo_time_base = 0;
+ } else {
+ pic_mpeg4.modulo_time_base = ((frame_index%ENCODER_FPS(mpeg4_encoder)) == 0 ? 1 : 0);
+ }
+ pic_mpeg4.vop_time_increment = 301%ENCODER_FPS(mpeg4_encoder);
+ pic_mpeg4.picture_type = *is_key ? VAEncPictureTypeIntra : VAEncPictureTypePredictive;
+
+ if (VA_INVALID_ID != mpeg4_prv->pic_parameter) { /* destroy first*/
+ va_status = vaDestroyBuffer(va_dpy, mpeg4_prv->pic_parameter);
+ mpeg4_prv->pic_parameter = VA_INVALID_ID;
+ }
+
+ va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+ sizeof(pic_mpeg4), 1, &pic_mpeg4, &mpeg4_prv->pic_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_ENC_RES_ERR, "mpeg4 creating pic-param buffer failed.\n");
+ va_status = vaRenderPicture(va_dpy, context_id, &mpeg4_prv->pic_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "mpeg4 rendering pic-param buffer failed.\n");
+ /*initialize slice parameters, only ONE slice for mpeg4*/
+ VAEncSliceParameterBuffer slice_mpeg4 = { 0 };
+ slice_mpeg4.start_row_number = 0;
+ slice_mpeg4.slice_height = (ENCODER_HEIGHT(mpeg4_encoder)+15)/16; /*MB?*/
+ slice_mpeg4.slice_flags.bits.is_intra = *is_key;
+ slice_mpeg4.slice_flags.bits.disable_deblocking_filter_idc = 0;
+ if (VA_INVALID_ID != mpeg4_prv->slice_parameter) {
+ vaDestroyBuffer(va_dpy, mpeg4_prv->slice_parameter);
+ mpeg4_prv->slice_parameter = VA_INVALID_ID;
+ }
+
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncSliceParameterBufferType,
+ sizeof(slice_mpeg4),
+ 1,
+ &slice_mpeg4,
+ &mpeg4_prv->slice_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_ENC_RES_ERR, "mpeg4 creating slice-parameters buffer failed.\n");
+
+ va_status = vaRenderPicture(va_dpy, context_id, &mpeg4_prv->slice_parameter, 1);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "mpeg4 rendering slice-parameters buffer failed.\n");
+
+ /*swap ref_surface and recon_surface */
+ GstVaapiSurface *swap = mpeg4_prv->ref_surface;
+ mpeg4_prv->ref_surface = mpeg4_prv->recon_surface;
+ mpeg4_prv->recon_surface = swap;
+
+end:
+ return ret;
+}
+
+/* Copies one coded frame into a fresh GstBuffer. For ASP streams it
+ * patches byte 18 of the buffer to 0x08 when a video-object-layer start
+ * code (00 00 01 20) sits at offset 14 -- apparently a driver fixup.
+ * NOTE(review): named gst_h264_... in this mpeg4 file (copy/paste?),
+ * 'coded_buf' is unused, the class_init above registers
+ * copy_coded_frame = NULL so this function is currently dead code, and
+ * the offset-16 access assumes frame_size > 18 -- confirm before wiring
+ * it up. */
+static GstBuffer *
+gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
+ guint8 *frame, guint32 frame_size, VABufferID *coded_buf)
+
+{
+ /*process data*/
+ GstBuffer* buffer = gst_buffer_new_and_alloc(frame_size);
+ memcpy(GST_BUFFER_DATA(buffer), frame, frame_size);
+
+ #if 1
+ GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+ if (mpeg4_encoder->profile == VAProfileMPEG4AdvancedSimple) {
+ guint8 *start_code = GST_BUFFER_DATA(buffer)+16;
+ if (start_code[0] == 0x01 && start_code[1] == 0x20
+ && start_code[-1] == 0x00 && start_code[-2] == 0x00)
+ {
+ start_code[2] = 0x08;
+ }
+ }
+ #endif
+
+ return buffer;
+}
+
+/* notify_frame vfunc: mines the first coded frame for the decoder
+ * configuration (codec_data); once extracted, turns frame notification
+ * off so this only runs until it succeeds. */
+static void
+gst_mpeg4_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size)
+{
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(encoder);
+ if (!mpeg4_prv->codec_data) {
+ if (!mpeg4_encoder_generate_codec_data(buf, size, &mpeg4_prv->codec_data)) {
+ ENCODER_LOG_ERROR("mpeg4 encoder coded data error, please check <mpeg4_encoder_generate_codec_data>.\n");
+ }
+ }
+ if (mpeg4_prv->codec_data) {
+ gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(encoder), FALSE);
+ }
+}
+
+
+/* flush: releases the VA parameter buffers.
+ * NOTE(review): the flush vfunc registration in class_init is commented
+ * out, so this is currently unreachable; 'mpeg4_prv' and 'coded_pics'
+ * are unused. */
+static EncoderStatus
+gst_mpeg4_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GList **coded_pics)
+{
+ GstMPEG4Encoder *mpeg4_encoder = GST_MPEG4_ENCODER(encoder);
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(mpeg4_encoder);
+
+ mpeg4_release_parameters(mpeg4_encoder, display);
+ return ENCODER_NO_ERROR;
+}
+
+
+/* get_codec_data vfunc: hands out a new reference to the cached
+ * codec_data, or ENCODER_DATA_NOT_READY before the first frame has been
+ * analyzed. */
+static EncoderStatus
+gst_mpeg4_encoder_get_codec_data(GstVaapiEncoder *encoder, GstBuffer **buffer)
+{
+ GstMPEG4EncoderPrivate *mpeg4_prv = GST_MPEG4_ENCODER_GET_PRIVATE(encoder);
+
+ if (!mpeg4_prv->codec_data)
+ return ENCODER_DATA_NOT_READY;
+ *buffer = gst_buffer_ref(mpeg4_prv->codec_data);
+ return ENCODER_NO_ERROR;
+}
+
+#define VISUAL_OBJECT_SEQUENCE_START_CODE 0x000001B0
+#define VISUAL_OBJECT_SEQUENCE_END_CODE 0x000001B1
+#define VISUAL_OBJECT_START_CODE 0x000001B5
+#define VIDEO_OBJECT_PLANE_START_CODE 0x000001B6
+/* Video Object Start Code range */
+#define VIDEO_OBJECT_START_CODE_MIN 0x00000100
+#define VIDEO_OBJECT_START_CODE_MAX 0x0000011F
+/* Video Object Layer Start Code range 0x00000120 ~ 0x0000012F*/
+#define VIDEO_OBJECT_LAYER_START_CODE 0x00000120
+#define VIDEO_OBJECT_LAYER_START_CODE_MASK 0xFFFFFFF0
+
+
+/* Scans a coded frame for the configuration headers: the span starting at
+ * the Visual Object Sequence start code (00 00 01 B0) and ending right
+ * before the first Video Object Plane start code (00 00 01 B6).
+ * On success *out_buffer/*out_size describe that span (pointing into
+ * in_buffer, no copy) and TRUE is returned; FALSE if either code is
+ * missing. */
+static gboolean
+find_video_object_configuration_info(const guint8 *in_buffer, guint32 in_size,
+                       const guint8 **out_buffer, guint32 *out_size)
+{
+  guint32 value = 0x00;
+  const guint8 *start = in_buffer;
+  const guint8 *end = in_buffer + in_size;
+
+  while(in_buffer < end) {
+    value = ((value<<8)|(*in_buffer));
+    /* Fix: only accept a match once at least 4 bytes were consumed,
+     * otherwise a malformed prefix (e.g. 00 01 B0) makes the rolling
+     * value match after 3 bytes and (in_buffer - 3) points before the
+     * buffer — out-of-bounds. Well-formed streams are unaffected. */
+    if (VISUAL_OBJECT_SEQUENCE_START_CODE == value
+        && (in_buffer - start) >= 3) {
+      *out_buffer = in_buffer - 3;
+      ++in_buffer;
+      break;
+    }
+    ++in_buffer;
+  }
+  if (in_buffer >= end)
+    return FALSE;
+
+  /* The configuration headers end where the first VOP start code begins. */
+  while(in_buffer < end) {
+    value = ((value<<8)|(*in_buffer));
+    if (VIDEO_OBJECT_PLANE_START_CODE == value) {
+      *out_size = (in_buffer - 3 - *out_buffer);
+      return TRUE;
+    }
+    ++in_buffer;
+  }
+  return FALSE;
+}
+
+/* Builds the MPEG-4 codec_data buffer from the first coded frame:
+ * the configuration headers (VOS .. up to the first VOP) followed by a
+ * Visual Object Sequence end code. Returns FALSE when the headers cannot
+ * be located; on success *out_buffer is a newly allocated GstBuffer owned
+ * by the caller. */
+static gboolean
+mpeg4_encoder_generate_codec_data(const guint8 *in_buffer, guint32 in_size, GstBuffer **out_buffer)
+{
+  const guint8 *config_data = NULL;
+  guint32 config_size = 0;
+  guint8 *end_code = NULL;
+
+  if (!find_video_object_configuration_info(in_buffer, in_size, &config_data, &config_size)) {
+    return FALSE;
+  }
+  ENCODER_ASSERT(config_size);
+
+  /* headers + 4 bytes for the appended sequence end code */
+  *out_buffer = gst_buffer_new_and_alloc(config_size+4);
+  memcpy(GST_BUFFER_DATA(*out_buffer), config_data, config_size);
+
+  /* Append VISUAL_OBJECT_SEQUENCE_END_CODE in big-endian byte order. */
+  end_code = GST_BUFFER_DATA(*out_buffer) + config_size;
+  end_code[0] = (VISUAL_OBJECT_SEQUENCE_END_CODE>>24);
+  end_code[1] = (VISUAL_OBJECT_SEQUENCE_END_CODE>>16);
+  end_code[2] = (VISUAL_OBJECT_SEQUENCE_END_CODE>>8);
+  end_code[3] = (guint8)VISUAL_OBJECT_SEQUENCE_END_CODE;
+  return TRUE;
+}
+
--- /dev/null
+
+#ifndef _GST_VAAPI_MPEG4_ENCODER_H_
+#define _GST_VAAPI_MPEG4_ENCODER_H_
+
+
+#include "gst/vaapi/gstvaapisurfacepool.h"
+
+#include "gstvaapibaseencoder.h"
+
+G_BEGIN_DECLS
+
+/* Default encoding parameters for the MPEG-4 encoder. */
+#define MPEG4_DEFAULT_INTRA_PERIOD 30
+#define MPEG4_DEFAULT_INIT_QP 15
+#define MPEG4_DEFAULT_MIN_QP 1
+/* profile_and_level_indication values; presumably per ISO/IEC 14496-2
+ * Annex G — TODO confirm against the spec tables. */
+#define MPEG4_DEFAULT_SIMPLE_PROFILE_AND_LEVEL 0x03
+#define MPEG4_DEFAULT_ADVANCED_SIMPLE_PROFILE_AND_LEVEL 0xF3
+
+#define MPEG4_DEFAULT_FIXED_VOP_RATE FALSE
+
+
+typedef struct _GstMPEG4Encoder GstMPEG4Encoder;
+typedef struct _GstMPEG4EncoderPrivate GstMPEG4EncoderPrivate;
+typedef struct _GstMPEG4EncoderClass GstMPEG4EncoderClass;
+
+
+/* Standard GObject type boilerplate for GstMPEG4Encoder. */
+#define GST_TYPE_MPEG4_ENCODER (gst_mpeg4_encoder_get_type())
+#define GST_IS_MPEG4_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_MPEG4_ENCODER))
+#define GST_IS_MPEG4_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_MPEG4_ENCODER))
+#define GST_MPEG4_ENCODER_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_MPEG4_ENCODER, GstMPEG4EncoderClass))
+#define GST_MPEG4_ENCODER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_MPEG4_ENCODER, GstMPEG4Encoder))
+#define GST_MPEG4_ENCODER_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_MPEG4_ENCODER, GstMPEG4EncoderClass))
+#define GST_MPEG4_ENCODER_GET_PRIVATE(obj) (G_TYPE_INSTANCE_GET_PRIVATE((obj),GST_TYPE_MPEG4_ENCODER,GstMPEG4EncoderPrivate))
+
+struct _GstMPEG4Encoder {
+  GstVaapiBaseEncoder parent;   /*based on gobject*/
+  VAProfile profile;  /* VAProfileMPEG4Simple, VAProfileMPEG4AdvancedSimple */
+  guint32  bitrate;   /* target bitrate; 0 presumably means unset — TODO confirm */
+  guint32  intra_period;  /* keyframe interval in frames */
+  guint32  init_qp;  /*default 15, 1~31*/
+  guint32  min_qp;   /*default 1, 1~31*/
+};
+
+struct _GstMPEG4EncoderClass {
+    GstVaapiBaseEncoderClass parent_class;
+};
+
+
+GType gst_mpeg4_encoder_get_type(void);
+
+/* Creates a new MPEG-4 encoder instance; caller owns the reference. */
+GstMPEG4Encoder *gst_mpeg4_encoder_new(void);
+
+/* Releases a reference taken with gst_mpeg4_encoder_new(). */
+static inline void gst_mpeg4_encoder_unref (GstMPEG4Encoder * encoder)
+{
+  g_object_unref (encoder);
+}
+
+
+G_END_DECLS
+
+#endif /* _GST_VAAPI_MPEG4_ENCODER_H_ */
+