gint width, height;
GValue const *framerate, *format_value;
gint fps_n, fps_d;
- guint32 foramt;
+ guint32 format = 0; /* 0 = no fourcc seen; read below even when caps carry no "format" */
+ GstVaapiSurfacePool *surface_pool = NULL;
H264_LOG_INFO("gst_h264encode_chain 1st recv-buffer caps,\n%s", _h264_dump_caps(recv_caps));
fps_n = gst_value_get_fraction_numerator (framerate);
fps_d = gst_value_get_fraction_denominator (framerate);
format_value = gst_structure_get_value (recv_struct, "format");
- GST_H264_ENCODE_CHECK_STATUS(format_value && GST_TYPE_FOURCC == G_VALUE_TYPE(format_value),
+ if (format_value) {
+ GST_H264_ENCODE_CHECK_STATUS(format_value && GST_TYPE_FOURCC == G_VALUE_TYPE(format_value),
GST_FLOW_ERROR, "1st buffer caps' format type is not fourcc.\n");
- foramt = gst_value_get_fourcc (format_value);
+ format = gst_value_get_fourcc (format_value);
+ }
encode->encoder->profile = 66;
encode->encoder->level = 30;
"framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);
/*set display and initialize encoder*/
- gst_h264_encoder_set_input_format(encode->encoder, foramt);
+ if (format) {
+ gst_h264_encoder_set_input_format(encode->encoder, format);
+ }
if (GST_VAAPI_IS_VIDEO_BUFFER(buf)) {
GstVaapiDisplay *display = NULL;
GstVaapiVideoBuffer *video_buffer = GST_VAAPI_VIDEO_BUFFER(buf);
H264_ASSERT(video_buffer);
display = gst_vaapi_video_buffer_get_display(video_buffer);
+ //need to get surface_pool and set to h264encoder->vaapi_context
+ //(video_buffer->priv->surface_pool);
+ #ifdef _MRST_
+ surface_pool = gst_vaapi_video_buffer_get_surface_pool(video_buffer);
+ #endif
if (display) {
GST_H264_ENCODE_CHECK_STATUS(gst_h264_encoder_set_display(encode->encoder,display)
, GST_FLOW_ERROR, "set display failed in gst_h264encode_chain.\n");
}
h264ret = gst_h264_encoder_initialize(encode->encoder);
GST_H264_ENCODE_CHECK_STATUS (H264_NO_ERROR == h264ret, GST_FLOW_ERROR, "h264_encoder_initialize failed.\n");
-
+ #ifdef _MRST_
+ h264ret = gst_h264_encoder_open(encode->encoder, surface_pool);
+ #else
h264ret = gst_h264_encoder_open(encode->encoder);
+ #endif
GST_H264_ENCODE_CHECK_STATUS (H264_NO_ERROR == h264ret, GST_FLOW_ERROR, "gst_h264_encoder_open failed.\n");
encode->first_sink_frame = FALSE;
GstCaps * caps, GstBuffer ** buf)
{
GstH264Encode * const encode = GST_H264ENCODE(GST_OBJECT_PARENT(pad));
- GstStructure *structure;
+ GstStructure *structure = NULL;
GstBuffer *buffer;
GstVaapiDisplay* display = NULL;
GstFlowReturn ret_num = GST_FLOW_ERROR;
- structure = gst_caps_get_structure(caps, 0);
- if (gst_structure_has_name(structure, "video/x-vaapi-surface")) {
+ if (caps) {
+ structure = gst_caps_get_structure(caps, 0);
+ }
+ if (!structure || gst_structure_has_name(structure, "video/x-vaapi-surface")) {
H264_ASSERT(encode->encoder);
display = gst_h264_encoder_get_display(encode->encoder);
if (!display) {
GST_H264_ENCODE_CHECK_STATUS(buffer, GST_FLOW_ERROR, "gst_h264encode_buffer_alloc failed.\n");
GST_BUFFER_OFFSET (buffer) = offset;
- gst_buffer_set_caps(buffer, caps);
+ if (caps) {
+ gst_buffer_set_caps(buffer, caps);
+ }
*buf = buffer;
ret_num = GST_FLOW_OK;
#include "gst/vaapi/gstvaapicontext.h"
#include "gst/vaapi/gstvaapisurface.h"
#include "gst/vaapi/gstvaapivideobuffer.h"
-
-
+#include "gst/vaapi/gstvaapidisplay_priv.h"
#define ENCPRV(encoder) GST_H264_ENCODER_GET_PRIVATE(encoder)
static H264Status h264_encoder_release_buffers(GstH264EncoderPrivate *h264_prv);
static H264Status h264_put_raw_buffer_to_surface(GstH264EncoderPrivate *h264_prv,
GstBuffer *raw_pic,
- VASurfaceID surface_id);
+ GstVaapiSurface *surface);
static H264Status h264_prepare_encoding(GstH264EncoderPrivate *h264_prv,
GstBuffer *raw_pic, gboolean is_key,
}
+/* Return the GstVaapiSurface backing @video_buffer, or NULL (with a log
+ * message) when the buffer has no surface.  NOTE(review): the surface is
+ * returned as obtained from the getter, without an extra ref here — callers
+ * must not unref it; confirm against gst_vaapi_video_buffer_get_surface(). */
-static VASurfaceID
+static GstVaapiSurface *
h264_get_video_surface(GstH264EncoderPrivate *h264_prv, GstVaapiVideoBuffer *video_buffer)
{
//ref_surface
- VASurfaceID ret = VA_INVALID_SURFACE;
- GstVaapiSurface *surface = gst_vaapi_video_buffer_get_surface(video_buffer);
-
- H264_CHECK_STATUS(surface, VA_INVALID_SURFACE, "video buffer doesn't have a surface");
- ret = (VASurfaceID)GST_VAAPI_OBJECT_ID(surface);
+ GstVaapiSurface *ret = gst_vaapi_video_buffer_get_surface(video_buffer);
+ H264_CHECK_STATUS(ret, NULL, "video buffer doesn't have a surface");
#if 0
g_queue_push_tail(h264_prv->video_buffer_caches,video_buffer);
gst_buffer_ref(GST_BUFFER(video_buffer));
#endif
-
return ret;
error:
- return ret;
+ return NULL;
}
static void
gst_h264_encoder_get_display(GstH264Encoder* encoder)
{
GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
- return g_object_ref(h264_prv->vaapi_display);
+ return (h264_prv->vaapi_display ? g_object_ref(h264_prv->vaapi_display) : NULL);
}
}
-
H264Status
+#ifdef _MRST_
+gst_h264_encoder_open(GstH264Encoder* encoder, GstVaapiSurfacePool *surfaces_pool)
+#else
gst_h264_encoder_open(GstH264Encoder* encoder)
+#endif
{
H264Status ret = H264_NO_ERROR;
GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
H264_ASSERT(h264_prv->vaapi_display);
H264_ASSERT(!h264_prv->vaapi_context);
+#ifdef _MRST_
+ h264_prv->vaapi_context = g_object_new(
+ GST_VAAPI_TYPE_CONTEXT,
+ "display", h264_prv->vaapi_display,
+ "id", GST_VAAPI_ID(VA_INVALID_ID),
+ "entrypoint", gst_vaapi_entrypoint(VAEntrypointEncSlice),
+ "width", h264_prv->public->width,
+ "height", h264_prv->public->height,
+ NULL
+ );
+ if (surfaces_pool) {
+ gst_vaapi_context_set_surface_pool(h264_prv->vaapi_context, surfaces_pool);
+ }
+ g_object_set(h264_prv->vaapi_context, "profile", gst_vaapi_profile(va_profile), NULL);
+
+#else
h264_prv->vaapi_context = gst_vaapi_context_new(h264_prv->vaapi_display,
gst_vaapi_profile(va_profile),
gst_vaapi_entrypoint(VAEntrypointEncSlice),
h264_prv->public->width,
h264_prv->public->height);
+#endif
H264_CHECK_STATUS(h264_prv->vaapi_context, H264_CONTEXT_ERR, "gst_vaapi_context_new failed.\n");
ret = h264_encoder_alloc_buffers(h264_prv);
if (H264_NO_ERROR != ret) {
H264_ASSERT(h264_prv->vaapi_context);
VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+ gboolean is_locked = FALSE;
+
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
+ is_locked = TRUE;
/* 1. create sequence parameter set */
{
}
}
+ /* unlock here */
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
+ is_locked = FALSE;
+
H264_ASSERT(h264_prv->available_code_buffers);
/* 3. init queue available_code_buffers */
return H264_NO_ERROR;
error:
+ if (is_locked) {
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
+ is_locked = FALSE;
+ }
return ret;
}
}
g_mutex_unlock(h264_prv->code_buffer_lock);
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
for (i = 0; i < h264_prv->coded_buf_num; i++) {
va_status = vaDestroyBuffer(va_dpy, h264_prv->coded_bufs[i]);
}
va_status = vaDestroyBuffer(va_dpy, h264_prv->seq_parameter);
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
if (h264_prv->coded_bufs) {
g_free(h264_prv->coded_bufs);
VABufferID* coded_buf = NULL;
VAStatus va_status = VA_STATUS_SUCCESS;
VASurfaceID buffer_surface_id = VA_INVALID_SURFACE;
+ GstVaapiSurface *buffer_surface = NULL;
H264_ASSERT(h264_prv->vaapi_display);
H264_ASSERT(h264_prv->vaapi_context);
}
if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
- buffer_surface_id = h264_get_video_surface(h264_prv, GST_VAAPI_VIDEO_BUFFER(raw_pic));
+ buffer_surface = h264_get_video_surface(h264_prv, GST_VAAPI_VIDEO_BUFFER(raw_pic));
} else {
new_surface = h264_pop_free_surface(h264_prv);
- H264_CHECK_STATUS(new_surface, H264_SURFACE_ERR, "h264_pop_free_surface failed.\n");
- buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(new_surface);
- H264_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, H264_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
+ buffer_surface = new_surface;
+ H264_CHECK_STATUS(buffer_surface, H264_SURFACE_ERR, "h264_pop_free_surface failed.\n");
/*input picture to h264_prv->cur_surface_index*/
- va_status = h264_put_raw_buffer_to_surface(h264_prv, raw_pic, buffer_surface_id);
+ va_status = h264_put_raw_buffer_to_surface(h264_prv, raw_pic, buffer_surface);
H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "va put buffer to surface failed.\n");
}
+ buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
+ H264_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, H264_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
/* begin picture, using default sid 0*/
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
va_status = vaBeginPicture(va_dpy, context_id, buffer_surface_id);
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
+
H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaBeginPicture error.\n");
/* set input buffers*/
H264_CHECK_STATUS(H264_NO_ERROR == ret, H264_PICTURE_ERR, "h264_prepare_encoding failed.\n");
/* end picture */
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
va_status = vaEndPicture(va_dpy, context_id);
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaEndPicture error.\n");
/*query surface result*/
static H264Status
h264_put_raw_buffer_to_surface(GstH264EncoderPrivate *h264_prv,
GstBuffer *raw_pic,
- VASurfaceID surface_id)
+ GstVaapiSurface *surface)
{
H264Status ret = H264_NO_ERROR;
- VAStatus va_status = VA_STATUS_SUCCESS;
- VAImage surface_image;
- void *surface_p = NULL;
- uint8_t *y_src, *u_src, *v_src;
- uint8_t *y_dst, *u_dst, *v_dst;
- int row, col;
- H264_ASSERT(h264_prv->vaapi_display);
- VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
-
- int y_size = h264_prv->public->width * h264_prv->public->height;
- int u_size = (h264_prv->public->width >> 1) * (h264_prv->public->height >> 1);
+ GstVaapiImage *image = NULL;
+ GstVaapiImageFormat image_format;
+ uint8_t *y_src = NULL, *u_src = NULL, *v_src = NULL;
+ uint8_t *y_dst = NULL, *u_dst = NULL, *v_dst = NULL;
+ int y_size = 0, u_size = 0;
+ int row = 0, col = 0;
+ uint32_t plane_count = 0;
+ uint32_t image_width = 0, image_height = 0;
+ uint32_t pitchy = 0, pitchu = 0, pitchv = 0;
- va_status = vaDeriveImage(va_dpy, surface_id, &surface_image);
- H264_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, H264_PICTURE_ERR, "vaDeriveImage error.\n");
+ H264_ASSERT(h264_prv->vaapi_display);
+ /* derive an image from the surface and map it; both calls can fail */
+ image = gst_vaapi_surface_derive_image(surface);
+ H264_CHECK_STATUS(image, H264_PICTURE_ERR, "gst_vaapi_surface_derive_image failed.\n");
+ H264_CHECK_STATUS(gst_vaapi_image_map(image), H264_PICTURE_ERR, "gst_vaapi_image_map failed.\n");
- vaMapBuffer(va_dpy, surface_image.buf, &surface_p);
+ image_format = gst_vaapi_image_get_format(image);
+ image_width = gst_vaapi_image_get_width(image);
+ image_height = gst_vaapi_image_get_height(image);
/* copy buffer to surface */
- H264_ASSERT(GST_BUFFER_SIZE(raw_pic) >= y_size + (y_size>>1));
+ y_size = h264_prv->public->width * h264_prv->public->height;
+ u_size = ((h264_prv->public->width+1) >> 1) * ((h264_prv->public->height+1) >> 1);
+ /* validate the input buffer only after the sizes are known — asserting
+  * before the computation would compare against y_size == 0 (always true) */
+ H264_ASSERT(GST_BUFFER_SIZE(raw_pic) >= y_size + 2 * u_size);
+
y_src = GST_BUFFER_DATA(raw_pic);
u_src = y_src + y_size;
v_src = u_src + u_size;
- y_dst = surface_p + surface_image.offsets[0];
- u_dst = surface_p + surface_image.offsets[1];
- v_dst = surface_p + surface_image.offsets[2];
+ plane_count = gst_vaapi_image_get_plane_count(image);
+ y_dst = gst_vaapi_image_get_plane(image, 0);
+ u_dst = gst_vaapi_image_get_plane(image, 1);
+ pitchy = gst_vaapi_image_get_pitch(image, 0);
+ pitchu = gst_vaapi_image_get_pitch(image, 1);
+
+ if (plane_count > 2) {
+ v_dst = gst_vaapi_image_get_plane(image, 2);
+ pitchv = gst_vaapi_image_get_pitch(image, 2);
+ }
/* copy from avcenc.c*/
/* Y plane */
- for (row = 0; row < surface_image.height; row++) {
- memcpy(y_dst, y_src, surface_image.width);
- y_dst += surface_image.pitches[0];
- y_src += h264_prv->public->width;
- }
-
- if (surface_image.format.fourcc == VA_FOURCC_NV12) { /* UV plane */
- if (h264_prv->format == GST_MAKE_FOURCC('I','4','2','0')) {
- for (row = 0; row < surface_image.height / 2; row++) {
- for (col = 0; col < surface_image.width / 2; col++) {
- u_dst[col * 2] = u_src[col];
- u_dst[col * 2 + 1] = v_src[col];
- }
-
- u_dst += surface_image.pitches[1];
- u_src += (h264_prv->public->width>>1);
- v_src += (h264_prv->public->width>>1);
- }
- } else if (h264_prv->format == GST_MAKE_FOURCC('N','V','1','2')){
- for (row = 0; row < surface_image.height / 2; row++) {
- memcpy(u_dst, u_src, surface_image.width);
- u_src += h264_prv->public->width;
- u_dst += surface_image.pitches[1];
- }
- } else {
- H264_ASSERT(0);
+ for (row = 0; row < image_height; row++) {
+ memcpy(y_dst, y_src, image_width);
+ y_dst += pitchy;
+ y_src += h264_prv->public->width;
+ }
+
+ if (GST_VAAPI_IMAGE_NV12 == image_format) { /* UV plane */
+ if (GST_VAAPI_IMAGE_I420 == h264_prv->format) {
+ for (row = 0; row < image_height / 2; row++) {
+ for (col = 0; col < image_width / 2; col++) {
+ u_dst[col * 2] = u_src[col];
+ u_dst[col * 2 + 1] = v_src[col];
+ }
+
+ u_dst += pitchu;
+ u_src += (h264_prv->public->width>>1);
+ v_src += (h264_prv->public->width>>1);
+ }
+ } else if (GST_VAAPI_IMAGE_NV12 == h264_prv->format){
+ for (row = 0; row < image_height / 2; row++) {
+ memcpy(u_dst, u_src, image_width);
+ u_src += h264_prv->public->width;
+ u_dst += pitchu;
}
} else {
- /* FIXME: fix this later */
- H264_ASSERT(0);
+ H264_ASSERT(0);
}
+ } else {
+ /* FIXME: fix this later */
+ H264_ASSERT(0);
+ }
- vaUnmapBuffer(va_dpy, surface_image.buf);
- vaDestroyImage(va_dpy, surface_image.image_id);
-
+ /* unmap the image and drop our reference on success */
+ gst_vaapi_image_unmap(image);
+ g_object_unref(image);
+ image = NULL;
error:
+ /* error-path cleanup: unmap is a no-op on a non-mapped image
+  * (NOTE(review): confirm gst_vaapi_image_unmap tolerates unmapped state) */
+ if (image) {
+ gst_vaapi_image_unmap(image);
+ g_object_unref(image);
+ }
return ret;
}
VADisplay va_dpy = gst_vaapi_display_get_display(h264_prv->vaapi_display);
VAContextID context_id = GST_VAAPI_OBJECT_ID(h264_prv->vaapi_context);
+ /* lock display */
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
/*handle first surface_index*/
/*only need first frame*/
if (h264_prv->frame_count == 0) {
}
va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
+
H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "creating pic-param buffer failed.\n");
+
va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->pic_parameter, 1);
H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_PICTURE_ERR, "rendering pic-param buffer failed.\n");
h264_prv->ref_surface = h264_prv->recon_surface;
h264_prv->recon_surface = swap;
- /* error */
error:
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
return ret;
}
VACodedBufferSegment *buf_list = NULL;
GstBuffer* ret_buffer = NULL;
gboolean has_coded_data = FALSE;
+ gboolean is_locked = FALSE;
H264_ASSERT(h264_prv->vaapi_display);
H264_ASSERT(h264_prv->vaapi_context);
H264_ASSERT(coded_pics && *coded_pics == NULL);
+ /* lock display */
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
+ is_locked = TRUE;
+
va_status = vaSyncSurface(va_dpy, surface_id);
H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_QUERY_STATUS_ERR, "vaSyncSurface failed.\n");
va_status = vaMapBuffer(va_dpy, *coded_buf, (void **)(&buf_list));
H264_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, H264_QUERY_STATUS_ERR, "vaMapBuffer failed.\n");
+ /*unlock display*/
+ if (is_locked) {
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
+ is_locked = FALSE;
+ }
+
while (buf_list != NULL) {
if (!h264_prv->sps_data || !h264_prv->pps_data) {
h264_encoder_read_sps_pps(h264_prv, buf_list->buf, buf_list->size);
#endif
{ // if non-related, push back to available_code_buffers
g_mutex_lock(h264_prv->code_buffer_lock);
+ GST_VAAPI_DISPLAY_LOCK(h264_prv->vaapi_display);
vaUnmapBuffer(va_dpy, *coded_buf);
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
g_queue_push_head(h264_prv->available_code_buffers, coded_buf);
g_cond_signal(h264_prv->code_buffer_cond);
g_mutex_unlock(h264_prv->code_buffer_lock);
return H264_NO_ERROR;
error:
+ /*unlock display*/
+ if (is_locked) {
+ GST_VAAPI_DISPLAY_UNLOCK(h264_prv->vaapi_display);
+ is_locked = FALSE;
+ }
return ret;
}
encoder->intra_period = 30;
ret = gst_h264_encoder_initialize(encoder);
H264_ASSERT(ret == H264_NO_ERROR);
+ #ifdef _MRST_
+ ret = gst_h264_encoder_open(encoder, NULL);
+ #else
ret = gst_h264_encoder_open(encoder);
+ #endif
H264_ASSERT(ret == H264_NO_ERROR);
uint32_t buffer_size = encoder->width * encoder->height *3 /2;