+/*
+ * gstvaapih264encoder.c - H.264 encoder
+ *
+ * Copyright (C) 2011 Intel Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public License
+ * as published by the Free Software Foundation; either version 2.1
+ * of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free
+ * Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301 USA
+ */
#include "gstvaapih264encoder.h"
#include "gst/vaapi/gstvaapivideobuffer.h"
#include "gst/vaapi/gstvaapidisplay_priv.h"
-#define ENCPRV(encoder) GST_H264_ENCODER_GET_PRIVATE(encoder)
+/* enable old lib va*/
+//#define _SIMPLE_LIB_VA_
+
+GST_DEBUG_CATEGORY_STATIC (gst_vaapi_h264_encoder_debug);
+#define GST_CAT_DEFAULT gst_vaapi_h264_encoder_debug
#define SHARE_CODED_BUF 0
#define REF_RECON_SURFACE_NUM 2
-typedef struct _GstH264EncodeBuffer GstH264EncodeBuffer;
+#define ENTROPY_MODE_CAVLC 0
+#define ENTROPY_MODE_CABAC 1
+
+#define BR_CBR 0
+#define BR_VBR 1
+#define BR_CQP 2
+
+#define NAL_REF_IDC_NONE 0
+#define NAL_REF_IDC_LOW 1
+#define NAL_REF_IDC_MEDIUM 2
+#define NAL_REF_IDC_HIGH 3
-#define GST_TYPE_H264_ENCODE_BUFFER (gst_h264_encode_buffer_get_type())
typedef enum {
NAL_UNKNOWN = 0,
NAL_FILLER = 12,
}H264_NAL_TYPE;
-struct _GstH264EncodeBuffer {
- GstBuffer buffer;
- VABufferID *coded_id;
- GstH264EncoderPrivate *encoder;
-};
+
+typedef enum {
+ SLICE_TYPE_P = 0,
+ SLICE_TYPE_B = 1,
+ SLICE_TYPE_I = 2
+} H264_SLICE_TYPE;
struct _GstH264EncoderPrivate {
GstH264Encoder *public;
gboolean es_flag; /*elementary flag*/
/* private data*/
- //GstVaapiDisplay *vaapi_display;
- //GstVaapiContext *vaapi_context;
GQueue *video_buffer_caches; /*not used for baseline*/
- GstVaapiSurface *ref_surface; /* reference buffer*/
+ GstVaapiSurface *ref_surface1; /* reference buffer*/
+ GstVaapiSurface *ref_surface2; /* for B frames */
GstVaapiSurface *recon_surface; /* reconstruct buffer*/
- //VAAPI_Encode_State encode_state;
-
VABufferID seq_parameter;
VABufferID pic_parameter;
VABufferID slice_parameter;
- VAEncSliceParameterBuffer *slice_param_buffers;
+ VABufferID packed_sps_par_buf;
+ VABufferID packed_sps_data_buf;
+ VABufferID packed_pps_par_buf;
+ VABufferID packed_pps_data_buf;
+#ifdef _SIMPLE_LIB_VA_
+ VAEncSliceParameterBuffer *slice_param_buffers;
+#else
+ VAEncSliceParameterBufferH264 *slice_param_buffers;
+#endif
guint32 default_slice_height;
guint32 slice_mod_mb_num;
-
- VABufferID *coded_bufs;
- guint32 coded_buf_num;
- guint32 cur_coded_index;
-
- /*total encoded frames*/
- guint32 frame_count;
+ guint32 default_cts_offset;
GstBuffer *sps_data;
GstBuffer *pps_data;
- GMutex *code_buffer_lock;
- GCond *code_buffer_cond;
- GQueue *available_code_buffers;
+ GQueue *queued_buffers; /* GstVaapiVideoBuffers with surface*/
+ guint32 gop_count;
+ guint32 cur_display_num;
+ guint32 cur_decode_num;
+ H264_SLICE_TYPE cur_slice_type;
+ guint64 last_decode_time;
};
-G_DEFINE_TYPE(GstH264Encoder, gst_h264_encoder, GST_TYPE_VAAPI_ENCODER);
+G_DEFINE_TYPE(GstH264Encoder, gst_h264_encoder, GST_TYPE_VAAPI_BASE_ENCODER);
/* h264_bit_mask[n]: mask of the n lowest bits, n = 0..8 */
static const guint8 h264_bit_mask[9] = {0x00, 0x01, 0x03, 0x07, 0x0F, 0x1F, 0x3F, 0x7F, 0xFF};
-static GstBufferClass *h264_encode_buffer_parent_class = NULL;
-
-
-static EncoderStatus gst_h264_encoder_initialize(GstVaapiEncoder* encoder, GstVaapiDisplay *display);
-static EncoderStatus gst_h264_encoder_uninitialize(GstVaapiEncoder* encoder, GstVaapiDisplay *display);
-static EncoderStatus gst_h264_encoder_open(GstVaapiEncoder* encoder, GstVaapiDisplay *display, void* private_data, GstVaapiContext **context);
-static EncoderStatus gst_h264_encoder_close(GstVaapiEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context);
-static EncoderStatus gst_h264_encoder_encode(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
- GstVaapiContext *context, GstBuffer *raw_pic, GList **coded_pics);
static EncoderStatus gst_h264_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
- GstVaapiContext *context, GList **coded_pics);
+ GstVaapiContext *context, GList **coded_pics);
+
/*other functions*/
-static EncoderStatus gst_h264_encoder_get_avcC_codec_data(GstVaapiEncoder* encoder, GstBuffer **buffer);
+static EncoderStatus gst_h264_encoder_get_avcC_codec_data(
+ GstVaapiEncoder* encoder, GstBuffer **buffer);
static EncoderStatus gst_h264_encoder_get_nal_codec_data(GstVaapiEncoder* encoder, GstBuffer **buffer);
-static gboolean gst_h264_validate_parameters(GstH264Encoder *encoder);
+static gboolean gst_h264_validate_parameters(GstVaapiBaseEncoder *encoder);
static void gst_h264_encoder_finalize(GObject *object);
static void gst_h264_encoder_init_public_values(GstH264Encoder* encoder);
-static VAProfile h264_get_va_profile(guint32 profile);
-static EncoderStatus h264_encoder_alloc_buffers(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display, GstVaapiContext *context);
-static EncoderStatus h264_encoder_release_buffers(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display, GstVaapiContext *context);
-static EncoderStatus h264_put_raw_buffer_to_surface(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display,
- GstBuffer *raw_pic,
- GstVaapiSurface *surface);
-
-static EncoderStatus h264_prepare_encoding(GstH264Encoder *h264_encoder, GstVaapiDisplay *display,
- GstVaapiContext *context, gboolean is_key, VABufferID coded_buf);
-static EncoderStatus h264_query_encoding_status(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display,
- GstVaapiSurface *buffer_surface,
- gboolean is_key,
- GstClockTime timestamp,
- GstClockTime duration,
- VABufferID *coded_buf,
- GList **coded_pics);
-static EncoderStatus
-h264_encoder_read_sps_pps(GstH264EncoderPrivate *h264_prv, const guint8 *buf, guint32 size);
-static GstBuffer *h264_encoder_create_coded_buffer(GstH264EncoderPrivate *h264_prv,
- guint8 *frame,
- guint32 frame_size,
- VABufferID *coded_buf);
-
-
-/*encoded buffer, for SHARE_CODED_BUF */
-static void gst_h264_encode_buffer_class_init (gpointer g_class, gpointer class_data);
-static GType gst_h264_encode_buffer_get_type (void);
-static void gst_h264_encode_buffer_finalize (GstH264EncodeBuffer *h264_buffer);
-static GstH264EncodeBuffer *gst_h264_encode_buffer_new(GstH264EncoderPrivate *h264_prv,
- VABufferID *coded_id);
+static gboolean gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static gboolean gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder,
+ GstVaapiDisplay *display, GstVaapiContext *context);
+static EncoderStatus gst_h264_encoder_prepare_next_buffer(GstVaapiBaseEncoder* encoder,
+ GstVaapiVideoBuffer *display_buf, gboolean need_flush,
+ GstVaapiVideoBuffer **out_buf);
+static void gst_h264_encoder_frame_failed(GstVaapiBaseEncoder *encoder,
+ GstVaapiVideoBuffer* buffer);
+static EncoderStatus gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key);
+static void gst_h264_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size);
+//static EncoderStatus h264_encoder_read_sps_pps(
+// GstH264EncoderPrivate *h264_prv, const guint8 *buf, guint32 size);
+static GstBuffer *gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
+ guint8 *frame, guint32 frame_size, VABufferID *coded_buf);
/* h264 bitstream functions */
static void h264_bitstream_init(H264Bitstream *bitstream, guint32 bit_capability);
static gboolean h264_bitstream_write_uint(H264Bitstream *bitstream, guint32 value, guint32 bit_size);
static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value);
static gboolean h264_bitstream_write_ue(H264Bitstream *bitstream, guint32 value);
-static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value);
+static gboolean h264_bitstream_write_se(H264Bitstream *bitstream, gint32 value);
static gboolean h264_bitstream_write_trailing_bits(H264Bitstream *bitstream);
static gboolean h264_bitstream_write_byte_array(H264Bitstream *bitstream, const guint8 *buf, guint32 byte_size);
static void h264_bitstream_destroy(H264Bitstream *bitstream, gboolean free_flag);
static gboolean h264_bitstream_auto_grow(H264Bitstream *bitstream, guint32 extra_bit_size);
-static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
-static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv);
+static gboolean h264_bitstream_write_sps(H264Bitstream *bitstream, VAEncSequenceParameterBufferH264 *seq);
+static gboolean h264_bitstream_write_pps(H264Bitstream *bitstream, VAEncPictureParameterBufferH264 *pic);
+static gboolean h264_bitstream_write_nal_header(H264Bitstream *bitstream,
+ guint nal_ref_idc, guint nal_unit_type);
+
static const guint8 *h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size);
static gboolean h264_read_sps_attributes(const guint8 *sps_data, guint32 sps_size,
- guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc);
-
-/* available_coded_buffer actions */
-static VABufferID *pop_available_coded_buffer(GstH264EncoderPrivate *h264_prv);
-static gboolean push_available_coded_buffer(GstH264EncoderPrivate *h264_prv, VABufferID *buf);
-static gboolean alloc_all_available_coded_buffers(GstH264EncoderPrivate *h264_prv,
- GstVaapiDisplay *display, GstVaapiContext *context,
- guint32 buffer_size, gboolean need_display_lock);
-static void wait_and_clear_available_coded_buffers(GstH264EncoderPrivate *h264_prv,
- GstVaapiDisplay *display, gboolean need_display_lock);
+ guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc);
static void
gst_h264_encoder_class_init(GstH264EncoderClass *klass)
{
GObjectClass * const object_class = G_OBJECT_CLASS(klass);
GstVaapiEncoderClass * const encoder_class = GST_VAAPI_ENCODER_CLASS(klass);
+ GstVaapiBaseEncoderClass * const base_class = GST_VAAPI_BASE_ENCODER_CLASS(klass);
+
g_type_class_add_private(klass, sizeof(GstH264EncoderPrivate));
+ GST_DEBUG_CATEGORY_INIT (gst_vaapi_h264_encoder_debug, "gst_va_h264_encoder", 0,
+ "gst_va_h264_encoder element");
+
object_class->finalize = gst_h264_encoder_finalize;
- encoder_class->initialize = gst_h264_encoder_initialize;
- encoder_class->uninitialize = gst_h264_encoder_uninitialize;
- encoder_class->open = gst_h264_encoder_open;
- encoder_class->close = gst_h264_encoder_close;
- encoder_class->encode = gst_h264_encoder_encode;
+ base_class->validate_attributes = gst_h264_validate_parameters;
+ base_class->pre_alloc_resource = gst_h264_encoder_alloc_slices;
+ base_class->release_resource = gst_h264_encoder_release_resource;
+ base_class->prepare_next_input_buffer = gst_h264_encoder_prepare_next_buffer;
+ base_class->render_frame = gst_h264_encoder_rendering;
+ base_class->notify_frame = gst_h264_notify_frame;
+ base_class->copy_coded_frame = gst_h264_encoder_copy_coded_buffer;
+ base_class->encode_frame_failed = gst_h264_encoder_frame_failed;
+
encoder_class->flush = gst_h264_encoder_flush;
+
encoder_class->get_codec_data = gst_h264_encoder_get_avcC_codec_data;
/* encoder_class->get_codec_data = gst_h264_encoder_get_nal_codec_data; */
}
-
-static void
-gst_h264_encode_buffer_class_init (gpointer g_class, gpointer class_data)
-{
- GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS(g_class);
-
- h264_encode_buffer_parent_class = g_type_class_peek_parent(g_class);
- ENCODER_ASSERT(h264_encode_buffer_parent_class);
-
- mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
- gst_h264_encode_buffer_finalize;
-}
-
-
-static GType
-gst_h264_encode_buffer_get_type (void)
-{
- static GType s_h264_encode_buffer_type = 0;
- if (G_UNLIKELY (s_h264_encode_buffer_type == 0)) {
- static const GTypeInfo s_h264_encode_buffer_info = {
- sizeof(GstBufferClass),
- NULL,
- NULL,
- gst_h264_encode_buffer_class_init,
- NULL,
- NULL,
- sizeof(GstH264EncodeBuffer),
- 0,
- NULL,
- NULL
- };
- s_h264_encode_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
- "GstH264EncodeBuffer", &s_h264_encode_buffer_info, 0);
- }
- return s_h264_encode_buffer_type;
-}
-
-static void
-gst_h264_encode_buffer_finalize (GstH264EncodeBuffer *h264_buffer)
-{
- GstH264EncoderPrivate *h264_prv = NULL;
- VABufferID* coded_id = NULL;
- GstVaapiDisplay *display = NULL;
-
- gboolean is_locked = FALSE;
-
- h264_prv = h264_buffer->encoder;
- coded_id = h264_buffer->coded_id;
- display = ENCODER_DISPLAY(h264_prv->public);
-
- ENCODER_ASSERT(display);
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
-
- ENCODER_ASSERT(h264_prv);
- ENCODER_ASSERT(coded_id && VA_INVALID_ID!= *coded_id);
-
- /*if (--(*h264_buffer->ref_coded_id) == 0) */
- {
- /*g_free(h264_buffer->ref_coded_id);*/
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- vaUnmapBuffer(va_dpy, *coded_id);
- ENCODER_RELEASE_DISPLAY_LOCK(display);
- push_available_coded_buffer(h264_prv, coded_id);
- }
-
- if (GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize) {
- GST_MINI_OBJECT_CLASS(h264_encode_buffer_parent_class)->finalize(GST_MINI_OBJECT(h264_buffer));
- }
-}
-
-static GstH264EncodeBuffer *
-gst_h264_encode_buffer_new(GstH264EncoderPrivate *h264_prv,
- VABufferID *coded_id)
-{
- GstH264EncodeBuffer *buf = (GstH264EncodeBuffer*)gst_mini_object_new(GST_TYPE_H264_ENCODE_BUFFER);
- buf->coded_id = coded_id;
- buf->encoder = h264_prv;
- return buf;
-}
-
-
-static GstVaapiSurface *
-h264_get_video_surface(GstH264EncoderPrivate *h264_prv, GstVaapiVideoBuffer *video_buffer)
-{
- //ref_surface
- GstVaapiSurface *ret = gst_vaapi_video_buffer_get_surface(video_buffer);
-
- ENCODER_CHECK_STATUS(ret, NULL, "video buffer doesn't have a surface");
-#if 0
- g_queue_push_tail(h264_prv->video_buffer_caches,video_buffer);
- gst_buffer_ref(GST_BUFFER(video_buffer));
-#endif
- return ret;
-
- end:
- return NULL;
-}
-
-static void
-h264_release_video_surface(GstH264EncoderPrivate *h264_prv, VASurfaceID surface)
-{
-#if 0
- ENCODER_ASSERT(h264_prv->video_buffer_caches);
- g_queue_find_custom(h264_prv->video_buffer_caches,xx, compare_func);
- for (h264_prv->video_buffer_caches) {
- }
-#endif
-}
-
static VAProfile
h264_get_va_profile(guint32 profile)
{
return (-1);
}
-GstH264Encoder *
-gst_h264_encoder_new(void)
-{
- return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
-}
-
-
static void
gst_h264_encoder_init(GstH264Encoder *encoder)
{
- GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
- GstVaapiEncoderPrivate *encoder_prv = GST_VAAPI_ENCODER_GET_PRIVATE(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
ENCODER_ASSERT(h264_prv);
h264_prv->public = encoder;
/* init public attributes */
gst_h264_encoder_init_public_values(encoder);
+ gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(encoder), TRUE);
/* init private values*/
h264_prv->format = GST_MAKE_FOURCC('N','V','1','2');
h264_prv->es_flag = TRUE;
+ //h264_prv->es_flag = FALSE;
- //h264_prv->vaapi_display = NULL;
- h264_prv->ref_surface = NULL;
+ h264_prv->ref_surface1 = NULL;
+ h264_prv->ref_surface2 = NULL;
h264_prv->recon_surface = NULL;
- h264_prv->video_buffer_caches = g_queue_new();
- //h264_prv->encode_state = H264_ENC_NULL;
h264_prv->seq_parameter = VA_INVALID_ID;
h264_prv->pic_parameter = VA_INVALID_ID;
h264_prv->slice_parameter = VA_INVALID_ID;
+ h264_prv->packed_sps_par_buf = VA_INVALID_ID;
+ h264_prv->packed_sps_data_buf = VA_INVALID_ID;
+ h264_prv->packed_pps_par_buf = VA_INVALID_ID;
+ h264_prv->packed_pps_data_buf = VA_INVALID_ID;
h264_prv->slice_param_buffers = NULL;
h264_prv->default_slice_height = 0;
h264_prv->slice_mod_mb_num = 0;
- h264_prv->coded_bufs = NULL;
- h264_prv->coded_buf_num = DEFAULT_CODEDBUF_NUM;
- h264_prv->frame_count = 0;
h264_prv->sps_data = NULL;
h264_prv->pps_data = NULL;
- /*index init*/
- h264_prv->cur_coded_index = 0;
- /*init others*/
- h264_prv->code_buffer_lock = g_mutex_new();
- h264_prv->code_buffer_cond = g_cond_new();
- h264_prv->available_code_buffers = g_queue_new();
+ h264_prv->queued_buffers = g_queue_new();
+ h264_prv->gop_count = 0;
+ h264_prv->cur_display_num = 0;
+ h264_prv->cur_decode_num = 0;
+ h264_prv->cur_slice_type = SLICE_TYPE_I;
+ h264_prv->last_decode_time = 0LL;
+ h264_prv->default_cts_offset = 0;
}
static void
gst_h264_encoder_finalize(GObject *object)
{
/*free private buffers*/
GstVaapiEncoder *encoder = GST_VAAPI_ENCODER(object);
- GstH264EncoderPrivate *h264_prv = ENCPRV(object);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(object);
if (gst_vaapi_encoder_get_state(encoder) != VAAPI_ENC_NULL) {
gst_vaapi_encoder_uninitialize(encoder);
}
- g_mutex_free(h264_prv->code_buffer_lock);
- g_cond_free(h264_prv->code_buffer_cond);
- if (h264_prv->available_code_buffers) {
- g_queue_free(h264_prv->available_code_buffers);
- h264_prv->available_code_buffers = NULL;
- }
-
- if (h264_prv->video_buffer_caches) {
- g_queue_free(h264_prv->video_buffer_caches);
- h264_prv->video_buffer_caches = NULL;
- };
if (h264_prv->sps_data) {
gst_buffer_unref(h264_prv->sps_data);
h264_prv->sps_data = NULL;
g_free(h264_prv->slice_param_buffers);
h264_prv->slice_param_buffers = NULL;
}
+
+ if (h264_prv->queued_buffers) {
+ ENCODER_ASSERT(g_queue_is_empty(h264_prv->queued_buffers));
+ g_queue_free(h264_prv->queued_buffers);
+ h264_prv->queued_buffers = NULL;
+ }
+
+ G_OBJECT_CLASS(gst_h264_encoder_parent_class)->finalize(object);
}
+GstH264Encoder *
+gst_h264_encoder_new(void)
+{
+ return GST_H264_ENCODER(g_object_new(GST_TYPE_H264_ENCODER, NULL));
+}
+
static void
gst_h264_encoder_init_public_values(GstH264Encoder* encoder)
{
encoder->profile = 0;
encoder->level = 0;
- //encoder->width = 0;
- //encoder->height = 0;
- //encoder->frame_rate = 0;
encoder->bitrate = 0;
encoder->intra_period = 0;
encoder->init_qp = -1;
encoder->min_qp = -1;
encoder->slice_num = 0;
-}
-
-void
-gst_h264_encoder_set_input_format(GstH264Encoder* encoder, guint32 format)
-{
- GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
- h264_prv->format = format;
+ encoder->b_frame_num = 0;
}
void
gst_h264_encoder_set_es_flag(GstH264Encoder* encoder, gboolean es)
{
- GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
h264_prv->es_flag = es;
}
-EncoderStatus
-gst_h264_encoder_initialize(GstVaapiEncoder* encoder, GstVaapiDisplay *display)
-{
- return ENCODER_NO_ERROR;
-}
-
-EncoderStatus
-gst_h264_encoder_uninitialize(GstVaapiEncoder* encoder, GstVaapiDisplay *display)
-{
- return ENCODER_NO_ERROR;
-
-}
gboolean
-gst_h264_validate_parameters(GstH264Encoder *encoder)
+gst_h264_validate_parameters(GstVaapiBaseEncoder *base_encoder)
{
- GstH264EncoderPrivate *h264_prv = ENCPRV(encoder);
+ GstH264Encoder *encoder = GST_H264_ENCODER(base_encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
if (!ENCODER_WIDTH(encoder) || !ENCODER_HEIGHT(encoder) || !ENCODER_FPS(encoder)) {
return FALSE;
}
if (!encoder->profile) {
encoder->profile = H264_DEFAULT_PROFILE;
}
+ gst_vaapi_base_encoder_set_va_profile(base_encoder, h264_get_va_profile(encoder->profile));
if (!encoder->level) {
- encoder->level = H264_DEFAULT_LEVEL;
+ if (encoder->profile <= H264_PROFILE_BASELINE)
+ encoder->level = H264_LEVEL_30;
+ else
+ encoder->level = H264_LEVEL_41;
}
if (!encoder->intra_period) {
encoder->intra_period = H264_DEFAULT_INTRA_PERIOD;
/* default compress ratio 1: (4*8*1.5) */
if (!encoder->bitrate) {
- encoder->bitrate = ENCODER_WIDTH(encoder)*ENCODER_HEIGHT(encoder)*ENCODER_FPS(encoder)/4;
+ encoder->bitrate = 0; //ENCODER_WIDTH(encoder)*ENCODER_HEIGHT(encoder)*ENCODER_FPS(encoder)/4;
}
if (!encoder->slice_num) {
} else {
h264_prv->slice_mod_mb_num = ((ENCODER_HEIGHT(encoder)+15)/16)%encoder->slice_num;
}
+
+ if (encoder->b_frame_num) {
+ h264_prv->default_cts_offset = GST_SECOND/ENCODER_FPS(encoder);
+ } else {
+ h264_prv->default_cts_offset = 0;
+ }
return TRUE;
}
-EncoderStatus
-gst_h264_encoder_open(GstVaapiEncoder* encoder, GstVaapiDisplay *display, void* private_data, GstVaapiContext **context)
-{
- GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
- GstVaapiSurfacePool *surfaces_pool = private_data;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
- GstVaapiContext *out_context = NULL;
- VAProfile va_profile = -1;
- EncoderStatus ret = ENCODER_NO_ERROR;
+static gboolean
+h264_encoder_release_parameters(GstH264Encoder *h264_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+{
VAStatus va_status = VA_STATUS_SUCCESS;
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
- /*check and set default values*/
- ENCODER_CHECK_STATUS(gst_h264_validate_parameters(h264_encoder), ENCODER_PARAMETER_ERR, "h264encoder paramerter error.\n");
+ gboolean is_locked = FALSE;
- va_profile = h264_get_va_profile(h264_encoder->profile);
- ENCODER_ASSERT(ENCODER_DISPLAY(encoder));
- ENCODER_CHECK_STATUS(-1 != va_profile, ENCODER_PROFILE_ERR, "profile(%d) is NOT supported.\n", h264_encoder->profile);
+ ENCODER_ASSERT(display);
+ ENCODER_ASSERT(context);
+ VAAPI_UNUSED_ARG(va_status);
+ VADisplay va_dpy = gst_vaapi_display_get_display(display);
-#ifdef _MRST_
- out_context = g_object_new(
- GST_VAAPI_TYPE_CONTEXT,
- "display", display,
- "id", GST_VAAPI_ID(VA_INVALID_ID),
- "entrypoint", gst_vaapi_entrypoint(VAEntrypointEncSlice),
- "width", ENCODER_WIDTH(encoder),
- "height", ENCODER_HEIGHT(encoder),
- NULL
- );
- if (surfaces_pool) {
- gst_vaapi_context_set_surface_pool(out_context, surfaces_pool);
+ ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+ if (VA_INVALID_ID != h264_prv->seq_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->seq_parameter);
+ h264_prv->seq_parameter = VA_INVALID_ID;
}
- g_object_set(out_context, "profile", gst_vaapi_profile(va_profile), NULL);
-
-#else
- VAAPI_UNUSED_ARG(surfaces_pool);
- out_context = gst_vaapi_context_new(display,
- gst_vaapi_profile(va_profile),
- gst_vaapi_entrypoint(VAEntrypointEncSlice),
- ENCODER_WIDTH(encoder),
- ENCODER_HEIGHT(encoder));
-#endif
- ENCODER_CHECK_STATUS(out_context, ENCODER_CONTEXT_ERR, "gst_vaapi_context_new failed.\n");
- ret = h264_encoder_alloc_buffers(h264_encoder, display, out_context);
- if (ENCODER_NO_ERROR != ret) {
- goto end;
+ if (VA_INVALID_ID != h264_prv->pic_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+ h264_prv->pic_parameter = VA_INVALID_ID;
}
- *context = out_context;
- return ENCODER_NO_ERROR;
-
-end:
- // clear resources
- if (ENCODER_NO_ERROR != ret) {
- gst_h264_encoder_close(encoder, display, out_context);
- if (out_context) {
- g_object_unref(out_context);
- }
+ if (VA_INVALID_ID != h264_prv->slice_parameter) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+ h264_prv->slice_parameter = VA_INVALID_ID;
}
- return ret;
-
-}
-
-EncoderStatus
-gst_h264_encoder_close(GstVaapiEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context)
-{
- GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
- EncoderStatus ret = ENCODER_NO_ERROR;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
- /* release buffers first */
- h264_encoder_release_buffers(h264_encoder, display, context);
-
- /*remove ref_surface*/
- if (h264_prv->ref_surface) {
- if (context) {
- gst_vaapi_context_put_surface(context, h264_prv->ref_surface);
- } else {
- g_object_unref(h264_prv->ref_surface);
- }
- h264_prv->ref_surface = NULL;
+ if (VA_INVALID_ID != h264_prv->packed_sps_par_buf) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_sps_par_buf);
+ h264_prv->packed_sps_par_buf = VA_INVALID_ID;
}
-
- /*remove recon_surface*/
- if (h264_prv->recon_surface) {
- if (context) {
- gst_vaapi_context_put_surface(context, h264_prv->recon_surface);
- } else {
- g_object_unref(h264_prv->recon_surface);
- }
- h264_prv->recon_surface = NULL;
+ if (VA_INVALID_ID != h264_prv->packed_sps_data_buf) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_sps_data_buf);
+ h264_prv->packed_sps_data_buf = VA_INVALID_ID;
+ }
+ if (VA_INVALID_ID != h264_prv->packed_pps_par_buf) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_pps_par_buf);
+ h264_prv->packed_pps_par_buf = VA_INVALID_ID;
}
+ if (VA_INVALID_ID != h264_prv->packed_pps_data_buf) {
+ va_status = vaDestroyBuffer(va_dpy, h264_prv->packed_pps_data_buf);
+ h264_prv->packed_pps_data_buf = VA_INVALID_ID;
+ }
+
+ ENCODER_RELEASE_DISPLAY_LOCK(display);
- h264_prv->frame_count = 0;
+ if (h264_prv->slice_param_buffers) {
+ g_free(h264_prv->slice_param_buffers);
+ h264_prv->slice_param_buffers = NULL;
+ }
if (h264_prv->sps_data) {
gst_buffer_unref(h264_prv->sps_data);
gst_buffer_unref(h264_prv->pps_data);
h264_prv->pps_data = NULL;
}
- return ret;
-}
-
-static EncoderStatus
-h264_encoder_alloc_buffers(GstH264Encoder *h264_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
-{
- EncoderStatus ret = ENCODER_NO_ERROR;
- VAStatus va_status = VA_STATUS_SUCCESS;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
- guint32 i;
-
- ENCODER_ASSERT(display && context);
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
- VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
- gboolean is_locked = FALSE;
- guint32 codedbuf_size = (ENCODER_WIDTH(h264_encoder) * ENCODER_HEIGHT(h264_encoder) * 400) / (16*16);
- ENCODER_CHECK_STATUS(alloc_all_available_coded_buffers(h264_prv, display, context, codedbuf_size, TRUE),
- ENCODER_ENC_RES_ERR,
- "alloc_all_available_coded_buffers failed.\n");
-
- /* create slice_param_buffers */
- h264_prv->slice_param_buffers = (VAEncSliceParameterBuffer*)g_malloc0_n(h264_encoder->slice_num,
- sizeof(h264_prv->slice_param_buffers[0]));
-end:
- return ret;
+ return TRUE;
}
-static EncoderStatus
-h264_encoder_release_buffers(GstH264Encoder *h264_encoder, GstVaapiDisplay *display, GstVaapiContext *context)
+static void
+h264_release_queued_buffers(GstH264EncoderPrivate *h264_prv)
{
- VAStatus va_status = VA_STATUS_SUCCESS;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
- guint32 available_buf_count = h264_prv->coded_buf_num;
- guint32 i;
-
- gboolean is_locked = FALSE;
-
- ENCODER_ASSERT(display);
- ENCODER_ASSERT(context);
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
-
- /* wait clear all coded buffer freed*/
- wait_and_clear_available_coded_buffers(h264_prv, display, TRUE);
-
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- va_status = vaDestroyBuffer(va_dpy, h264_prv->seq_parameter);
- ENCODER_RELEASE_DISPLAY_LOCK(display);
-
- if (h264_prv->slice_param_buffers) {
- g_free(h264_prv->slice_param_buffers);
- h264_prv->slice_param_buffers = NULL;
+ while (!g_queue_is_empty(h264_prv->queued_buffers)) {
+ GstBuffer* tmp = g_queue_pop_head(h264_prv->queued_buffers);
+ if (tmp)
+ gst_buffer_unref(tmp);
}
-
- return ENCODER_NO_ERROR;
}
-EncoderStatus
-gst_h264_encoder_encode(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
- GstVaapiContext *context, GstBuffer *raw_pic, GList **coded_pics)
+
+static gboolean
+gst_h264_encoder_release_resource(GstVaapiBaseEncoder* encoder, GstVaapiDisplay *display, GstVaapiContext *context)
{
GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
-
- EncoderStatus ret = ENCODER_NO_ERROR;
- gboolean is_key = FALSE;
- VABufferID* coded_buf = NULL;
- VAStatus va_status = VA_STATUS_SUCCESS;
- VASurfaceID buffer_surface_id = VA_INVALID_SURFACE;
- GstVaapiSurface *buffer_surface = NULL;
-
- gboolean is_locked = FALSE;
-
- ENCODER_ASSERT(display && context);
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
- VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
- GstVaapiSurface *new_surface = NULL;
-
- if (GST_VAAPI_IS_VIDEO_BUFFER(raw_pic)) {
- buffer_surface = h264_get_video_surface(h264_prv, GST_VAAPI_VIDEO_BUFFER(raw_pic));
- } else {
- new_surface = gst_vaapi_context_get_surface(context);
- buffer_surface = new_surface;
- ENCODER_CHECK_STATUS(buffer_surface, ENCODER_SURFACE_ERR, "h264_pop_free_surface failed.\n");
+ gboolean ret = TRUE;
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
- /*input picture to h264_prv->cur_surface_index*/
- va_status = h264_put_raw_buffer_to_surface(h264_encoder, display, raw_pic, buffer_surface);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "va put buffer to surface failed.\n");
+ /* release buffers first */
+ h264_encoder_release_parameters(h264_encoder, display, context);
+ h264_release_queued_buffers(h264_prv);
+ h264_prv->cur_display_num = 0;
+ h264_prv->cur_decode_num = 0;
+ h264_prv->cur_slice_type = SLICE_TYPE_I;
+ h264_prv->gop_count = 0;
+ h264_prv->last_decode_time = 0LL;
+ h264_prv->default_cts_offset = 0;
+
+ /*remove ref_surface1*/
+ if (h264_prv->ref_surface1) {
+ if (context) {
+ gst_vaapi_context_put_surface(context, h264_prv->ref_surface1);
+ } else {
+ g_object_unref(h264_prv->ref_surface1);
+ }
+ h264_prv->ref_surface1 = NULL;
}
- buffer_surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
- ENCODER_CHECK_STATUS(buffer_surface_id != VA_INVALID_SURFACE, ENCODER_SURFACE_ERR, "surface id == VA_INVALID_SURFACE.\n");
-
- /* begin picture, using default sid 0*/
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- va_status = vaBeginPicture(va_dpy, context_id, buffer_surface_id);
- ENCODER_RELEASE_DISPLAY_LOCK(display);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaBeginPicture error.\n");
-
- /* set input buffers*/
- is_key = ((h264_prv->frame_count % h264_encoder->intra_period) == 0);
-
- /*get valid coded buffer*/
- coded_buf = pop_available_coded_buffer(h264_prv);
- ENCODER_CHECK_STATUS(coded_buf, ENCODER_ENC_RES_ERR, "dequeue_available_coded_buffer error.\n");
-
- ret = h264_prepare_encoding(h264_encoder, display, context, is_key, *coded_buf);
- if (ENCODER_NO_ERROR != ret) {
- push_available_coded_buffer(h264_prv, coded_buf);
+ if (h264_prv->ref_surface2) {
+ if (context) {
+ gst_vaapi_context_put_surface(context, h264_prv->ref_surface2);
+ } else {
+ g_object_unref(h264_prv->ref_surface2);
+ }
+ h264_prv->ref_surface2 = NULL;
}
- ENCODER_CHECK_STATUS(ENCODER_NO_ERROR == ret, ENCODER_PICTURE_ERR, "h264_prepare_encoding failed.\n");
- /* end picture */
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- va_status = vaEndPicture(va_dpy, context_id);
- ENCODER_RELEASE_DISPLAY_LOCK(display);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaEndPicture error.\n");
-
- /*query surface result*/
- ret = h264_query_encoding_status(h264_encoder, display, buffer_surface,
- is_key, GST_BUFFER_TIMESTAMP(raw_pic), GST_BUFFER_DURATION(raw_pic), coded_buf, coded_pics);
- if (ENCODER_NO_ERROR != ret) {
- goto end;
+ /*remove recon_surface*/
+ if (h264_prv->recon_surface) {
+ if (context) {
+ gst_vaapi_context_put_surface(context, h264_prv->recon_surface);
+ } else {
+ g_object_unref(h264_prv->recon_surface);
+ }
+ h264_prv->recon_surface = NULL;
}
- h264_prv->frame_count++;
-
-end:
- ENCODER_RELEASE_DISPLAY_LOCK(display);
- if (new_surface) {
- gst_vaapi_context_put_surface(context, new_surface);
- }
return ret;
}
-static VABufferID *
-pop_available_coded_buffer(GstH264EncoderPrivate *h264_prv)
+static gboolean
+gst_h264_encoder_alloc_slices(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display, GstVaapiContext *context)
{
- VABufferID *coded_buf = NULL;
gboolean ret = TRUE;
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
- g_mutex_lock(h264_prv->code_buffer_lock);
-
- ENCODER_CHECK_STATUS(h264_prv->available_code_buffers, FALSE, "coded buffer not found");
- while (g_queue_is_empty(h264_prv->available_code_buffers)) {
- g_cond_wait(h264_prv->code_buffer_cond, h264_prv->code_buffer_lock);
- }
- coded_buf = (VABufferID*)g_queue_pop_head (h264_prv->available_code_buffers);
+ h264_prv->slice_param_buffers =
+#ifdef _SIMPLE_LIB_VA_
+ (VAEncSliceParameterBuffer*)
+#else
+ (VAEncSliceParameterBufferH264*)
+#endif
+ g_malloc0_n(h264_encoder->slice_num,
+ sizeof(h264_prv->slice_param_buffers[0]));
-end:
- g_mutex_unlock(h264_prv->code_buffer_lock);
- return coded_buf;
+ return ret;
}
-static gboolean
-push_available_coded_buffer(GstH264EncoderPrivate *h264_prv, VABufferID *buf)
+static void
+gst_h264_encoder_frame_failed(GstVaapiBaseEncoder *encoder, GstVaapiVideoBuffer* buffer)
{
- g_mutex_lock(h264_prv->code_buffer_lock);
- g_queue_push_head(h264_prv->available_code_buffers, buf);
- g_cond_signal(h264_prv->code_buffer_cond);
- g_mutex_unlock(h264_prv->code_buffer_lock);
- return TRUE;
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+
+ h264_release_queued_buffers(h264_prv);
+ h264_prv->cur_display_num = 0;
+ h264_prv->cur_decode_num = 0;
+ h264_prv->cur_slice_type = SLICE_TYPE_I;
+ h264_prv->gop_count = 0;
+ h264_prv->last_decode_time = 0LL;
}
-static gboolean
-alloc_all_available_coded_buffers(GstH264EncoderPrivate *h264_prv, GstVaapiDisplay *display,
- GstVaapiContext *context, guint32 buffer_size, gboolean need_display_lock)
+static EncoderStatus
+gst_h264_encoder_prepare_next_buffer(GstVaapiBaseEncoder* encoder,
+ GstVaapiVideoBuffer *display_buf, gboolean need_flush,
+ GstVaapiVideoBuffer **out_buf)
{
- guint32 i = 0;
- gboolean ret = TRUE;
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
- VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
- VAStatus va_status = VA_STATUS_SUCCESS;
-
- ENCODER_ASSERT(h264_prv->available_code_buffers);
- ENCODER_ASSERT(!h264_prv->coded_bufs);
-
- h264_prv->coded_bufs = (VABufferID*)g_malloc0(h264_prv->coded_buf_num * sizeof(h264_prv->coded_bufs[0]));
-
- if (need_display_lock) { /* lock */
- GST_VAAPI_DISPLAY_LOCK(display);
- }
- for (i = 0; i < h264_prv->coded_buf_num; i++) {
- va_status = vaCreateBuffer(va_dpy, context_id,VAEncCodedBufferType,
- buffer_size, 1, NULL, &h264_prv->coded_bufs[i]);
- if (VA_STATUS_SUCCESS != va_status)
- break;
- }
- if (need_display_lock) { /* unlock */
- GST_VAAPI_DISPLAY_UNLOCK(display);
+ EncoderStatus ret = ENCODER_NO_ERROR;
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ GstVaapiVideoBuffer *return_buf = NULL;
+ //guint64 pts = 0;
+
+ if (NULL == display_buf && g_queue_is_empty(h264_prv->queued_buffers)) {
+ ret = ENCODER_BUFFER_EMPTY;
+ if (h264_prv->gop_count >= h264_encoder->intra_period || need_flush)
+ h264_prv->gop_count = 0;
+ goto end;
}
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, FALSE, "alloc coded buffer failed.\n");
- /* init queue available_code_buffers */
- g_mutex_lock(h264_prv->code_buffer_lock);
- for (i = 0; i < h264_prv->coded_buf_num; i++) {
- g_queue_push_head(h264_prv->available_code_buffers, &h264_prv->coded_bufs[i]);
+ if (display_buf) {
+ ++h264_prv->gop_count;
+ gst_buffer_ref(GST_BUFFER_CAST(display_buf));
+ h264_prv->last_decode_time = GST_BUFFER_TIMESTAMP(display_buf);
}
- g_cond_signal(h264_prv->code_buffer_cond);
- g_mutex_unlock(h264_prv->code_buffer_lock);
-end:
- return ret;
-}
-
-static void
-wait_and_clear_available_coded_buffers(GstH264EncoderPrivate *h264_prv,
- GstVaapiDisplay *display, gboolean need_display_lock)
-{
- guint32 available_buf_count = h264_prv->coded_buf_num;
- VADisplay va_dpy = gst_vaapi_display_get_display(display);
- VAStatus va_status = VA_STATUS_SUCCESS;
- guint32 i = 0;
+ /* first frame */
+ if (h264_prv->gop_count == 1) {
+ ENCODER_ASSERT(display_buf);
+ h264_prv->cur_display_num = 0;
+ h264_prv->cur_decode_num = 0;
+ h264_prv->cur_slice_type = SLICE_TYPE_I;
+ return_buf = display_buf;
+ goto end;
+ }
- g_mutex_lock(h264_prv->code_buffer_lock);
- while (available_buf_count) {
- if (g_queue_is_empty(h264_prv->available_code_buffers)) {
- g_cond_wait(h264_prv->code_buffer_cond, h264_prv->code_buffer_lock);
+ if (display_buf) {
+ if (h264_encoder->b_frame_num &&
+ h264_prv->gop_count < h264_encoder->intra_period &&
+ g_queue_get_length(h264_prv->queued_buffers) < h264_encoder->b_frame_num
+ )
+ {
+ g_queue_push_tail(h264_prv->queued_buffers, display_buf);
+ ret = ENCODER_BUFFER_WAITING;
+ goto end;
+ }
+ h264_prv->cur_slice_type = SLICE_TYPE_P;
+ h264_prv->cur_display_num = h264_prv->gop_count-1;
+ ++h264_prv->cur_decode_num;
+ return_buf = display_buf;
+ } else {
+ if (need_flush) {
+ return_buf = (GstVaapiVideoBuffer*)g_queue_pop_tail(h264_prv->queued_buffers);
+ h264_prv->cur_slice_type = SLICE_TYPE_P;
+ h264_prv->cur_display_num = h264_prv->gop_count - 1;
+ ++h264_prv->cur_decode_num;
} else {
- g_queue_pop_head(h264_prv->available_code_buffers);
- available_buf_count--;
+ return_buf = (GstVaapiVideoBuffer*)g_queue_pop_head(h264_prv->queued_buffers);
+ h264_prv->cur_slice_type = SLICE_TYPE_B;
+ h264_prv->cur_display_num = h264_prv->gop_count - 2 - g_queue_get_length(h264_prv->queued_buffers);
}
}
- g_mutex_unlock(h264_prv->code_buffer_lock);
- if (need_display_lock) {
- GST_VAAPI_DISPLAY_LOCK(display);
- }
- for (i = 0; i < h264_prv->coded_buf_num; i++) {
- va_status = vaDestroyBuffer(va_dpy, h264_prv->coded_bufs[i]);
- }
- if (need_display_lock) {
- GST_VAAPI_DISPLAY_UNLOCK(display);
- }
-
- if (h264_prv->coded_bufs) {
- g_free(h264_prv->coded_bufs);
- h264_prv->coded_bufs = NULL;
- }
-}
-
-static EncoderStatus
-h264_put_raw_buffer_to_surface(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display,
- GstBuffer *raw_pic,
- GstVaapiSurface *surface)
-{
- EncoderStatus ret = ENCODER_NO_ERROR;
- VAStatus va_status = VA_STATUS_SUCCESS;
- VAImage surface_image;
- VADisplay va_dpy;
- GstVaapiImage *image;
- GstVaapiImageFormat image_format;
- guint8 *y_src = NULL, *u_src = NULL, *v_src = NULL;
- guint8 *y_dst = NULL, *u_dst = NULL, *v_dst = NULL;
- int y_size = 0, u_size = 0;
- int row = 0, col = 0;
- guint32 plane_count = 0;
- guint32 image_width = 0, image_height = 0;
- guint32 pitchy = 0, pitchu = 0, pitchv = 0;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
+end:
+ *out_buf = return_buf;
+ /* calculate cts/pts/dts */
+#if 0
+ if (return_buf) {
+ pts = GST_BUFFER_TIMESTAMP(return_buf);
+ tmp_next_buf = (GstVaapiVideoBuffer*)g_queue_peek_head(h264_prv->queued_buffers);
+ if (tmp_next_buf) {
+ GST_BUFFER_TIMESTAMP(return_buf) = GST_BUFFER_TIMESTAMP(tmp_next_buf);
+ } else if (SLICE_TYPE_B == h264_prv->cur_slice_type) {
+ GST_BUFFER_TIMESTAMP(return_buf) = h264_prv->last_decode_time;
+ }
- ENCODER_ASSERT(display);
- va_dpy = gst_vaapi_display_get_display(display);
- /*map image*/
- image = gst_vaapi_surface_derive_image(surface);
- gst_vaapi_image_map(image);
-
- image_format = gst_vaapi_image_get_format(image);
- image_width = gst_vaapi_image_get_width(image);
- image_height = gst_vaapi_image_get_height(image);
-
- /* copy buffer to surface */
- ENCODER_ASSERT(GST_BUFFER_SIZE(raw_pic) >= y_size + (y_size>>1));
-
- y_size = ENCODER_WIDTH(h264_encoder) * ENCODER_HEIGHT(h264_encoder);
- u_size = ((ENCODER_WIDTH(h264_encoder)+1) >> 1) * ((ENCODER_HEIGHT(h264_encoder)+1) >> 1);
-
- y_src = GST_BUFFER_DATA(raw_pic);
- u_src = y_src + y_size;
- v_src = u_src + u_size;
-
- plane_count = gst_vaapi_image_get_plane_count(image);
- y_dst = gst_vaapi_image_get_plane(image, 0);
- u_dst = gst_vaapi_image_get_plane(image, 1);
- pitchy = gst_vaapi_image_get_pitch(image, 0);
- pitchu = gst_vaapi_image_get_pitch(image, 1);
-
- if (plane_count > 2) {
- v_dst = gst_vaapi_image_get_plane(image, 2);
- pitchv = gst_vaapi_image_get_pitch(image, 2);
- }
-
- /* copy from avcenc.c*/
- /* Y plane */
- for (row = 0; row < image_height; row++) {
- memcpy(y_dst, y_src, image_width);
- y_dst += pitchy;
- y_src += ENCODER_WIDTH(h264_encoder);
- }
-
- if (GST_VAAPI_IMAGE_NV12 == image_format) { /* UV plane */
- if (GST_VAAPI_IMAGE_I420 == h264_prv->format) {
- for (row = 0; row < image_height / 2; row++) {
- for (col = 0; col < image_width / 2; col++) {
- u_dst[col * 2] = u_src[col];
- u_dst[col * 2 + 1] = v_src[col];
- }
-
- u_dst += pitchu;
- u_src += (ENCODER_WIDTH(h264_encoder)>>1);
- v_src += (ENCODER_WIDTH(h264_encoder)>>1);
- }
- } else if (GST_VAAPI_IMAGE_NV12 == h264_prv->format){
- for (row = 0; row < image_height / 2; row++) {
- memcpy(u_dst, u_src, image_width);
- u_src += ENCODER_WIDTH(h264_encoder);
- u_dst += pitchu;
- }
- } else {
- ENCODER_ASSERT(0);
+ pts += h264_prv->default_cts_offset;
+ if ((gint64)(pts - GST_BUFFER_TIMESTAMP(return_buf)) < 0) {
+ pts = GST_BUFFER_TIMESTAMP(return_buf);
}
- } else {
- /* FIXME: fix this later */
- ENCODER_ASSERT(0);
+
+ GST_BUFFER_OFFSET_END(return_buf) = pts;
+ GST_BUFFER_TIMESTAMP(return_buf) = pts;
}
+#endif
- /*unmap image*/
- g_object_unref(image);
- end:
return ret;
}
+#ifdef _SIMPLE_LIB_VA_
static EncoderStatus
-h264_prepare_encoding(GstH264Encoder *h264_encoder, GstVaapiDisplay *display,
- GstVaapiContext *context, gboolean is_key, VABufferID coded_buf)
+gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key)
{
EncoderStatus ret = ENCODER_NO_ERROR;
VAStatus va_status = VA_STATUS_SUCCESS;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
-
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
VAEncPictureParameterBufferH264 pic_h264;
VAEncSliceParameterBuffer *slice_h264 = NULL;
VADisplay va_dpy = gst_vaapi_display_get_display(display);
VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
+ *is_key = (h264_prv->cur_slice_type == SLICE_TYPE_I);
+
/* lock display */
ENCODER_ACQUIRE_DISPLAY_LOCK(display);
/*handle first surface_index*/
/*only need first frame*/
if (VA_INVALID_ID == h264_prv->seq_parameter) { /*first time*/
- VAEncSequenceParameterBufferH264 seq_h264 = {0};
-
+ VAEncSequenceParameterBufferH264 seq_h264 = { 0 };
seq_h264.level_idc = h264_encoder->level; /* 3.0 */
seq_h264.max_num_ref_frames = 1; /*Only I, P frames*/
seq_h264.picture_width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
va_status = vaCreateBuffer(va_dpy, context_id,
VAEncSequenceParameterBufferType,
sizeof(seq_h264), 1, &seq_h264, &h264_prv->seq_parameter);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_ENC_RES_ERR, "alloc seq-buffer failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ ENCODER_ENC_RES_ERR, "alloc seq-buffer failed.");
va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->seq_parameter, 1);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR, "vaRenderPicture seq-parameters failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ ENCODER_PICTURE_ERR, "vaRenderPicture seq-parameters failed.");
}
/* set pic_parameters*/
- if (!h264_prv->ref_surface) {
- h264_prv->ref_surface = gst_vaapi_context_get_surface(context);
- ENCODER_CHECK_STATUS(h264_prv->ref_surface, ENCODER_SURFACE_ERR, "reference surface, h264_pop_free_surface failed.\n");
+ if (!h264_prv->ref_surface1) {
+ h264_prv->ref_surface1 = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->ref_surface1, ENCODER_SURFACE_ERR,
+ "reference surface, h264_pop_free_surface failed.");
}
if (!h264_prv->recon_surface) {
h264_prv->recon_surface = gst_vaapi_context_get_surface(context);
- ENCODER_CHECK_STATUS(h264_prv->recon_surface, ENCODER_SURFACE_ERR, "reconstructed surface, h264_pop_free_surface failed.\n");
+ ENCODER_CHECK_STATUS(h264_prv->recon_surface, ENCODER_SURFACE_ERR,
+ "reconstructed surface, h264_pop_free_surface failed.");
}
- pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface);
+ pic_h264.reference_picture = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface1);
pic_h264.reconstructed_picture = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
pic_h264.coded_buf = coded_buf;
pic_h264.picture_width = ENCODER_WIDTH(h264_encoder);
va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "creating pic-param buffer failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_PICTURE_ERR, "creating pic-param buffer failed.");
va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->pic_parameter, 1);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "rendering pic-param buffer failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_PICTURE_ERR, "rendering pic-param buffer failed.");
/* set slice parameters, support multiple slices */
int i = 0;
--slice_mod_num;
}
last_row_num += slice_h264->slice_height;
- slice_h264->slice_flags.bits.is_intra = is_key;
+ slice_h264->slice_flags.bits.is_intra = *is_key;
slice_h264->slice_flags.bits.disable_deblocking_filter_idc = 0;
}
h264_encoder->slice_num,
h264_prv->slice_param_buffers,
&h264_prv->slice_parameter);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "creating slice-parameters buffer failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_PICTURE_ERR, "creating slice-parameters buffer failed.");
va_status = vaRenderPicture(va_dpy, context_id, &h264_prv->slice_parameter, 1);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_PICTURE_ERR, "rendering slice-parameters buffer failed.\n");
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ ENCODER_PICTURE_ERR, "rendering slice-parameters buffer failed.");
- /*after finished, set ref_surface_index, recon_surface_index */
- GstVaapiSurface *swap = h264_prv->ref_surface;
- h264_prv->ref_surface = h264_prv->recon_surface;
+ /*after finished, set ref_surface1_index, recon_surface_index */
+ GstVaapiSurface *swap = h264_prv->ref_surface1;
+ h264_prv->ref_surface1 = h264_prv->recon_surface;
h264_prv->recon_surface = swap;
end:
return ret;
}
+#else /* extended libva, new parameter structures*/
+
+static void h264_swap_surface(GstVaapiSurface **s1, GstVaapiSurface **s2)
+{
+ GstVaapiSurface *tmp;
+
+ g_return_if_fail(s1 && s2);
+ tmp = *s1;
+ *s1 = *s2;
+ *s2 = tmp;
+}
+
+static gboolean
+h264_recreate_seq_param(GstH264Encoder *h264_encoder,
+ VADisplay va_dpy, VAContextID context_id)
+{
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ VAEncSequenceParameterBufferH264 seq_h264 = { 0 };
+ guint width_in_mbs, height_in_mbs;
+ gboolean ret = TRUE;
+ VAStatus va_status = VA_STATUS_SUCCESS;
+
+ /* only once */
+ if (VA_INVALID_ID != h264_prv->seq_parameter)
+ return TRUE;
+
+ width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
+ height_in_mbs = (ENCODER_HEIGHT(h264_encoder)+15)/16;
+
+ seq_h264.seq_parameter_set_id = 0;
+ seq_h264.profile_idc = h264_encoder->profile;
+ seq_h264.level_idc = h264_encoder->level; /* 3.0 */
+ seq_h264.intra_period = h264_encoder->intra_period;
+ seq_h264.ip_period = 0; // ?
+  seq_h264.max_num_ref_frames = (h264_encoder->b_frame_num < 2 ? 3 : h264_encoder->b_frame_num+1); /* refs for B prediction plus one; floor of 3 — TODO confirm driver minimum */
+ seq_h264.picture_width_in_mbs = width_in_mbs;
+ seq_h264.picture_height_in_mbs = height_in_mbs;
+ seq_h264.frame_mbs_only_flag = 1;
+ seq_h264.target_usage = 1; // ?
+
+ if (h264_encoder->init_qp == -1)
+ seq_h264.rate_control_method = BR_CBR;
+ else if (h264_encoder->init_qp == -2)
+ seq_h264.rate_control_method = BR_VBR;
+ else {
+ ENCODER_ASSERT(h264_encoder->init_qp >= 0 && h264_encoder->init_qp <= 51);
+ seq_h264.rate_control_method = BR_CQP;
+ }
+
+ if (h264_encoder->bitrate> 0)
+ seq_h264.bits_per_second = h264_encoder->bitrate; /* use kbps as input */
+ else
+ seq_h264.bits_per_second = 0;
+
+ if (seq_h264.rate_control_method == BR_VBR) {
+ seq_h264.max_bits_per_second = seq_h264.bits_per_second*1.5;
+ seq_h264.min_bits_per_second = seq_h264.bits_per_second*0.3;
+ }
+  seq_h264.initial_hrd_buffer_fullness = 0; /* 0 = let the driver pick HRD fullness — verify */
+ seq_h264.hrd_buffer_size = 0;
+ seq_h264.num_units_in_tick = 100;
+ seq_h264.time_scale = ENCODER_FPS(h264_encoder)*2*seq_h264.num_units_in_tick;
+
+ if (height_in_mbs*16 - ENCODER_HEIGHT(h264_encoder)) {
+ seq_h264.frame_cropping_flag = 1;
+ seq_h264.frame_crop_left_offset = 0;
+ seq_h264.frame_crop_right_offset = 0;
+ seq_h264.frame_crop_top_offset = 0;
+ seq_h264.frame_crop_bottom_offset =
+ (height_in_mbs * 16 - ENCODER_HEIGHT(h264_encoder))/(2 * (!seq_h264.frame_mbs_only_flag + 1));
+ }
+ seq_h264.pic_order_cnt_type = 0; // pic order cnt
+ seq_h264.direct_8x8_inference_flag = 0;
+ seq_h264.log2_max_frame_num_minus4 = 4; // log2(seq_h264.intra_period)-3 : 0
+ seq_h264.log2_max_pic_order_cnt_lsb_minus4 = seq_h264.log2_max_frame_num_minus4+2;
+ seq_h264.vui_flag = 0; // 0? or 1?
+
+ va_status = vaCreateBuffer(va_dpy, context_id,
+ VAEncSequenceParameterBufferType,
+ sizeof(seq_h264), 1,
+ &seq_h264, &h264_prv->seq_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ FALSE, "alloc seq-buffer failed.");
+
+ /*pack sps header buffer/data */
+ if (NULL == h264_prv->sps_data) {
+ VAEncPackedHeaderParameterBuffer packed_header_param_buffer = { 0 };
+ guint32 length_in_bits, offset_in_bytes;
+ guint8 *packed_seq_buffer = NULL;
+ H264Bitstream bitstream;
+ h264_bitstream_init(&bitstream, 128*8);
+ h264_bitstream_write_uint(&bitstream, 0x00000001, 32); /* start code*/
+ h264_bitstream_write_nal_header(&bitstream, NAL_REF_IDC_HIGH, NAL_SPS);
+ h264_bitstream_write_sps(&bitstream, &seq_h264);
+ ENCODER_ASSERT(BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+ length_in_bits = BIT_STREAM_BIT_SIZE(&bitstream);
+ packed_seq_buffer = BIT_STREAM_BUFFER(&bitstream);
+ //h264_prv->sps_data = gst_buffer_new_and_alloc((length_in_bits+7)/8);
+ //GST_BUFFER_SIZE(h264_prv->sps_data) = (length_in_bits+7)/8-4;
+ //memcpy(GST_BUFFER_DATA(h264_prv->sps_data), packed_seq_buffer+4, (length_in_bits+7)/8-4);
+
+ offset_in_bytes = 0;
+ packed_header_param_buffer.type = VAEncPackedHeaderSPS;
+ packed_header_param_buffer.insert_emulation_bytes = 1;
+ packed_header_param_buffer.skip_emulation_check_count = 5;
+ packed_header_param_buffer.num_headers = 1;
+ packed_header_param_buffer.length_in_bits = &length_in_bits;
+ packed_header_param_buffer.offset_in_bytes = &offset_in_bytes;
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncPackedHeaderParameterBufferType,
+ sizeof(packed_header_param_buffer), 1,
+ &packed_header_param_buffer,
+ &h264_prv->packed_sps_par_buf);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ FALSE,
+ "EncPackedSeqHeaderParameterBuffer failed");
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncPackedHeaderDataBufferType,
+ (length_in_bits + 7) / 8, 1,
+ packed_seq_buffer,
+ &h264_prv->packed_sps_data_buf);
+ h264_bitstream_destroy(&bitstream, TRUE);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ FALSE,
+ "EncPackedSeqHeaderDataBuffer failed");
+ }
+end:
+
+ return ret;
+}
+
+static gboolean
+h264_recreate_pic_param(GstH264Encoder *h264_encoder,
+ VADisplay va_dpy, VAContextID context_id,
+ VABufferID coded_buf)
+{
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ VAEncPictureParameterBufferH264 pic_h264;
+ gboolean ret = TRUE;
+ VAStatus va_status = VA_STATUS_SUCCESS;
+
+ VAAPI_UNUSED_ARG(va_status);
+ memset(&pic_h264, 0, sizeof(pic_h264));
+ pic_h264.CurrPic.picture_id = GST_VAAPI_OBJECT_ID(h264_prv->recon_surface);
+  pic_h264.CurrPic.TopFieldOrderCnt = h264_prv->cur_display_num * 2; /* POC counted in fields: display index * 2 for frame-only coding — confirm */
+ pic_h264.ReferenceFrames[0].picture_id = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface1);
+ pic_h264.ReferenceFrames[1].picture_id = GST_VAAPI_OBJECT_ID(h264_prv->ref_surface2);
+ pic_h264.ReferenceFrames[2].picture_id = VA_INVALID_ID;
+ pic_h264.CodedBuf = coded_buf;
+
+ pic_h264.seq_parameter_set_id = 0;
+ pic_h264.pic_parameter_set_id = 0;
+ pic_h264.last_picture = 0;
+ pic_h264.frame_num = (h264_prv->cur_slice_type == SLICE_TYPE_B ?
+ (h264_prv->cur_decode_num + 1) : h264_prv->cur_decode_num);
+ pic_h264.coding_type = 0;
+ pic_h264.pic_init_qp = (h264_encoder->init_qp >= 0 ? h264_encoder->init_qp : 26);
+ pic_h264.num_ref_idx_l0_active_minus1 = 0;
+ pic_h264.num_ref_idx_l1_active_minus1 = 0;
+ pic_h264.pic_fields.bits.idr_pic_flag = (h264_prv->cur_slice_type == SLICE_TYPE_I);
+ pic_h264.pic_fields.bits.reference_pic_flag = (h264_prv->cur_slice_type != SLICE_TYPE_B);
+ pic_h264.pic_fields.bits.entropy_coding_mode_flag = ENTROPY_MODE_CABAC;
+ pic_h264.pic_fields.bits.weighted_pred_flag = 0;
+ pic_h264.pic_fields.bits.weighted_bipred_idc = 0;
+ pic_h264.pic_fields.bits.transform_8x8_mode_flag = 1;
+ pic_h264.pic_fields.bits.deblocking_filter_control_present_flag = 1;
+
+ char *frame_type = "I";
+ if (h264_prv->cur_slice_type == SLICE_TYPE_P)
+ frame_type = "P";
+ if (h264_prv->cur_slice_type == SLICE_TYPE_B)
+ frame_type = "B";
+ ENCODER_LOG_INFO("type:%s, frame_num:%d, display_num:%d",
+ frame_type, pic_h264.frame_num, pic_h264.CurrPic.TopFieldOrderCnt);
+
+  if (VA_INVALID_ID != h264_prv->pic_parameter) { /* destroy the previous pic-param buffer before recreating it for this frame */
+ vaDestroyBuffer(va_dpy, h264_prv->pic_parameter);
+ h264_prv->pic_parameter = VA_INVALID_ID;
+ }
+ va_status = vaCreateBuffer(va_dpy, context_id, VAEncPictureParameterBufferType,
+ sizeof(pic_h264), 1, &pic_h264, &h264_prv->pic_parameter);
+
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ FALSE, "creating pic-param buffer failed.");
+
+ //if (NULL == h264_prv->pps_data) {
+ if (VA_INVALID_ID == h264_prv->packed_pps_data_buf) {
+ VAEncPackedHeaderParameterBuffer packed_header_param_buffer = { 0 };
+ guint32 length_in_bits, offset_in_bytes;
+ guint8 *packed_pic_buffer = NULL;
+ H264Bitstream bitstream;
+ h264_bitstream_init(&bitstream, 128*8);
+ h264_bitstream_write_uint(&bitstream, 0x00000001, 32); /* start code*/
+ h264_bitstream_write_nal_header(&bitstream, NAL_REF_IDC_HIGH, NAL_PPS);
+ h264_bitstream_write_pps(&bitstream, &pic_h264);
+ ENCODER_ASSERT(BIT_STREAM_BIT_SIZE(&bitstream)%8 == 0);
+ length_in_bits = BIT_STREAM_BIT_SIZE(&bitstream);
+ packed_pic_buffer = BIT_STREAM_BUFFER(&bitstream);
+ //h264_prv->pps_data = gst_buffer_new_and_alloc((length_in_bits+7)/8);
+ //GST_BUFFER_SIZE(h264_prv->pps_data) = (length_in_bits+7)/8-4;
+ //memcpy(GST_BUFFER_DATA(h264_prv->pps_data), packed_pic_buffer+4, (length_in_bits+7)/8-4);
+
+ offset_in_bytes = 0;
+ packed_header_param_buffer.type = VAEncPackedHeaderPPS;
+ packed_header_param_buffer.insert_emulation_bytes = 1;
+ packed_header_param_buffer.skip_emulation_check_count = 5;
+ packed_header_param_buffer.num_headers = 1;
+ packed_header_param_buffer.length_in_bits = &length_in_bits;
+ packed_header_param_buffer.offset_in_bytes = &offset_in_bytes;
+
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncPackedHeaderParameterBufferType,
+ sizeof(packed_header_param_buffer), 1,
+ &packed_header_param_buffer,
+ &h264_prv->packed_pps_par_buf);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ FALSE,
+ "EncPackedPicHeaderParameterBuffer failed");
+
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncPackedHeaderDataBufferType,
+ (length_in_bits + 7) / 8, 1,
+ packed_pic_buffer,
+ &h264_prv->packed_pps_data_buf);
+ h264_bitstream_destroy(&bitstream, TRUE);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status,
+ FALSE,
+ "EncPackedPicHeaderDataBuffer failed");
+ }
+
+end:
+ return ret;
+}
+
+
+static gboolean
+h264_recreate_slice_param(GstH264Encoder *h264_encoder,
+ VADisplay va_dpy, VAContextID context_id)
+{
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ VAEncSliceParameterBufferH264 *slice_h264 = NULL;
+ guint width_in_mbs;
+ gboolean ret = TRUE;
+ VAStatus va_status = VA_STATUS_SUCCESS;
+
+ width_in_mbs = (ENCODER_WIDTH(h264_encoder)+15)/16;
+
+ int i = 0;
+ guint32 last_row_num = 0;
+ guint32 slice_mod_num = h264_prv->slice_mod_mb_num;
+
+ memset(h264_prv->slice_param_buffers, 0, h264_encoder->slice_num*sizeof(h264_prv->slice_param_buffers[0]));
+ for (i = 0; i < h264_encoder->slice_num; ++i) {
+ slice_h264 = &h264_prv->slice_param_buffers[i];
+
+ slice_h264->starting_macroblock_address = last_row_num*width_in_mbs;
+ slice_h264->number_of_mbs = width_in_mbs*h264_prv->default_slice_height;
+ last_row_num += h264_prv->default_slice_height;
+ if (slice_mod_num) {
+ slice_h264->number_of_mbs += width_in_mbs;
+ ++last_row_num;
+ --slice_mod_num;
+ }
+ slice_h264->pic_parameter_set_id = 0;
+ slice_h264->slice_type = h264_prv->cur_slice_type;
+ slice_h264->direct_spatial_mv_pred_flag = 0;
+ slice_h264->num_ref_idx_l0_active_minus1 = 0;
+ slice_h264->num_ref_idx_l1_active_minus1 = 0;
+ slice_h264->cabac_init_idc = 0;
+ slice_h264->slice_qp_delta = 0;
+ slice_h264->disable_deblocking_filter_idc = 0;
+ slice_h264->slice_alpha_c0_offset_div2 = 2;
+ slice_h264->slice_beta_offset_div2 = 2;
+ slice_h264->idr_pic_id = 0;
+
+ slice_h264->ref_pic_list_modification_flag_l0 = 0;
+ slice_h264->ref_pic_list_modification_flag_l1 = 0;
+
+ }
+ ENCODER_ASSERT(last_row_num == (ENCODER_HEIGHT(h264_encoder)+15)/16);
+
+ if (VA_INVALID_ID != h264_prv->slice_parameter) {
+ vaDestroyBuffer(va_dpy, h264_prv->slice_parameter);
+ h264_prv->slice_parameter = VA_INVALID_ID;
+ }
+ va_status = vaCreateBuffer(va_dpy,
+ context_id,
+ VAEncSliceParameterBufferType,
+ sizeof(h264_prv->slice_param_buffers[0]),
+ h264_encoder->slice_num,
+ h264_prv->slice_param_buffers,
+ &h264_prv->slice_parameter);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status,
+ FALSE, "creating slice-parameters buffer failed.");
+
+end:
+ return ret;
+}
+
static EncoderStatus
-h264_query_encoding_status(GstH264Encoder *h264_encoder,
- GstVaapiDisplay *display,
- GstVaapiSurface *buffer_surface,
- gboolean is_key,
- GstClockTime timestamp,
- GstClockTime duration,
- VABufferID *coded_buf,
- GList **coded_pics)
+gst_h264_encoder_rendering(GstVaapiBaseEncoder *encoder, GstVaapiDisplay *display,
+ GstVaapiContext *context, GstVaapiSurface *surface,
+ guint frame_index, VABufferID coded_buf, gboolean *is_key)
{
EncoderStatus ret = ENCODER_NO_ERROR;
VAStatus va_status = VA_STATUS_SUCCESS;
- VASurfaceStatus surface_status = 0;
- VACodedBufferSegment *buf_list = NULL;
- GstBuffer* ret_buffer = NULL;
- gboolean has_coded_data = FALSE;
+ GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
+ VABufferID va_buffers[64];
+ guint32 va_buffers_count = 0;
+ gboolean is_params_ok = TRUE;
+
gboolean is_locked = FALSE;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
ENCODER_ASSERT(display && context);
- VASurfaceID surface_id = (VASurfaceID)GST_VAAPI_OBJECT_ID(buffer_surface);
VADisplay va_dpy = gst_vaapi_display_get_display(display);
+ VAContextID context_id = GST_VAAPI_OBJECT_ID(context);
- ENCODER_ASSERT(coded_pics && *coded_pics == NULL);
-
- /* lock display */
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
+ *is_key = (h264_prv->cur_slice_type == SLICE_TYPE_I);
- va_status = vaSyncSurface(va_dpy, surface_id);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaSyncSurface failed.\n");
+ if (!h264_prv->ref_surface1) {
+ h264_prv->ref_surface1 = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->ref_surface1,
+ ENCODER_SURFACE_ERR,
+ "reference surface, h264_pop_free_surface failed.");
+ }
+ if (!h264_prv->ref_surface2) {
+ h264_prv->ref_surface2 = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->ref_surface2,
+ ENCODER_SURFACE_ERR,
+ "reference surface, h264_pop_free_surface failed.");
+ }
+ if (!h264_prv->recon_surface) {
+ h264_prv->recon_surface = gst_vaapi_context_get_surface(context);
+ ENCODER_CHECK_STATUS(h264_prv->recon_surface,
+ ENCODER_SURFACE_ERR,
+ "reconstructed surface, h264_pop_free_surface failed.");
+ }
- va_status = vaQuerySurfaceStatus(va_dpy, surface_id, &surface_status);
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaQuerySurfaceStatus failed.\n");
- if (VASurfaceSkipped&surface_status) {
- ENCODER_LOG_ERROR("frame skipped, dts:%" GST_TIME_FORMAT ".\n", GST_TIME_ARGS(timestamp));
+ if (SLICE_TYPE_P == h264_prv->cur_slice_type) {
+ h264_swap_surface(&h264_prv->ref_surface1, &h264_prv->ref_surface2);
}
- va_status = vaMapBuffer(va_dpy, *coded_buf, (void **)(&buf_list));
- ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS ==va_status, ENCODER_QUERY_STATUS_ERR, "vaMapBuffer failed.\n");
+ /* set sequence parameters, need set every time */
+ is_params_ok = h264_recreate_seq_param(h264_encoder, va_dpy, context_id);
+ ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+ "h264_recreate_seq_param failed");
+ /* set pic_parameters*/
+ is_params_ok = h264_recreate_pic_param(h264_encoder, va_dpy, context_id, coded_buf);
+ ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+ "h264_recreate_pic_param failed");
+ /* set slice parameters, support multiple slices */
+ is_params_ok = h264_recreate_slice_param(h264_encoder, va_dpy, context_id);
+ ENCODER_CHECK_STATUS(is_params_ok, ENCODER_PARAMETER_ERR,
+ "h264_recreate_slice_param failed");
- /*unlock display*/
- ENCODER_RELEASE_DISPLAY_LOCK(display);
+ /* lock display */
+ ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- while (buf_list != NULL) {
- if (!h264_prv->sps_data || !h264_prv->pps_data) {
- h264_encoder_read_sps_pps(h264_prv, buf_list->buf, buf_list->size);
- }
- ret_buffer = h264_encoder_create_coded_buffer(h264_prv, buf_list->buf, buf_list->size, coded_buf);
- GST_BUFFER_TIMESTAMP(ret_buffer) = timestamp;
- GST_BUFFER_DURATION(ret_buffer) = duration;
- if (!is_key) {
- GST_BUFFER_FLAG_SET(ret_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
- }
- *coded_pics = g_list_append(*coded_pics, ret_buffer);
- buf_list = (VACodedBufferSegment*)buf_list->next;
- ENCODER_ASSERT(NULL == buf_list);
- has_coded_data = TRUE;
+ /*render all buffers*/
+ if (VA_INVALID_ID != h264_prv->seq_parameter) {
+ va_buffers[va_buffers_count++] = h264_prv->seq_parameter;
}
-
-#if SHARE_CODED_BUF
- if (!has_coded_data)
-#endif
- { // if non-related, push back to available_code_buffers
- ENCODER_ACQUIRE_DISPLAY_LOCK(display);
- vaUnmapBuffer(va_dpy, *coded_buf);
- ENCODER_RELEASE_DISPLAY_LOCK(display);
- push_available_coded_buffer(h264_prv, coded_buf);
+ if (VA_INVALID_ID != h264_prv->pic_parameter) {
+ va_buffers[va_buffers_count++] = h264_prv->pic_parameter;
+ }
+ if (VA_INVALID_ID != h264_prv->slice_parameter) {
+ va_buffers[va_buffers_count++] = h264_prv->slice_parameter;
+ }
+ if (SLICE_TYPE_I == h264_prv->cur_slice_type) {
+ if (VA_INVALID_ID != h264_prv->packed_sps_par_buf) {
+ va_buffers[va_buffers_count++] = h264_prv->packed_sps_par_buf;
+ }
+ if (VA_INVALID_ID != h264_prv->packed_sps_data_buf) {
+ va_buffers[va_buffers_count++] = h264_prv->packed_sps_data_buf;
+ }
+ if (VA_INVALID_ID != h264_prv->packed_pps_par_buf) {
+ va_buffers[va_buffers_count++] = h264_prv->packed_pps_par_buf;
+ }
+ if (VA_INVALID_ID != h264_prv->packed_pps_data_buf) {
+ va_buffers[va_buffers_count++] = h264_prv->packed_pps_data_buf;
+ }
}
- return ENCODER_NO_ERROR;
+ va_status = vaRenderPicture(va_dpy, context_id, va_buffers, va_buffers_count);
+ ENCODER_CHECK_STATUS(VA_STATUS_SUCCESS == va_status, ENCODER_PICTURE_ERR,
+ "vaRenderH264Picture failed.");
-end:
- /*unlock display*/
+ /*after finished, swap recon and surface2*/
+ if (SLICE_TYPE_P == h264_prv->cur_slice_type ||
+ SLICE_TYPE_I == h264_prv->cur_slice_type) {
+ h264_swap_surface(&h264_prv->recon_surface, &h264_prv->ref_surface2);
+ }
+
+ end:
ENCODER_RELEASE_DISPLAY_LOCK(display);
return ret;
}
+#endif
+
static GstBuffer *
-h264_encoder_create_coded_buffer(GstH264EncoderPrivate *h264_prv,
+gst_h264_encoder_copy_coded_buffer(GstVaapiBaseEncoder *encoder,
guint8 *frame,
guint32 frame_size,
VABufferID *coded_buf)
{
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
GstBuffer *ret_buffer;
guint32 nal_size;
const guint8 *nal_start;
guint8 *frame_end;
-#if SHARE_CODED_BUF
- ret_buffer = gst_h264_encode_buffer_new(h264_prv, coded_buf);
- ENCODER_ASSERT(ret_buffer);
- GST_BUFFER_MALLOCDATA(ret_buffer) = NULL;
- GST_BUFFER_DATA(ret_buffer) = buf_list->buf;
- GST_BUFFER_SIZE(ret_buffer) = buf_list->size;
-#else
ret_buffer = gst_buffer_new();
ENCODER_ASSERT(ret_buffer);
H264Bitstream bitstream;
GST_BUFFER_DATA(ret_buffer) = BIT_STREAM_BUFFER(&bitstream);
GST_BUFFER_SIZE(ret_buffer) = BIT_STREAM_BIT_SIZE(&bitstream)/8;
h264_bitstream_destroy(&bitstream, FALSE);
-#endif
return ret_buffer;
-
}
static EncoderStatus
return ENCODER_NO_ERROR;
}
+/* Frame-notify hook called with each coded bitstream chunk (buf/size).
+ * Until both SPS and PPS have been cached it calls
+ * h264_encoder_read_sps_pps() — presumably to extract them from the
+ * coded data (TODO confirm against that helper) — and once both
+ * sps_data and pps_data are set it disables further notifications so
+ * later frames are not rescanned. */
+static void
+gst_h264_notify_frame(GstVaapiBaseEncoder *encoder, guint8 *buf, guint32 size)
+{
+  GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(encoder);
+  /* headers not (fully) captured yet: try to harvest them from this frame */
+  if (!h264_prv->sps_data || !h264_prv->pps_data) {
+    h264_encoder_read_sps_pps(h264_prv, buf, size);
+  }
+  /* both headers cached -> no further frame notifications are needed */
+  if (h264_prv->sps_data && h264_prv->pps_data) {
+    gst_vaapi_base_encoder_set_frame_notify(GST_VAAPI_BASE_ENCODER(encoder), FALSE);
+  }
+}
+
+
static gboolean
h264_read_sps_attributes(const guint8 *sps_data, guint32 sps_size,
guint32 *profile_idc, guint32 *profile_comp, guint32 *level_idc)
}
-EncoderStatus
+static EncoderStatus
gst_h264_encoder_flush(GstVaapiEncoder* encoder, GstVaapiDisplay *display,
GstVaapiContext *context, GList **coded_pics)
{
GstH264Encoder* h264_encoder = GST_H264_ENCODER(encoder);
EncoderStatus ret = ENCODER_NO_ERROR;
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
- h264_prv->frame_count = 0;
- /*do we need destroy h264_prv->seq_parameter? */
+ //h264_prv->frame_count = 0;
+ h264_prv->cur_display_num = 0;
+ h264_prv->cur_decode_num = 0;
+ h264_prv->cur_slice_type = SLICE_TYPE_I;
+ h264_prv->gop_count = g_queue_get_length(h264_prv->queued_buffers);
+ //gst_vaapi_base_encoder_set_frame_notify((GST_VAAPI_BASE_ENCODER)encoder, TRUE);
//end:
return ret;
h264_encoder->profile = 64;
h264_encoder->level = 30;
- h264_encoder->parent.width = 1280;
- h264_encoder->parent.height = 720;
- h264_encoder->parent.frame_rate = 10;
+ encoder->width = 1280;
+ encoder->height = 720;
+ encoder->frame_rate = 10;
h264_encoder->bitrate = 512*1000;
h264_encoder->intra_period = 30;
ret = gst_vaapi_encoder_initialize(encoder);
/*set buffers*/
int box_width=8;
int row_shift=0;
+
+ VAAPI_UNUSED_ARG(v_width);
+ VAAPI_UNUSED_ARG(u_width);
+ VAAPI_UNUSED_ARG(y_width);
raw_buffer = (GstBuffer**)g_malloc0(raw_buffer_num*sizeof(GstBuffer*));
for (i = 0; i < raw_buffer_num; i++) {
raw_buffer[i] = gst_buffer_new_and_alloc(buffer_size);
gst_h264_encoder_get_avcC_codec_data(GstVaapiEncoder *encoder, GstBuffer **buffer)
{
GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
GstBuffer *avc_codec;
const guint32 configuration_version = 0x01;
const guint32 length_size_minus_one = 0x03;
gst_h264_encoder_get_nal_codec_data(GstVaapiEncoder *encoder, GstBuffer **buffer)
{
GstH264Encoder *h264_encoder = GST_H264_ENCODER(encoder);
- GstH264EncoderPrivate *h264_prv = ENCPRV(h264_encoder);
+ GstH264EncoderPrivate *h264_prv = GST_H264_ENCODER_GET_PRIVATE(h264_encoder);
GstBuffer *nal_sps_pps;
ENCODER_ASSERT(buffer);
return TRUE;
}
- ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, bit_size), FALSE, "h264_bitstream_auto_grow failed.\n");
+ VAAPI_UNUSED_ARG(ret);
+ ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, bit_size),
+ FALSE,
+ "h264_bitstream_auto_grow failed.");
byte_pos = (bitstream->bit_size>>3);
bit_offset = (bitstream->bit_size&0x07);
cur_byte = bitstream->buffer + byte_pos;
bit_offset = 0;
}
ENCODER_ASSERT(cur_byte <= bitstream->buffer + bitstream->max_bit_capability/8);
- return TRUE;
end:
- return FALSE;
+ return ret;
}
static gboolean h264_bitstream_align(H264Bitstream *bitstream, guint32 value)
if (!byte_size) {
return 0;
}
- ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, byte_size<<3), FALSE, "h264_bitstream_auto_grow failed.\n");
+
+ VAAPI_UNUSED_ARG(ret);
+ ENCODER_CHECK_STATUS(TRUE == h264_bitstream_auto_grow(bitstream, byte_size<<3),
+ FALSE,
+ "h264_bitstream_auto_grow failed.");
if (0 == (bitstream->bit_size&0x07)) {
memcpy(&bitstream->buffer[bitstream->bit_size>>3], buf, byte_size);
bitstream->bit_size += (byte_size<<3);
++buf;
}
}
- return TRUE;
end:
- return FALSE;
+ return ret;
}
static gboolean
++size_in_bits;
tmp_value >>= 1;
}
- ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, 0, size_in_bits-1), FALSE, "h264_bitstream_write_ue failed.\n");
- ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, value, size_in_bits), FALSE, "h264_bitstream_write_ue failed.\n");
- return TRUE;
+ ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, 0, size_in_bits-1),
+ FALSE,
+ "h264_bitstream_write_ue failed.");
+ ENCODER_CHECK_STATUS(h264_bitstream_write_uint(bitstream, value, size_in_bits),
+ FALSE,
+ "h264_bitstream_write_ue failed.");
end:
- return FALSE;
+ return ret;
}
static gboolean
-h264_bitstream_write_se(H264Bitstream *bitstream, guint32 value)
+h264_bitstream_write_se(H264Bitstream *bitstream, gint32 value)
{
gboolean ret = TRUE;
guint32 new_val;
new_val = (value<<1) - 1;
}
- ENCODER_CHECK_STATUS(h264_bitstream_write_ue(bitstream, new_val), FALSE, "h264_bitstream_write_se failed.\n");
- return TRUE;
+ ENCODER_CHECK_STATUS(h264_bitstream_write_ue(bitstream, new_val),
+ FALSE,
+ "h264_bitstream_write_se failed.");
- end:
- return FALSE;
+end:
+ return ret;
}
static gboolean
}
+/* Writes the 1-byte H.264 NAL unit header (spec 7.3.1):
+ * forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5).
+ * Always returns TRUE; write errors from the underlying
+ * h264_bitstream_write_uint() calls are not propagated here. */
static gboolean
-h264_bitstream_write_sps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
+h264_bitstream_write_nal_header(H264Bitstream *bitstream,
+                       guint nal_ref_idc, guint nal_unit_type)
{
-  guint32 constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
-  guint32 seq_parameter_set_id = 0;
+  h264_bitstream_write_uint(bitstream, 0, 1); /* forbidden_zero_bit */
+  h264_bitstream_write_uint(bitstream, nal_ref_idc, 2); /* nal_ref_idc */
+  h264_bitstream_write_uint(bitstream, nal_unit_type, 5); /* nal_unit_type */
+  return TRUE;
+}
- /*need to set the values*/
- guint32 log2_max_frame_num_minus4 = 0; // 1? 3?
- guint32 pic_order_cnt_type = 0; // Must be 0
- guint32 log2_max_pic_order_cnt_lsb_minus4 = 0; // 2 ? 4?
- guint32 num_ref_frames = 1; // only P frames
+static gboolean
+h264_bitstream_write_sps(H264Bitstream *bitstream,
+ VAEncSequenceParameterBufferH264 *seq)
+{
+ guint32 constraint_set0_flag, constraint_set1_flag, constraint_set2_flag, constraint_set3_flag;
guint32 gaps_in_frame_num_value_allowed_flag = 0; // ??
- guint32 mb_width = (ENCODER_WIDTH(h264_prv->public)+15)/16; // mb_width
- guint32 mb_height = (ENCODER_HEIGHT(h264_prv->public)+15)/16; // mb_height
- guint32 frame_mbs_only_flag = 1; // only mbs
- guint32 frame_cropping_flag = 0;
- guint32 frame_crop_bottom_offset = 0;
- guint32 vui_present_flag = 0; // no vui flags
+ guint32 b_qpprime_y_zero_transform_bypass = (seq->rate_control_method == BR_CQP);
+ guint32 residual_color_transform_flag = 0;
+ guint32 pic_height_in_map_units = (seq->frame_mbs_only_flag ?
+ seq->picture_height_in_mbs :
+ seq->picture_height_in_mbs/2);
+ guint32 mb_adaptive_frame_field = !seq->frame_mbs_only_flag;
+ guint32 i = 0;
- constraint_set0_flag = h264_prv->public->profile == H264_PROFILE_BASELINE;
- constraint_set1_flag = h264_prv->public->profile <= H264_PROFILE_MAIN;
+ constraint_set0_flag = seq->profile_idc == H264_PROFILE_BASELINE;
+ constraint_set1_flag = seq->profile_idc <= H264_PROFILE_MAIN;
constraint_set2_flag = 0;
constraint_set3_flag = 0;
- if (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) {
- frame_cropping_flag = 1;
- frame_crop_bottom_offset =
- (mb_height * 16 - ENCODER_HEIGHT(h264_prv->public)) / (2 * (!frame_mbs_only_flag + 1));
- }
-
- h264_bitstream_write_uint(bitstream, h264_prv->public->profile, 8); /* profile_idc */
+ h264_bitstream_write_uint(bitstream, seq->profile_idc, 8); /* profile_idc */
h264_bitstream_write_uint(bitstream, constraint_set0_flag, 1); /* constraint_set0_flag */
h264_bitstream_write_uint(bitstream, constraint_set1_flag, 1); /* constraint_set1_flag */
h264_bitstream_write_uint(bitstream, constraint_set2_flag, 1); /* constraint_set2_flag */
h264_bitstream_write_uint(bitstream, constraint_set3_flag, 1); /* constraint_set3_flag */
h264_bitstream_write_uint(bitstream, 0, 4); /* reserved_zero_4bits */
- h264_bitstream_write_uint(bitstream, h264_prv->public->level, 8); /* level_idc */
- h264_bitstream_write_ue(bitstream, seq_parameter_set_id); /* seq_parameter_set_id */
+ h264_bitstream_write_uint(bitstream, seq->level_idc, 8); /* level_idc */
+ h264_bitstream_write_ue(bitstream, seq->seq_parameter_set_id); /* seq_parameter_set_id */
- if (h264_prv->public->profile >= H264_PROFILE_HIGH) {
- /* FIXME: fix for high profile */
- ENCODER_ASSERT(0);
+ if (seq->profile_idc >= H264_PROFILE_HIGH) {
+ /* for high profile */
+ ENCODER_ASSERT(0);
+ h264_bitstream_write_ue(bitstream, seq->seq_fields.bits.chroma_format_idc); /* chroma_format_idc = 1, 4:2:0*/
+ if (3 == seq->seq_fields.bits.chroma_format_idc) {
+ h264_bitstream_write_uint(bitstream, residual_color_transform_flag, 1);
+ }
+ h264_bitstream_write_ue(bitstream, seq->bit_depth_luma_minus8); /* bit_depth_luma_minus8 */
+ h264_bitstream_write_ue(bitstream, seq->bit_depth_chroma_minus8); /* bit_depth_chroma_minus8 */
+ h264_bitstream_write_uint(bitstream, b_qpprime_y_zero_transform_bypass, 1); /* b_qpprime_y_zero_transform_bypass */
+ ENCODER_ASSERT(seq->seq_fields.bits.seq_scaling_matrix_present_flag == 0);
+ h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.seq_scaling_matrix_present_flag, 1); /*seq_scaling_matrix_present_flag */
+
+ if (seq->seq_fields.bits.seq_scaling_matrix_present_flag) {
+ for (i = 0; i < (seq->seq_fields.bits.chroma_format_idc != 3 ? 8 : 12); i++) {
+ h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.seq_scaling_list_present_flag, 1);
+ if (seq->seq_fields.bits.seq_scaling_list_present_flag) {
+ ENCODER_ASSERT(0);
+ /* FIXME, need write scaling list if seq_scaling_matrix_present_flag ==1*/
+ }
+ }
+ }
}
- h264_bitstream_write_ue(bitstream, log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
- h264_bitstream_write_ue(bitstream, pic_order_cnt_type); /* pic_order_cnt_type */
+ h264_bitstream_write_ue(bitstream, seq->log2_max_frame_num_minus4); /* log2_max_frame_num_minus4 */
+ h264_bitstream_write_ue(bitstream, seq->pic_order_cnt_type); /* pic_order_cnt_type */
- if (pic_order_cnt_type == 0)
- h264_bitstream_write_ue(bitstream, log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
- else {
- ENCODER_ASSERT(0);
+ if (seq->pic_order_cnt_type == 0)
+ h264_bitstream_write_ue(bitstream, seq->log2_max_pic_order_cnt_lsb_minus4);/* log2_max_pic_order_cnt_lsb_minus4 */
+ else if (seq->pic_order_cnt_type == 1) {
+ ENCODER_ASSERT(0);
+ h264_bitstream_write_uint(bitstream, seq->seq_fields.bits.delta_pic_order_always_zero_flag, 1);
+ h264_bitstream_write_se(bitstream, seq->offset_for_non_ref_pic);
+ h264_bitstream_write_se(bitstream, seq->offset_for_top_to_bottom_field);
+ h264_bitstream_write_ue(bitstream, seq->num_ref_frames_in_pic_order_cnt_cycle);
+ for ( i = 0; i < seq->num_ref_frames_in_pic_order_cnt_cycle; i++) {
+ h264_bitstream_write_se(bitstream, seq->offset_for_ref_frame[i]);
+ }
}
- h264_bitstream_write_ue(bitstream, num_ref_frames); /* num_ref_frames */
+ h264_bitstream_write_ue(bitstream, seq->max_num_ref_frames); /* num_ref_frames */
h264_bitstream_write_uint(bitstream, gaps_in_frame_num_value_allowed_flag, 1); /* gaps_in_frame_num_value_allowed_flag */
- h264_bitstream_write_ue(bitstream, mb_width - 1); /* pic_width_in_mbs_minus1 */
- h264_bitstream_write_ue(bitstream, mb_height - 1); /* pic_height_in_map_units_minus1 */
- h264_bitstream_write_uint(bitstream, frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
+ h264_bitstream_write_ue(bitstream, seq->picture_width_in_mbs - 1); /* pic_width_in_mbs_minus1 */
+ h264_bitstream_write_ue(bitstream, pic_height_in_map_units - 1); /* pic_height_in_map_units_minus1 */
+ h264_bitstream_write_uint(bitstream, seq->frame_mbs_only_flag, 1); /* frame_mbs_only_flag */
- if (!frame_mbs_only_flag) { //ONLY mbs
+ if (!seq->frame_mbs_only_flag) { //ONLY mbs
ENCODER_ASSERT(0);
+ h264_bitstream_write_uint(bitstream, mb_adaptive_frame_field, 1);
}
- h264_bitstream_write_uint(bitstream, 0, 1); /* direct_8x8_inference_flag */
- h264_bitstream_write_uint(bitstream, frame_cropping_flag, 1); /* frame_cropping_flag */
+ h264_bitstream_write_uint(bitstream, 0, 1); /* direct_8x8_inference_flag */
+ h264_bitstream_write_uint(bitstream, seq->frame_cropping_flag, 1); /* frame_cropping_flag */
- if (frame_cropping_flag) {
- h264_bitstream_write_ue(bitstream, 0); /* frame_crop_left_offset */
- h264_bitstream_write_ue(bitstream, 0); /* frame_crop_right_offset */
- h264_bitstream_write_ue(bitstream, 0); /* frame_crop_top_offset */
- h264_bitstream_write_ue(bitstream, frame_crop_bottom_offset); /* frame_crop_bottom_offset */
+ if (seq->frame_cropping_flag) {
+ h264_bitstream_write_ue(bitstream, seq->frame_crop_left_offset); /* frame_crop_left_offset */
+ h264_bitstream_write_ue(bitstream, seq->frame_crop_right_offset); /* frame_crop_right_offset */
+ h264_bitstream_write_ue(bitstream, seq->frame_crop_top_offset); /* frame_crop_top_offset */
+ h264_bitstream_write_ue(bitstream, seq->frame_crop_bottom_offset); /* frame_crop_bottom_offset */
}
-
- h264_bitstream_write_uint(bitstream, vui_present_flag, 1); /* vui_parameters_present_flag */
- h264_bitstream_write_trailing_bits(bitstream); /* rbsp_trailing_bits */
+ ENCODER_ASSERT(seq->vui_flag == 0);
+ h264_bitstream_write_uint(bitstream, seq->vui_flag, 1); /* vui_parameters_present_flag */
+ if (seq->vui_flag) {
+ /*FIXME, to write vui parameters*/
+ }
+ h264_bitstream_write_trailing_bits(bitstream); /* rbsp_trailing_bits */
return TRUE;
+}
- //end:
- //return FALSE;
+/* Serializes a picture parameter set RBSP (H.264 spec 7.3.2.2) from the
+ * VA-API VAEncPictureParameterBufferH264 into the bitstream writer,
+ * including the rbsp_trailing_bits.  The NAL header is NOT written here.
+ * num_slice_groups_minus1, pic_init_qs_minus26 and
+ * redundant_pic_cnt_present_flag are hard-wired to 0; the >0 slice-group
+ * path and scaling-list emission are unimplemented (asserts below).
+ * Always returns TRUE. */
+static gboolean
+h264_bitstream_write_pps(H264Bitstream *bitstream,
+                            VAEncPictureParameterBufferH264 *pic)
+{
+  guint32 num_slice_groups_minus1 = 0;
+  guint32 pic_init_qs_minus26 = 0;
+  guint32 redundant_pic_cnt_present_flag = 0;
+
+  h264_bitstream_write_ue(bitstream, pic->pic_parameter_set_id); /* pic_parameter_set_id */
+  h264_bitstream_write_ue(bitstream, pic->seq_parameter_set_id); /* seq_parameter_set_id */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.entropy_coding_mode_flag, 1); /* entropy_coding_mode_flag */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_order_present_flag, 1); /* pic_order_present_flag */
+  h264_bitstream_write_ue(bitstream, num_slice_groups_minus1); /*slice_groups-1*/
+
+  if (num_slice_groups_minus1 > 0) {
+    /*FIXME: slice-group map emission not implemented */
+    ENCODER_ASSERT(0);
+  }
+  h264_bitstream_write_ue(bitstream, pic->num_ref_idx_l0_active_minus1); /* num_ref_idx_l0_active_minus1 */
+  h264_bitstream_write_ue(bitstream, pic->num_ref_idx_l1_active_minus1); /* num_ref_idx_l1_active_minus1 */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.weighted_pred_flag, 1); /* weighted_pred_flag */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.weighted_bipred_idc, 2); /* weighted_bipred_idc */
+  h264_bitstream_write_se(bitstream, pic->pic_init_qp-26); /* pic_init_qp_minus26 */
+  h264_bitstream_write_se(bitstream, pic_init_qs_minus26); /* pic_init_qs_minus26 */
+  h264_bitstream_write_se(bitstream, pic->chroma_qp_index_offset); /*chroma_qp_index_offset*/
+
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.deblocking_filter_control_present_flag, 1); /* deblocking_filter_control_present_flag */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.constrained_intra_pred_flag, 1); /* constrained_intra_pred_flag */
+  h264_bitstream_write_uint(bitstream, redundant_pic_cnt_present_flag, 1); /* redundant_pic_cnt_present_flag */
+
+  /*more_rbsp_data*/
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.transform_8x8_mode_flag, 1); /* transform_8x8_mode_flag */
+  h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_scaling_matrix_present_flag, 1); /* pic_scaling_matrix_present_flag */
+  if (pic->pic_fields.bits.pic_scaling_matrix_present_flag) {
+    ENCODER_ASSERT(0);
+    /* FIXME: scaling-list emission not implemented */
+    /*
+    for (i = 0; i <
+      (6+(-( (chroma_format_idc ! = 3) ? 2 : 6) * -pic->pic_fields.bits.transform_8x8_mode_flag));
+      i++) {
+      h264_bitstream_write_uint(bitstream, pic->pic_fields.bits.pic_scaling_list_present_flag, 1);
+    }
+    */
+  }
+
+  h264_bitstream_write_se(bitstream, pic->second_chroma_qp_index_offset); /* second_chroma_qp_index_offset */
+  h264_bitstream_write_trailing_bits(bitstream); /* rbsp_trailing_bits */
+  return TRUE;
}
+
static const guint8 *
h264_next_nal(const guint8 *buffer, guint32 len, guint32 *nal_size)
{
return nal_start;
}
-
-static gboolean
-h264_bitstream_write_pps(H264Bitstream *bitstream, GstH264EncoderPrivate *h264_prv)
-{
- ENCODER_ASSERT(0);
- return TRUE;
-}
-
static int draw_picture(int width, int height,
unsigned char *Y_start,
unsigned char *U_start,