From: Edward Hervey
Date: Thu, 3 Nov 2011 13:01:41 +0000 (+0100)
Subject: [MOVED FROM BAD 094/134] vp8: Port to 0.11
X-Git-Tag: RELEASE-0.11.99~62
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=c03ae2f3c2d8fd453fa2899f98b75052c3fc392a;p=platform%2Fupstream%2Fgst-plugins-good.git

[MOVED FROM BAD 094/134] vp8: Port to 0.11
---

diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c
index 4376f4b..4a4a260 100644
--- a/ext/vp8/gstvp8dec.c
+++ b/ext/vp8/gstvp8dec.c
@@ -104,7 +104,7 @@ static gboolean gst_vp8_dec_reset (GstBaseVideoDecoder * decoder);
 static GstFlowReturn gst_vp8_dec_parse_data (GstBaseVideoDecoder * decoder,
     gboolean at_eos);
 static GstFlowReturn gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
-    GstVideoFrame * frame);
+    GstVideoFrameState * frame);
 
 static GstStaticPadTemplate gst_vp8_dec_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
@@ -117,36 +117,22 @@ static GstStaticPadTemplate gst_vp8_dec_src_template =
 GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
     );
 
-GST_BOILERPLATE (GstVP8Dec, gst_vp8_dec, GstBaseVideoDecoder,
-    GST_TYPE_BASE_VIDEO_DECODER);
-
-static void
-gst_vp8_dec_base_init (gpointer g_class)
-{
-  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_dec_src_template));
-  gst_element_class_add_pad_template (element_class,
-      gst_static_pad_template_get (&gst_vp8_dec_sink_template));
-
-  gst_element_class_set_details_simple (element_class,
-      "On2 VP8 Decoder",
-      "Codec/Decoder/Video",
-      "Decode VP8 video streams", "David Schleef ");
-}
+#define gst_vp8_dec_parent_class parent_class
+G_DEFINE_TYPE (GstVP8Dec, gst_vp8_dec, GST_TYPE_BASE_VIDEO_DECODER);
 
 static void
 gst_vp8_dec_class_init (GstVP8DecClass * klass)
 {
   GObjectClass *gobject_class;
+  GstElementClass *element_class;
   GstBaseVideoDecoderClass *base_video_decoder_class;
 
   gobject_class = G_OBJECT_CLASS (klass);
   base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
 
   gobject_class->set_property = gst_vp8_dec_set_property;
   gobject_class->get_property = gst_vp8_dec_get_property;
@@ -174,6 +160,16 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass)
           0, 16, DEFAULT_NOISE_LEVEL,
           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
 
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_dec_src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_vp8_dec_sink_template));
+
+  gst_element_class_set_details_simple (element_class,
+      "On2 VP8 Decoder",
+      "Codec/Decoder/Video",
+      "Decode VP8 video streams", "David Schleef ");
+
   base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_vp8_dec_start);
   base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vp8_dec_stop);
   base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_vp8_dec_reset);
@@ -188,7 +184,7 @@ gst_vp8_dec_class_init (GstVP8DecClass * klass)
 }
 
 static void
-gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec, GstVP8DecClass * klass)
+gst_vp8_dec_init (GstVP8Dec * gst_vp8_dec)
 {
   GstBaseVideoDecoder *decoder = (GstBaseVideoDecoder *) gst_vp8_dec;
 
@@ -318,58 +314,50 @@ gst_vp8_dec_send_tags (GstVP8Dec * dec)
 {
   GstTagList *list;
 
-  list = gst_tag_list_new ();
+  list = gst_tag_list_new_empty ();
   gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
       GST_TAG_VIDEO_CODEC, "VP8 video", NULL);
 
-  gst_element_found_tags_for_pad (GST_ELEMENT (dec),
-      GST_BASE_VIDEO_CODEC_SRC_PAD (dec), list);
+  gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (dec),
+      gst_event_new_tag (list));
 }
 
 static void
 gst_vp8_dec_image_to_buffer (GstVP8Dec * dec, const vpx_image_t * img,
     GstBuffer * buffer)
 {
-  int stride, w, h, i;
-  guint8 *d;
-  GstVideoState *state = &GST_BASE_VIDEO_CODEC (dec)->state;
-
-  d = GST_BUFFER_DATA (buffer) +
-      gst_video_format_get_component_offset (state->format, 0,
-      state->width, state->height);
-  stride = gst_video_format_get_row_stride (state->format, 0, state->width);
-  h = gst_video_format_get_component_height (state->format, 0, state->height);
-  h = MIN (h, img->h);
-  w = gst_video_format_get_component_width (state->format, 0, state->width);
-  w = MIN (w, img->w);
-
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_Y] + i * img->stride[VPX_PLANE_Y], w);
-
-  d = GST_BUFFER_DATA (buffer) +
-      gst_video_format_get_component_offset (state->format, 1,
-      state->width, state->height);
-  stride = gst_video_format_get_row_stride (state->format, 1, state->width);
-  h = gst_video_format_get_component_height (state->format, 1, state->height);
-  h = MIN (h, img->h >> img->y_chroma_shift);
-  w = gst_video_format_get_component_width (state->format, 1, state->width);
-  w = MIN (w, img->w >> img->x_chroma_shift);
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_U] + i * img->stride[VPX_PLANE_U], w);
-
-  d = GST_BUFFER_DATA (buffer) +
-      gst_video_format_get_component_offset (state->format, 2,
-      state->width, state->height);
-  /* Same stride, height, width as above */
-  for (i = 0; i < h; i++)
-    memcpy (d + i * stride,
-        img->planes[VPX_PLANE_V] + i * img->stride[VPX_PLANE_V], w);
+  int deststride, srcstride, height, width, line, comp;
+  guint8 *dest, *src;
+  GstVideoFrame frame;
+  GstVideoInfo *info = &GST_BASE_VIDEO_CODEC (dec)->info;
+
+  if (!gst_video_frame_map (&frame, info, buffer, GST_MAP_WRITE)) {
+    GST_ERROR_OBJECT (dec, "Could not map video buffer");
+  }
+
+  for (comp = 0; comp < 3; comp++) {
+    dest = GST_VIDEO_FRAME_COMP_DATA (&frame, comp);
+    src = img->planes[comp];
+    width = GST_VIDEO_FRAME_COMP_WIDTH (&frame, comp);
+    height = GST_VIDEO_FRAME_COMP_HEIGHT (&frame, comp);
+    deststride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, comp);
+    srcstride = img->stride[comp];
+
+    /* FIXME (Edward) : Do a plane memcpy is srcstride == deststride instead
+     * of copying line by line */
+    for (line = 0; line < height; line++) {
+      memcpy (dest, src, width);
+      dest += deststride;
+      src += srcstride;
+    }
+  }
+
+  gst_video_frame_unmap (&frame);
 }
 
 static GstFlowReturn
-gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
+gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
+    GstVideoFrameState * frame)
 {
   GstVP8Dec *dec;
   GstFlowReturn ret = GST_FLOW_OK;
@@ -378,11 +366,14 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
   vpx_image_t *img;
   long decoder_deadline = 0;
   GstClockTimeDiff deadline;
+  gsize size;
+  gpointer data;
 
   GST_DEBUG_OBJECT (decoder, "handle_frame");
 
   dec = GST_VP8_DEC (decoder);
 
+  /* FIXME : Move this to a separate function for clarity */
   if (!dec->decoder_inited) {
     int flags = 0;
     vpx_codec_stream_info_t stream_info;
@@ -392,9 +383,13 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
     memset (&stream_info, 0, sizeof (stream_info));
     stream_info.sz = sizeof (stream_info);
 
-    status = vpx_codec_peek_stream_info (&vpx_codec_vp8_dx_algo,
-        GST_BUFFER_DATA (frame->sink_buffer),
-        GST_BUFFER_SIZE (frame->sink_buffer), &stream_info);
+    data = gst_buffer_map (frame->sink_buffer, &size, NULL, GST_MAP_READ);
+
+    status =
+        vpx_codec_peek_stream_info (&vpx_codec_vp8_dx_algo, data, size,
+        &stream_info);
+
+    gst_buffer_unmap (frame->sink_buffer, data, size);
 
     if (status != VPX_CODEC_OK || !stream_info.is_kf) {
       GST_WARNING_OBJECT (decoder, "No keyframe, skipping");
@@ -461,9 +456,12 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
     decoder_deadline = MAX (1, deadline / GST_MSECOND);
   }
 
-  status = vpx_codec_decode (&dec->decoder,
-      GST_BUFFER_DATA (frame->sink_buffer),
-      GST_BUFFER_SIZE (frame->sink_buffer), NULL, decoder_deadline);
+  data = gst_buffer_map (frame->sink_buffer, &size, NULL, GST_MAP_READ);
+
+  status = vpx_codec_decode (&dec->decoder, data, size, NULL, decoder_deadline);
+
+  gst_buffer_unmap (frame->sink_buffer, data, size);
+
   if (status) {
     GST_ELEMENT_ERROR (decoder, LIBRARY, ENCODE, ("Failed to decode frame"),
         ("%s", gst_vpx_error_name (status)));
diff --git a/ext/vp8/gstvp8enc.c b/ext/vp8/gstvp8enc.c
index e832975..7a18328 100644
--- a/ext/vp8/gstvp8enc.c
+++ b/ext/vp8/gstvp8enc.c
@@ -222,12 +222,12 @@ static void gst_vp8_enc_get_property (GObject * object, guint prop_id,
 static gboolean gst_vp8_enc_start (GstBaseVideoEncoder * encoder);
 static gboolean gst_vp8_enc_stop (GstBaseVideoEncoder * encoder);
 static gboolean gst_vp8_enc_set_format (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoState * state);
+    base_video_encoder, GstVideoInfo * info);
 static gboolean gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder);
 static GstFlowReturn gst_vp8_enc_handle_frame (GstBaseVideoEncoder *
-    base_video_encoder, GstVideoFrame * frame);
+    base_video_encoder, GstVideoFrameState * frame);
 static GstFlowReturn gst_vp8_enc_shape_output (GstBaseVideoEncoder * encoder,
-    GstVideoFrame * frame);
+    GstVideoFrameState * frame);
 static gboolean gst_vp8_enc_sink_event (GstBaseVideoEncoder *
     base_video_encoder, GstEvent * event);
 
@@ -235,7 +235,7 @@ static GstStaticPadTemplate gst_vp8_enc_sink_template =
 GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("I420"))
     );
 
 static GstStaticPadTemplate gst_vp8_enc_src_template =
@@ -245,29 +245,27 @@ GST_STATIC_PAD_TEMPLATE ("src",
     GST_STATIC_CAPS ("video/x-vp8")
     );
 
+#define gst_vp8_enc_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstVP8Enc, gst_vp8_enc, GST_TYPE_BASE_VIDEO_ENCODER,
+    G_IMPLEMENT_INTERFACE (GST_TYPE_TAG_SETTER, NULL);
+    G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
+
+
 static void
-do_init (GType vp8enc_type)
+gst_vp8_enc_class_init (GstVP8EncClass * klass)
 {
-  static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };
-  const GInterfaceInfo preset_interface_info = {
-    NULL,                       /* interface_init */
-    NULL,                       /* interface_finalize */
-    NULL                        /* interface_data */
-  };
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+  GstBaseVideoEncoderClass *base_video_encoder_class;
 
-  g_type_add_interface_static (vp8enc_type, GST_TYPE_TAG_SETTER,
-      &tag_setter_info);
-  g_type_add_interface_static (vp8enc_type, GST_TYPE_PRESET,
-      &preset_interface_info);
-}
-GST_BOILERPLATE_FULL (GstVP8Enc, gst_vp8_enc, GstBaseVideoEncoder,
-    GST_TYPE_BASE_VIDEO_ENCODER, do_init);
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+  base_video_encoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
 
-static void
-gst_vp8_enc_base_init (gpointer g_class)
-{
-  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+  gobject_class->set_property = gst_vp8_enc_set_property;
+  gobject_class->get_property = gst_vp8_enc_get_property;
+  gobject_class->finalize = gst_vp8_enc_finalize;
 
   gst_element_class_add_pad_template (element_class,
       gst_static_pad_template_get (&gst_vp8_enc_src_template));
@@ -278,20 +276,6 @@ gst_vp8_enc_base_init (gpointer g_class)
       "On2 VP8 Encoder",
       "Codec/Encoder/Video",
       "Encode VP8 video streams", "David Schleef ");
-}
-
-static void
-gst_vp8_enc_class_init (GstVP8EncClass * klass)
-{
-  GObjectClass *gobject_class;
-  GstBaseVideoEncoderClass *base_video_encoder_class;
-
-  gobject_class = G_OBJECT_CLASS (klass);
-  base_video_encoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
-
-  gobject_class->set_property = gst_vp8_enc_set_property;
-  gobject_class->get_property = gst_vp8_enc_get_property;
-  gobject_class->finalize = gst_vp8_enc_finalize;
 
   base_video_encoder_class->start = gst_vp8_enc_start;
   base_video_encoder_class->stop = gst_vp8_enc_stop;
@@ -446,7 +430,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
 }
 
 static void
-gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc, GstVP8EncClass * klass)
+gst_vp8_enc_init (GstVP8Enc * gst_vp8_enc)
 {
   GST_DEBUG_OBJECT (gst_vp8_enc, "init");
 
@@ -706,7 +690,7 @@ gst_vp8_enc_stop (GstBaseVideoEncoder * base_video_encoder)
 
 static gboolean
 gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoState * state)
+    GstVideoInfo * info)
 {
   GstVP8Enc *encoder;
   vpx_codec_enc_cfg_t cfg;
@@ -732,10 +716,10 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
     return FALSE;
   }
 
-  cfg.g_w = state->width;
-  cfg.g_h = state->height;
-  cfg.g_timebase.num = state->fps_d;
-  cfg.g_timebase.den = state->fps_n;
+  cfg.g_w = info->width;
+  cfg.g_h = info->height;
+  cfg.g_timebase.num = info->fps_d;
+  cfg.g_timebase.den = info->fps_n;
 
   cfg.g_error_resilient = encoder->error_resilient;
   cfg.g_lag_in_frames = encoder->max_latency;
@@ -838,7 +822,7 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
 
   gst_base_video_encoder_set_latency (base_video_encoder, 0,
       gst_util_uint64_scale (encoder->max_latency,
-          state->fps_d * GST_SECOND, state->fps_n));
+          info->fps_d * GST_SECOND, info->fps_n));
   encoder->inited = TRUE;
 
   /* prepare cached image buffer setup */
@@ -848,39 +832,29 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
   image->fmt = VPX_IMG_FMT_I420;
   image->bps = 12;
   image->x_chroma_shift = image->y_chroma_shift = 1;
-  image->w = image->d_w = state->width;
-  image->h = image->d_h = state->height;
-
-  image->stride[VPX_PLANE_Y] =
-      gst_video_format_get_row_stride (state->format, 0, state->width);
-  image->stride[VPX_PLANE_U] =
-      gst_video_format_get_row_stride (state->format, 1, state->width);
-  image->stride[VPX_PLANE_V] =
-      gst_video_format_get_row_stride (state->format, 2, state->width);
-  image->planes[VPX_PLANE_Y] =
-      data + gst_video_format_get_component_offset (state->format, 0,
-      state->width, state->height);
-  image->planes[VPX_PLANE_U] =
-      data + gst_video_format_get_component_offset (state->format, 1,
-      state->width, state->height);
-  image->planes[VPX_PLANE_V] =
-      data + gst_video_format_get_component_offset (state->format, 2,
-      state->width, state->height);
+  image->w = image->d_w = info->width;
+  image->h = image->d_h = info->height;
+  image->stride[VPX_PLANE_Y] = GST_VIDEO_INFO_COMP_STRIDE (info, 0);
+  image->stride[VPX_PLANE_U] = GST_VIDEO_INFO_COMP_STRIDE (info, 1);
+  image->stride[VPX_PLANE_V] = GST_VIDEO_INFO_COMP_STRIDE (info, 2);
+  image->planes[VPX_PLANE_Y] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 0);
+  image->planes[VPX_PLANE_U] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 1);
+  image->planes[VPX_PLANE_V] = data + GST_VIDEO_INFO_COMP_OFFSET (info, 2);
 
   caps = gst_caps_new_simple ("video/x-vp8",
-      "width", G_TYPE_INT, state->width,
-      "height", G_TYPE_INT, state->height,
-      "framerate", GST_TYPE_FRACTION, state->fps_n,
-      state->fps_d,
-      "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
-      state->par_d, NULL);
+      "width", G_TYPE_INT, info->width,
+      "height", G_TYPE_INT, info->height,
+      "framerate", GST_TYPE_FRACTION, info->fps_n,
+      info->fps_d,
+      "pixel-aspect-ratio", GST_TYPE_FRACTION, info->par_n, info->par_d, NULL);
 
   {
     GstStructure *s;
    GstBuffer *stream_hdr, *vorbiscomment;
     const GstTagList *iface_tags;
     GValue array = { 0, };
     GValue value = { 0, };
+    gsize size;
 
     s = gst_caps_get_structure (caps, 0);
     /* put buffers in a fixed list */
@@ -889,19 +863,21 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
 
     /* Create Ogg stream-info */
     stream_hdr = gst_buffer_new_and_alloc (26);
-    data = GST_BUFFER_DATA (stream_hdr);
+    data = gst_buffer_map (stream_hdr, &size, NULL, GST_MAP_WRITE);
 
     GST_WRITE_UINT8 (data, 0x4F);
     GST_WRITE_UINT32_BE (data + 1, 0x56503830); /* "VP80" */
     GST_WRITE_UINT8 (data + 5, 0x01);   /* stream info header */
     GST_WRITE_UINT8 (data + 6, 1);      /* Major version 1 */
     GST_WRITE_UINT8 (data + 7, 0);      /* Minor version 0 */
-    GST_WRITE_UINT16_BE (data + 8, state->width);
-    GST_WRITE_UINT16_BE (data + 10, state->height);
-    GST_WRITE_UINT24_BE (data + 12, state->par_n);
-    GST_WRITE_UINT24_BE (data + 15, state->par_d);
-    GST_WRITE_UINT32_BE (data + 18, state->fps_n);
-    GST_WRITE_UINT32_BE (data + 22, state->fps_d);
+    GST_WRITE_UINT16_BE (data + 8, info->width);
+    GST_WRITE_UINT16_BE (data + 10, info->height);
+    GST_WRITE_UINT24_BE (data + 12, info->par_n);
+    GST_WRITE_UINT24_BE (data + 15, info->par_d);
+    GST_WRITE_UINT32_BE (data + 18, info->fps_n);
+    GST_WRITE_UINT32_BE (data + 22, info->fps_d);
+
+    gst_buffer_unmap (stream_hdr, data, size);
     GST_BUFFER_FLAG_SET (stream_hdr, GST_BUFFER_FLAG_IN_CAPS);
     gst_value_set_buffer (&value, stream_hdr);
 
@@ -943,7 +919,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
   const vpx_codec_cx_pkt_t *pkt;
   GstBaseVideoEncoder *base_video_encoder;
   GstVP8EncCoderHook *hook;
-  GstVideoFrame *frame;
+  GstVideoFrameState *frame;
   GstFlowReturn ret = GST_FLOW_OK;
 
   base_video_encoder = GST_BASE_VIDEO_ENCODER (encoder);
@@ -966,7 +942,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
     frame = gst_base_video_encoder_get_oldest_frame (base_video_encoder);
     if (frame != NULL) {
       buffer = gst_buffer_new ();
-      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_PREROLL);
+      GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_LIVE);
       frame->src_buffer = buffer;
       gst_base_video_encoder_finish_frame (base_video_encoder, frame);
     }
@@ -985,9 +961,10 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
       frame->is_sync_point = (pkt->data.frame.flags & VPX_FRAME_IS_KEY) != 0;
       hook = frame->coder_hook;
 
-      buffer = gst_buffer_new_and_alloc (pkt->data.frame.sz);
-
-      memcpy (GST_BUFFER_DATA (buffer), pkt->data.frame.buf, pkt->data.frame.sz);
+      /* FIXME : It would be nice to avoid the memory copy ... */
+      buffer =
+          gst_buffer_new_wrapped (g_memdup (pkt->data.frame.buf,
+              pkt->data.frame.sz), pkt->data.frame.sz);
 
       if (hook->image)
         g_slice_free (vpx_image_t, hook->image);
@@ -1049,21 +1026,28 @@ static vpx_image_t *
 gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
 {
   vpx_image_t *image = g_slice_new (vpx_image_t);
-  guint8 *data = GST_BUFFER_DATA (buffer);
+  GstVideoFrame frame;
 
   memcpy (image, &enc->image, sizeof (*image));
 
-  image->img_data = data;
-  image->planes[VPX_PLANE_Y] += (data - (guint8 *) NULL);
-  image->planes[VPX_PLANE_U] += (data - (guint8 *) NULL);
-  image->planes[VPX_PLANE_V] += (data - (guint8 *) NULL);
+  gst_video_frame_map (&frame, &GST_BASE_VIDEO_CODEC (enc)->info,
+      buffer, GST_MAP_READ);
+
+  image->img_data = frame.data[0];
+  image->planes[VPX_PLANE_Y] = frame.data[0];
+  image->planes[VPX_PLANE_U] = frame.data[1];
+  image->planes[VPX_PLANE_V] = frame.data[2];
+
+  /* FIXME : We should only unmap when we're done with it */
+
+  gst_video_frame_unmap (&frame);
 
   return image;
 }
 
 static GstFlowReturn
 gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrame * frame)
+    GstVideoFrameState * frame)
 {
   GstVP8Enc *encoder;
   const GstVideoState *state;
@@ -1125,7 +1109,7 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
 
 static GstFlowReturn
 gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
-    GstVideoFrame * frame)
+    GstVideoFrameState * frame)
 {
   GstVP8Enc *encoder;
   GstBuffer *buf;
@@ -1164,8 +1148,6 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
         gst_util_uint64_scale (frame->presentation_frame_number + 1,
         GST_SECOND * state->fps_d, state->fps_n);
 
-    gst_buffer_set_caps (buf,
-        GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
     ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
 
     if (ret != GST_FLOW_OK) {
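The change that recurs throughout this port is the replacement of direct GST_BUFFER_DATA / GST_BUFFER_SIZE access with the gst_buffer_map () / gst_buffer_unmap () pair (raw video goes through gst_video_frame_map () / gst_video_frame_unmap () in the same way). The sketch below is illustrative only and is not part of the patch: the helper function is hypothetical, and it uses the transitional 0.11 map signature seen in the hunks above (data pointer returned, gsize out-parameter, optional maxsize, flags), which later became the GstMapInfo-based API in GStreamer 1.0.

/* Illustrative sketch, not part of the commit.  Hypothetical helper showing
 * the 0.10 -> 0.11 buffer-access pattern applied throughout this port. */
#include <string.h>
#include <gst/gst.h>

static gsize
copy_buffer_bytes (GstBuffer * buf, guint8 * dest, gsize max)
{
  gpointer data;
  gsize size, n;

  /* 0.10 read the memory directly:
   *   data = GST_BUFFER_DATA (buf); size = GST_BUFFER_SIZE (buf);
   * 0.11 (as used in this patch) maps the buffer instead: */
  data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
  if (data == NULL)
    return 0;

  n = MIN (size, max);
  memcpy (dest, data, n);

  /* every map is paired with an unmap, handing back the mapped size */
  gst_buffer_unmap (buf, data, size);

  return n;
}

The decoder and encoder hunks above follow exactly this shape around vpx_codec_peek_stream_info (), vpx_codec_decode () and the Ogg stream-header writes.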