This saves a memcpy, which is always a worthwhile win.
frame->decode_frame_number = frame->system_frame_number -
base_video_decoder->reorder_depth;
- frame->decode_timestamp = -1;
- frame->presentation_timestamp = -1;
- frame->presentation_duration = -1;
+ frame->decode_timestamp = GST_CLOCK_TIME_NONE;
+ frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
+ frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->n_fields = 2;
return frame;
GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
- GST_BUFFER_OFFSET (frame->src_buffer) = -1;
- GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;
+ GST_BUFFER_OFFSET (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;
+ GST_BUFFER_OFFSET_END (frame->src_buffer) = GST_BUFFER_OFFSET_NONE;
GST_DEBUG ("pushing frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
GstVideoFrame *frame = base_video_decoder->current_frame;
GstBaseVideoDecoderClass *base_video_decoder_class;
GstFlowReturn ret = GST_FLOW_OK;
+ GstClockTime running_time;
+ GstClockTimeDiff deadline;
base_video_decoder_class =
GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
base_video_decoder->frames = g_list_append (base_video_decoder->frames,
frame);
+ running_time = gst_segment_to_running_time (&base_video_decoder->segment,
+ GST_FORMAT_TIME, frame->presentation_timestamp);
+
+ if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->earliest_time))
+ deadline = GST_CLOCK_DIFF (base_video_decoder->earliest_time, running_time);
+ else
+ deadline = 0;
+
/* do something with frame */
- ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
+ ret = base_video_decoder_class->handle_frame (base_video_decoder, frame,
+ deadline);
if (!GST_FLOW_IS_SUCCESS (ret)) {
GST_DEBUG ("flow error!");
}
int offset, int n);
GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, gboolean at_eos);
GstFlowReturn (*finish) (GstBaseVideoDecoder *coder);
- GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
+ GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame,
+ GstClockTimeDiff deadline);
GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
GstCaps *(*get_caps) (GstBaseVideoDecoder *coder);
struct _GstVideoFrame
{
- guint64 decode_timestamp;
- guint64 presentation_timestamp;
- guint64 presentation_duration;
+ GstClockTime decode_timestamp;
+ GstClockTime presentation_timestamp;
+ GstClockTime presentation_duration;
gint system_frame_number;
gint decode_frame_number;
static GstFlowReturn gst_vp8_dec_parse_data (GstBaseVideoDecoder * decoder,
gboolean at_eos);
static GstFlowReturn gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder,
- GstVideoFrame * frame);
+ GstVideoFrame * frame, GstClockTimeDiff deadline);
GType gst_vp8_dec_get_type (void);
}
static GstFlowReturn
-gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
+gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame,
+ GstClockTimeDiff deadline)
{
GstVP8Dec *dec;
GstFlowReturn ret = GST_FLOW_OK;
img = vpx_codec_get_frame (&dec->decoder, &iter);
if (img) {
- ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
-
- if (ret == GST_FLOW_OK) {
- gst_vp8_dec_image_to_buffer (dec, img, frame->src_buffer);
- gst_base_video_decoder_finish_frame (decoder, frame);
- } else {
+ if (deadline < 0) {
+ GST_LOG_OBJECT (dec, "Skipping late frame (%f s past deadline)",
+ (double) -deadline / GST_SECOND);
gst_base_video_decoder_skip_frame (decoder, frame);
+ } else {
+ ret = gst_base_video_decoder_alloc_src_frame (decoder, frame);
+
+ if (ret == GST_FLOW_OK) {
+ gst_vp8_dec_image_to_buffer (dec, img, frame->src_buffer);
+ gst_base_video_decoder_finish_frame (decoder, frame);
+ } else {
+ gst_base_video_decoder_skip_frame (decoder, frame);
+ }
}
vpx_img_free (img);