... and remove unused start and stop methods from the subclasses.
The current implementation does not require subclass-specific behavior
for the handle_frame() method.
static gboolean gst_d3d11_h264_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_close (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_h264_dec_start (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_h264_dec_stop (GstVideoDecoder * decoder);
-static GstFlowReturn gst_d3d11_h264_dec_handle_frame (GstVideoDecoder *
- decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_h264_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h264_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_close);
- decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_start);
- decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_stop);
- decoder_class->handle_frame =
- GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_h264_dec_decide_allocation);
{
GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- gst_clear_object (&self->d3d11_decoder);
- gst_clear_object (&self->device);
-
- return TRUE;
-}
-
-static gboolean
-gst_d3d11_h264_dec_start (GstVideoDecoder * decoder)
-{
- return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
-}
-
-static gboolean
-gst_d3d11_h264_dec_stop (GstVideoDecoder * decoder)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
-
- gst_h264_picture_replace (&self->current_picture, NULL);
if (self->output_state)
gst_video_codec_state_unref (self->output_state);
self->output_state = NULL;
- return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
-}
-
-static GstFlowReturn
-gst_d3d11_h264_dec_handle_frame (GstVideoDecoder * decoder,
- GstVideoCodecFrame * frame)
-{
- GstD3D11H264Dec *self = GST_D3D11_H264_DEC (decoder);
- GstBuffer *in_buf = frame->input_buffer;
-
- GST_LOG_OBJECT (self,
- "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
- GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
- GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
-
- if (!self->current_picture) {
- GST_ERROR_OBJECT (self, "No current picture");
- gst_video_decoder_drop_frame (decoder, frame);
-
- return GST_FLOW_ERROR;
- }
-
- gst_video_codec_frame_set_user_data (frame,
- self->current_picture, (GDestroyNotify) gst_h264_picture_unref);
- self->current_picture = NULL;
-
- gst_video_codec_frame_unref (frame);
+ gst_clear_object (&self->d3d11_decoder);
+ gst_clear_object (&self->device);
- return GST_FLOW_OK;
+ return TRUE;
}
static gboolean
GST_LOG_OBJECT (self, "New h264picture %p", picture);
- gst_h264_picture_replace (&self->current_picture, picture);
-
return TRUE;
}
GstD3D11Decoder *d3d11_decoder;
- GstH264Picture *current_picture;
-
/* Pointing current bitstream buffer */
guint current_offset;
guint bitstream_buffer_size;
static gboolean gst_d3d11_h265_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_close (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_h265_dec_start (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder);
-static GstFlowReturn gst_d3d11_h265_dec_handle_frame (GstVideoDecoder *
- decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_h265_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_close);
- decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_start);
- decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_stop);
- decoder_class->handle_frame =
- GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_h265_dec_decide_allocation);
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
+ if (self->output_state)
+ gst_video_codec_state_unref (self->output_state);
+ self->output_state = NULL;
+
gst_clear_object (&self->d3d11_decoder);
gst_clear_object (&self->device);
}
static gboolean
-gst_d3d11_h265_dec_start (GstVideoDecoder * decoder)
-{
- return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
-}
-
-static gboolean
-gst_d3d11_h265_dec_stop (GstVideoDecoder * decoder)
-{
- GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
-
- gst_h265_picture_replace (&self->current_picture, NULL);
-
- return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
-}
-
-static GstFlowReturn
-gst_d3d11_h265_dec_handle_frame (GstVideoDecoder * decoder,
- GstVideoCodecFrame * frame)
-{
- GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
- GstBuffer *in_buf = frame->input_buffer;
-
- GST_LOG_OBJECT (self,
- "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
- GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
- GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
-
- if (!self->current_picture) {
- GST_ERROR_OBJECT (self, "No current picture");
- gst_video_decoder_drop_frame (decoder, frame);
-
- return GST_FLOW_ERROR;
- }
-
- gst_video_codec_frame_set_user_data (frame,
- self->current_picture, (GDestroyNotify) gst_h265_picture_unref);
- self->current_picture = NULL;
-
- gst_video_codec_frame_unref (frame);
-
- return GST_FLOW_OK;
-}
-
-static gboolean
gst_d3d11_h265_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11H265Dec *self = GST_D3D11_H265_DEC (decoder);
GST_LOG_OBJECT (self, "New h265picture %p", picture);
- gst_h265_picture_replace (&self->current_picture, picture);
-
return TRUE;
}
GstD3D11Decoder *d3d11_decoder;
- GstH265Picture *current_picture;
-
/* Pointing current bitstream buffer */
guint current_offset;
guint bitstream_buffer_size;
static gboolean gst_d3d11_vp9_dec_open (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_close (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_vp9_dec_start (GstVideoDecoder * decoder);
-static gboolean gst_d3d11_vp9_dec_stop (GstVideoDecoder * decoder);
-static GstFlowReturn gst_d3d11_vp9_dec_handle_frame (GstVideoDecoder *
- decoder, GstVideoCodecFrame * frame);
static gboolean gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder);
static gboolean gst_d3d11_vp9_dec_decide_allocation (GstVideoDecoder *
decoder, GstQuery * query);
decoder_class->open = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_open);
decoder_class->close = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_close);
- decoder_class->start = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_start);
- decoder_class->stop = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_stop);
- decoder_class->handle_frame =
- GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_handle_frame);
decoder_class->negotiate = GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_negotiate);
decoder_class->decide_allocation =
GST_DEBUG_FUNCPTR (gst_d3d11_vp9_dec_decide_allocation);
}
static gboolean
-gst_d3d11_vp9_dec_start (GstVideoDecoder * decoder)
-{
- return GST_VIDEO_DECODER_CLASS (parent_class)->start (decoder);
-}
-
-static gboolean
-gst_d3d11_vp9_dec_stop (GstVideoDecoder * decoder)
-{
- GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
-
- gst_vp9_picture_replace (&self->current_picture, NULL);
-
- return GST_VIDEO_DECODER_CLASS (parent_class)->stop (decoder);
-}
-
-static GstFlowReturn
-gst_d3d11_vp9_dec_handle_frame (GstVideoDecoder * decoder,
- GstVideoCodecFrame * frame)
-{
- GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
- GstBuffer *in_buf = frame->input_buffer;
-
- GST_LOG_OBJECT (self,
- "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
- GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
- GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
-
- if (!self->current_picture) {
- GST_ERROR_OBJECT (self, "No current picture");
- gst_video_decoder_drop_frame (decoder, frame);
-
- return GST_FLOW_ERROR;
- }
-
- gst_video_codec_frame_set_user_data (frame,
- self->current_picture, (GDestroyNotify) gst_vp9_picture_unref);
- self->current_picture = NULL;
-
- gst_video_codec_frame_unref (frame);
-
- return GST_FLOW_OK;
-}
-
-static gboolean
gst_d3d11_vp9_dec_negotiate (GstVideoDecoder * decoder)
{
GstD3D11Vp9Dec *self = GST_D3D11_VP9_DEC (decoder);
GST_LOG_OBJECT (self, "New VP9 picture %p", picture);
- gst_vp9_picture_replace (&self->current_picture, picture);
-
return TRUE;
}
gst_vp9_picture_set_user_data (new_picture,
gst_buffer_ref (view_buffer), (GDestroyNotify) gst_buffer_unref);
- gst_vp9_picture_replace (&self->current_picture, new_picture);
-
return new_picture;
}
GstD3D11Decoder *d3d11_decoder;
- GstVp9Picture *current_picture;
-
guint width, height;
GstVP9Profile profile;
static GstFlowReturn gst_h264_decoder_finish (GstVideoDecoder * decoder);
static gboolean gst_h264_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_h264_decoder_drain (GstVideoDecoder * decoder);
+static GstFlowReturn gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
/* codec spcific functions */
static gboolean gst_h264_decoder_process_sps (GstH264Decoder * self,
decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h264_decoder_finish);
decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h264_decoder_flush);
decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h264_decoder_drain);
+ decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_h264_decoder_handle_frame);
}
static void
return gst_h264_decoder_drain (decoder);
}
+static GstFlowReturn
+gst_h264_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
+{
+ GstH264Decoder *self = GST_H264_DECODER (decoder);
+ GstH264DecoderPrivate *priv = self->priv;
+ GstBuffer *in_buf = frame->input_buffer;
+
+ GST_LOG_OBJECT (self,
+ "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
+
+ if (!priv->current_picture) {
+ GST_ERROR_OBJECT (self, "No current picture");
+ gst_video_decoder_drop_frame (decoder, frame);
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_video_codec_frame_set_user_data (frame,
+ gst_h264_picture_ref (priv->current_picture),
+ (GDestroyNotify) gst_h264_picture_unref);
+
+ gst_video_codec_frame_unref (frame);
+
+ return GST_FLOW_OK;
+}
+
static gboolean
gst_h264_decoder_parse_sps (GstH264Decoder * self, GstH264NalUnit * nalu)
{
{
GstH264DecoderPrivate *priv = self->priv;
GstH264DecoderClass *klass;
- GstH264Picture *picture;
gboolean ret = TRUE;
if (!priv->current_picture)
return TRUE;
- picture = priv->current_picture;
- priv->current_picture = NULL;
-
klass = GST_H264_DECODER_GET_CLASS (self);
if (klass->end_picture)
- ret = klass->end_picture (self, picture);
+ ret = klass->end_picture (self, priv->current_picture);
gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
/* finish picture takes ownership of the picture */
- if (!gst_h264_decoder_finish_picture (self, picture)) {
+ ret = gst_h264_decoder_finish_picture (self, priv->current_picture);
+ priv->current_picture = NULL;
+
+ if (!ret) {
GST_ERROR_OBJECT (self, "Failed to finish picture");
return FALSE;
}
- return ret;
+ return TRUE;
}
static gint
static GstFlowReturn gst_h265_decoder_finish (GstVideoDecoder * decoder);
static gboolean gst_h265_decoder_flush (GstVideoDecoder * decoder);
static GstFlowReturn gst_h265_decoder_drain (GstVideoDecoder * decoder);
+static GstFlowReturn gst_h265_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
static gboolean gst_h265_decoder_finish_current_picture (GstH265Decoder * self);
static void gst_h265_decoder_clear_dpb (GstH265Decoder * self);
decoder_class->finish = GST_DEBUG_FUNCPTR (gst_h265_decoder_finish);
decoder_class->flush = GST_DEBUG_FUNCPTR (gst_h265_decoder_flush);
decoder_class->drain = GST_DEBUG_FUNCPTR (gst_h265_decoder_drain);
+ decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_h265_decoder_handle_frame);
}
static void
{
GstH265DecoderPrivate *priv = self->priv;
GstH265DecoderClass *klass;
- GstH265Picture *picture;
gboolean ret = TRUE;
if (!priv->current_picture)
return TRUE;
- picture = priv->current_picture;
- priv->current_picture = NULL;
-
klass = GST_H265_DECODER_GET_CLASS (self);
if (klass->end_picture)
- ret = klass->end_picture (self, picture);
+ ret = klass->end_picture (self, priv->current_picture);
- if (picture->output_flag) {
+ if (priv->current_picture->output_flag) {
gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
} else {
- GST_DEBUG_OBJECT (self, "Skip have_frame for picture %p", picture);
+ GST_DEBUG_OBJECT (self, "Skip have_frame for picture %p",
+ priv->current_picture);
}
/* finish picture takes ownership of the picture */
- if (!gst_h265_decoder_finish_picture (self, picture)) {
+ ret = gst_h265_decoder_finish_picture (self, priv->current_picture);
+ priv->current_picture = NULL;
+
+ if (!ret) {
GST_ERROR_OBJECT (self, "Failed to finish picture");
return FALSE;
}
- return ret;
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_h265_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
+{
+ GstH265Decoder *self = GST_H265_DECODER (decoder);
+ GstH265DecoderPrivate *priv = self->priv;
+ GstBuffer *in_buf = frame->input_buffer;
+
+ GST_LOG_OBJECT (self,
+ "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
+
+ if (!priv->current_picture) {
+ GST_ERROR_OBJECT (self, "No current picture");
+ gst_video_decoder_drop_frame (decoder, frame);
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_video_codec_frame_set_user_data (frame,
+ gst_h265_picture_ref (priv->current_picture),
+ (GDestroyNotify) gst_h265_picture_unref);
+
+ gst_video_codec_frame_unref (frame);
+
+ return GST_FLOW_OK;
}
GstVp9Parser *parser;
GstVp9Dpb *dpb;
+ GstVp9Picture *current_picture;
+
guint num_frames; /* number of frames in a super frame */
gsize frame_sizes[8]; /* size of frames in a super frame */
guint frame_cnt; /* frame count variable for super frame */
GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos);
static gboolean gst_vp9_decoder_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state);
+static GstFlowReturn gst_vp9_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame);
static GstVp9Picture *gst_vp9_decoder_duplicate_picture_default (GstVp9Decoder *
decoder, GstVp9Picture * picture);
decoder_class->stop = GST_DEBUG_FUNCPTR (gst_vp9_decoder_stop);
decoder_class->parse = GST_DEBUG_FUNCPTR (gst_vp9_decoder_parse);
decoder_class->set_format = GST_DEBUG_FUNCPTR (gst_vp9_decoder_set_format);
+ decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_vp9_decoder_handle_frame);
klass->duplicate_picture =
GST_DEBUG_FUNCPTR (gst_vp9_decoder_duplicate_picture_default);
picture->size = buf_size;
gst_video_decoder_add_to_frame (GST_VIDEO_DECODER (self), picture->size);
+
+ /* hold pointer to picture. default handle_frame implementation uses it */
+ priv->current_picture = picture;
flow_ret = gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
if (flow_ret == GST_FLOW_OK) {
}
gst_vp9_picture_unref (picture);
+ priv->current_picture = NULL;
return flow_ret;
}
gst_adapter_unmap (adapter);
gst_video_decoder_add_to_frame (GST_VIDEO_DECODER (self), picture->size);
+
+ /* hold pointer to picture. default handle_frame implementation uses it */
+ priv->current_picture = picture;
flow_ret = gst_video_decoder_have_frame (GST_VIDEO_DECODER (self));
if (flow_ret == GST_FLOW_OK && klass->output_picture) {
picture->data = NULL;
gst_vp9_dpb_add (priv->dpb, picture);
+ priv->current_picture = NULL;
return flow_ret;
return new_picture;
}
+
+static GstFlowReturn
+gst_vp9_decoder_handle_frame (GstVideoDecoder * decoder,
+ GstVideoCodecFrame * frame)
+{
+ GstVp9Decoder *self = GST_VP9_DECODER (decoder);
+ GstVp9DecoderPrivate *priv = self->priv;
+ GstBuffer *in_buf = frame->input_buffer;
+
+ GST_LOG_OBJECT (self,
+ "handle frame, PTS: %" GST_TIME_FORMAT ", DTS: %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_PTS (in_buf)),
+ GST_TIME_ARGS (GST_BUFFER_DTS (in_buf)));
+
+ if (!priv->current_picture) {
+ GST_ERROR_OBJECT (self, "No current picture");
+ gst_video_decoder_drop_frame (decoder, frame);
+
+ return GST_FLOW_ERROR;
+ }
+
+ gst_video_codec_frame_set_user_data (frame,
+ gst_vp9_picture_ref (priv->current_picture),
+ (GDestroyNotify) gst_vp9_picture_unref);
+
+ gst_video_codec_frame_unref (frame);
+
+ return GST_FLOW_OK;
+}