gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
}
-/**
- * gst_video_encoder_finish_frame:
- * @encoder: a #GstVideoEncoder
- * @frame: (transfer full): an encoded #GstVideoCodecFrame
- *
- * @frame must have a valid encoded data buffer, whose metadata fields
- * are then appropriately set according to frame data or no buffer at
- * all if the frame should be dropped.
- * It is subsequently pushed downstream or provided to @pre_push.
- * In any case, the frame is considered finished and released.
- *
- * After calling this function the output buffer of the frame is to be
- * considered read-only. This function will also change the metadata
- * of the buffer.
- *
- * Returns: a #GstFlowReturn resulting from sending data downstream
- */
-GstFlowReturn
-gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
- GstVideoCodecFrame * frame)
+static GstFlowReturn
+gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
{
GstVideoEncoderPrivate *priv = encoder->priv;
- GstFlowReturn ret = GST_FLOW_OK;
- GstVideoEncoderClass *encoder_class;
- GList *l;
- gboolean send_headers = FALSE;
- gboolean discont = (frame->presentation_frame_number == 0);
- GstBuffer *buffer;
- gboolean needs_reconfigure = FALSE;
-
- encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
-
- GST_LOG_OBJECT (encoder,
- "finish frame fpn %d", frame->presentation_frame_number);
-
- GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
- ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
- GST_TIME_ARGS (frame->dts));
-
- GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+ gboolean needs_reconfigure;
needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
if (!gst_video_encoder_negotiate_unlocked (encoder)) {
gst_pad_mark_reconfigure (encoder->srcpad);
if (GST_PAD_IS_FLUSHING (encoder->srcpad))
- ret = GST_FLOW_FLUSHING;
+ return GST_FLOW_FLUSHING;
else
- ret = GST_FLOW_NOT_NEGOTIATED;
- goto done;
+ return GST_FLOW_NOT_NEGOTIATED;
}
}
- if (G_UNLIKELY (priv->output_state == NULL))
- goto no_output_state;
+ if (G_UNLIKELY (priv->output_state == NULL)) {
+ GST_ERROR_OBJECT (encoder, "Output state was not configured");
+ GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
+ ("Output state was not configured"), (NULL));
+ return GST_FLOW_ERROR;
+ }
+
+ return GST_FLOW_OK;
+}
+
+static void
+gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GList *l;
/* Push all pending events that arrived before this frame */
for (l = priv->frames; l; l = l->next) {
}
gst_video_encoder_check_and_push_tags (encoder);
+}
- /* no buffer data means this frame is skipped/dropped */
- if (!frame->output_buffer) {
- gst_video_encoder_drop_frame (encoder, frame);
- goto done;
+static void
+gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+{
+  /* DTS is expected to be monotonically increasing,
+ * so a good guess is the lowest unsent PTS (all being OK) */
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GList *l;
+ GstClockTime min_ts = GST_CLOCK_TIME_NONE;
+ GstVideoCodecFrame *oframe = NULL;
+ gboolean seen_none = FALSE;
+
+ /* some maintenance regardless */
+ for (l = priv->frames; l; l = l->next) {
+ GstVideoCodecFrame *tmp = l->data;
+
+ if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
+ seen_none = TRUE;
+ continue;
+ }
+
+ if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
+ min_ts = tmp->abidata.ABI.ts;
+ oframe = tmp;
+ }
+ }
+ /* save a ts if needed */
+ if (oframe && oframe != frame) {
+ oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
}
- priv->processed++;
+ /* and set if needed */
+ if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
+ frame->dts = min_ts;
+ GST_DEBUG_OBJECT (encoder,
+ "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (frame->pts));
+ }
+}
- if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
- GstClockTime stream_time, running_time;
- GstEvent *ev;
- ForcedKeyUnitEvent *fevt = NULL;
- GList *l;
+static void
+gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
+ gboolean * discont)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
- running_time =
- gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
- frame->pts);
+ if (G_UNLIKELY (priv->new_headers)) {
+ GList *tmp, *copy = NULL;
- GST_OBJECT_LOCK (encoder);
- for (l = priv->force_key_unit; l; l = l->next) {
- ForcedKeyUnitEvent *tmp = l->data;
+ GST_DEBUG_OBJECT (encoder, "Sending headers");
- /* Skip non-pending keyunits */
- if (!tmp->pending)
- continue;
+ /* First make all buffers metadata-writable */
+ for (tmp = priv->headers; tmp; tmp = tmp->next) {
+ GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
- /* Exact match using the frame id */
- if (frame->system_frame_number == tmp->frame_id) {
- fevt = tmp;
- break;
- }
+ copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
+ }
+ g_list_free (priv->headers);
+ priv->headers = copy;
- /* Simple case, keyunit ASAP */
- if (tmp->running_time == GST_CLOCK_TIME_NONE) {
- fevt = tmp;
- break;
- }
+ for (tmp = priv->headers; tmp; tmp = tmp->next) {
+ GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
- /* Event for before this frame */
- if (tmp->running_time <= running_time) {
- fevt = tmp;
- break;
+ GST_OBJECT_LOCK (encoder);
+ priv->bytes += gst_buffer_get_size (tmpbuf);
+ GST_OBJECT_UNLOCK (encoder);
+ if (G_UNLIKELY (*discont)) {
+ GST_LOG_OBJECT (encoder, "marking discont");
+ GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
+ *discont = FALSE;
}
+
+ gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
}
+ priv->new_headers = FALSE;
+ }
+}
- if (fevt) {
- priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
+static void
+gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+{
+ GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ if (encoder_class->transform_meta) {
+ if (G_LIKELY (frame->input_buffer)) {
+ CopyMetaData data;
+
+ data.encoder = encoder;
+ data.frame = frame;
+ gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
+ } else {
+ GST_FIXME_OBJECT (encoder,
+ "Can't copy metadata because input frame disappeared");
}
- GST_OBJECT_UNLOCK (encoder);
+ }
+}
- if (fevt) {
- stream_time =
- gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
- frame->pts);
+static void
+gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame, gboolean * send_headers)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GstClockTime stream_time, running_time;
+ GstEvent *ev;
+ ForcedKeyUnitEvent *fevt = NULL;
+ GList *l;
- ev = gst_video_event_new_downstream_force_key_unit
- (frame->pts, stream_time, running_time,
- fevt->all_headers, fevt->count);
+ running_time =
+ gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
+ frame->pts);
- gst_video_encoder_push_event (encoder, ev);
+ GST_OBJECT_LOCK (encoder);
+ for (l = priv->force_key_unit; l; l = l->next) {
+ ForcedKeyUnitEvent *tmp = l->data;
- if (fevt->all_headers)
- send_headers = TRUE;
+ /* Skip non-pending keyunits */
+ if (!tmp->pending)
+ continue;
- GST_DEBUG_OBJECT (encoder,
- "Forced key unit: running-time %" GST_TIME_FORMAT
- ", all_headers %d, count %u",
- GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
- forced_key_unit_event_free (fevt);
+ /* Exact match using the frame id */
+ if (frame->system_frame_number == tmp->frame_id) {
+ fevt = tmp;
+ break;
}
+
+ /* Simple case, keyunit ASAP */
+ if (tmp->running_time == GST_CLOCK_TIME_NONE) {
+ fevt = tmp;
+ break;
+ }
+
+ /* Event for before this frame */
+ if (tmp->running_time <= running_time) {
+ fevt = tmp;
+ break;
+ }
+ }
+
+ if (fevt) {
+ priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
+ }
+ GST_OBJECT_UNLOCK (encoder);
+
+ if (fevt) {
+ stream_time =
+ gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
+ frame->pts);
+
+ ev = gst_video_event_new_downstream_force_key_unit
+ (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);
+
+ gst_video_encoder_push_event (encoder, ev);
+
+ if (fevt->all_headers)
+ *send_headers = TRUE;
+
+ GST_DEBUG_OBJECT (encoder,
+ "Forced key unit: running-time %" GST_TIME_FORMAT
+ ", all_headers %d, count %u",
+ GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
+ forced_key_unit_event_free (fevt);
+ }
+}
+
+/**
+ * gst_video_encoder_finish_frame:
+ * @encoder: a #GstVideoEncoder
+ * @frame: (transfer full): an encoded #GstVideoCodecFrame
+ *
+ * @frame must have a valid encoded data buffer, whose metadata fields
+ * are then appropriately set according to frame data or no buffer at
+ * all if the frame should be dropped.
+ * It is subsequently pushed downstream or provided to @pre_push.
+ * In any case, the frame is considered finished and released.
+ *
+ * After calling this function the output buffer of the frame is to be
+ * considered read-only. This function will also change the metadata
+ * of the buffer.
+ *
+ * Returns: a #GstFlowReturn resulting from sending data downstream
+ */
+GstFlowReturn
+gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstVideoEncoderClass *encoder_class;
+ gboolean send_headers = FALSE;
+ gboolean discont = FALSE;
+ GstBuffer *buffer;
+
+ g_return_val_if_fail (frame, GST_FLOW_ERROR);
+
+ discont = (frame->presentation_frame_number == 0
+ && frame->abidata.ABI.num_subframes == 0);
+
+ encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ GST_LOG_OBJECT (encoder,
+ "finish frame fpn %d sync point: %d", frame->presentation_frame_number,
+ GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
+
+ GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
+ ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
+ GST_TIME_ARGS (frame->dts));
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ ret = gst_video_encoder_can_push_unlocked (encoder);
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ if (frame->abidata.ABI.num_subframes == 0)
+ gst_video_encoder_push_pending_unlocked (encoder, frame);
+
+ /* no buffer data means this frame is skipped/dropped */
+ if (!frame->output_buffer) {
+ gst_video_encoder_drop_frame (encoder, frame);
+ goto done;
}
- if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
+ priv->processed++;
+
+ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
+ gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
+
+
+ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
+ && frame->abidata.ABI.num_subframes == 0) {
priv->distance_from_sync = 0;
GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
/* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
}
- /* DTS is expected monotone ascending,
- * so a good guess is the lowest unsent PTS (all being OK) */
- {
- GstClockTime min_ts = GST_CLOCK_TIME_NONE;
- GstVideoCodecFrame *oframe = NULL;
- gboolean seen_none = FALSE;
-
- /* some maintenance regardless */
- for (l = priv->frames; l; l = l->next) {
- GstVideoCodecFrame *tmp = l->data;
-
- if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
- seen_none = TRUE;
- continue;
- }
-
- if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
- min_ts = tmp->abidata.ABI.ts;
- oframe = tmp;
- }
- }
- /* save a ts if needed */
- if (oframe && oframe != frame) {
- oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
- }
-
- /* and set if needed */
- if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
- frame->dts = min_ts;
- GST_DEBUG_OBJECT (encoder,
- "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
- GST_TIME_ARGS (frame->pts));
- }
- }
+ gst_video_encoder_infer_dts_unlocked (encoder, frame);
frame->distance_from_sync = priv->distance_from_sync;
priv->distance_from_sync++;
GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
+  /* At this stage we have a full frame in the subframe use case;
+   * mark it to enable some latency optimizations
+   * in some use cases like RTP. */
+
+ GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_MARKER);
+
GST_OBJECT_LOCK (encoder);
/* update rate estimate */
priv->bytes += gst_buffer_get_size (frame->output_buffer);
}
GST_OBJECT_UNLOCK (encoder);
- if (G_UNLIKELY (send_headers || priv->new_headers)) {
- GList *tmp, *copy = NULL;
-
- GST_DEBUG_OBJECT (encoder, "Sending headers");
+ if (G_UNLIKELY (send_headers))
+ priv->new_headers = TRUE;
- /* First make all buffers metadata-writable */
- for (tmp = priv->headers; tmp; tmp = tmp->next) {
- GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
-
- copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
- }
- g_list_free (priv->headers);
- priv->headers = copy;
-
- for (tmp = priv->headers; tmp; tmp = tmp->next) {
- GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
-
- GST_OBJECT_LOCK (encoder);
- priv->bytes += gst_buffer_get_size (tmpbuf);
- GST_OBJECT_UNLOCK (encoder);
- if (G_UNLIKELY (discont)) {
- GST_LOG_OBJECT (encoder, "marking discont");
- GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
- discont = FALSE;
- }
-
- GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
- gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
- GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
- }
- priv->new_headers = FALSE;
- }
+ gst_video_encoder_send_header_unlocked (encoder, &discont);
if (G_UNLIKELY (discont)) {
GST_LOG_OBJECT (encoder, "marking discont");
if (encoder_class->pre_push)
ret = encoder_class->pre_push (encoder, frame);
- if (encoder_class->transform_meta) {
- if (G_LIKELY (frame->input_buffer)) {
- CopyMetaData data;
-
- data.encoder = encoder;
- data.frame = frame;
- gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
- } else {
- GST_WARNING_OBJECT (encoder,
- "Can't copy metadata because input frame disappeared");
- }
- }
+ gst_video_encoder_transform_meta_unlocked (encoder, frame);
/* Get an additional ref to the buffer, which is going to be pushed
* downstream, the original ref is owned by the frame */
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
return ret;
+}
- /* ERRORS */
-no_output_state:
- {
- gst_video_encoder_release_frame (encoder, frame);
- GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
- GST_ERROR_OBJECT (encoder, "Output state was not configured");
- return GST_FLOW_ERROR;
+/**
+ * gst_video_encoder_finish_subframe:
+ * @encoder: a #GstVideoEncoder
+ * @frame: (transfer none): a #GstVideoCodecFrame being encoded
+ *
+ * If multiple subframes are produced for one input frame then use this method
+ * for each subframe, except for the last one. Before calling this function,
+ * you need to fill frame->output_buffer with the encoded buffer to push.
+ *
+ * You must call #gst_video_encoder_finish_frame() for the last sub-frame
+ * to tell the encoder that the frame has been fully encoded.
+ *
+ * This function will change the metadata of @frame and frame->output_buffer
+ * will be pushed downstream.
+ *
+ * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
+ *
+ * Since: 1.18
+ */
+GstFlowReturn
+gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame)
+{
+ GstVideoEncoderPrivate *priv = encoder->priv;
+ GstVideoEncoderClass *encoder_class;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstBuffer *subframe_buffer = NULL;
+ gboolean discont = FALSE;
+ gboolean send_headers = FALSE;
+
+ g_return_val_if_fail (frame, GST_FLOW_ERROR);
+ g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
+
+ subframe_buffer = frame->output_buffer;
+ discont = (frame->presentation_frame_number == 0
+ && frame->abidata.ABI.num_subframes == 0);
+
+ encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
+
+ GST_LOG_OBJECT (encoder,
+ "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
+ GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
+ frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
+ GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
+
+ GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
+
+ ret = gst_video_encoder_can_push_unlocked (encoder);
+ if (ret != GST_FLOW_OK)
+ goto done;
+
+ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit)
+ gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
+
+ /* Push pending events only for the first subframe ie segment event.
+ * Push new incoming events on finish_frame otherwise.
+ */
+ if (frame->abidata.ABI.num_subframes == 0)
+ gst_video_encoder_push_pending_unlocked (encoder, frame);
+
+ if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
+ && frame->abidata.ABI.num_subframes == 0) {
+ GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
+ if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
+ frame->dts = frame->pts;
+ }
+ } else {
+ GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
}
+
+ gst_video_encoder_infer_dts_unlocked (encoder, frame);
+
+ GST_BUFFER_PTS (subframe_buffer) = frame->pts;
+ GST_BUFFER_DTS (subframe_buffer) = frame->dts;
+ GST_BUFFER_DURATION (subframe_buffer) = frame->duration;
+
+ GST_OBJECT_LOCK (encoder);
+ /* update rate estimate */
+ priv->bytes += gst_buffer_get_size (subframe_buffer);
+ GST_OBJECT_UNLOCK (encoder);
+
+ if (G_UNLIKELY (send_headers))
+ priv->new_headers = TRUE;
+
+ gst_video_encoder_send_header_unlocked (encoder, &discont);
+
+ if (G_UNLIKELY (discont)) {
+ GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
+ subframe_buffer);
+ GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
+ }
+
+ if (encoder_class->pre_push) {
+ ret = encoder_class->pre_push (encoder, frame);
+ }
+
+ gst_video_encoder_transform_meta_unlocked (encoder, frame);
+
+ if (ret == GST_FLOW_OK) {
+ ret = gst_pad_push (encoder->srcpad, subframe_buffer);
+ subframe_buffer = NULL;
+ }
+
+done:
+ frame->abidata.ABI.num_subframes++;
+ if (subframe_buffer)
+ gst_buffer_unref (subframe_buffer);
+ frame->output_buffer = NULL;
+
+ GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
+
+ return ret;
}
/**
#define TEST_VIDEO_FPS_D 1
#define GST_VIDEO_ENCODER_TESTER_TYPE gst_video_encoder_tester_get_type()
+#define GST_VIDEO_ENCODER_TESTER(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_VIDEO_ENCODER_TESTER_TYPE, GstVideoEncoderTester))
static GType gst_video_encoder_tester_get_type (void);
typedef struct _GstVideoEncoderTester GstVideoEncoderTester;
GstVideoEncoder parent;
GstFlowReturn pre_push_result;
+ gint num_subframes;
+ gint current_subframe;
+ gboolean send_headers;
+ gboolean key_frame_sent;
+ gboolean enable_step_by_step;
+ GstVideoCodecFrame *last_frame;
};
struct _GstVideoEncoderTesterClass
{
+ GstFlowReturn (*step_by_step) (GstVideoEncoder * encoder,
+ GstVideoCodecFrame * frame, int steps);
GstVideoEncoderClass parent_class;
};
}
static GstFlowReturn
-gst_video_encoder_tester_handle_frame (GstVideoEncoder * enc,
- GstVideoCodecFrame * frame)
+gst_video_encoder_push_subframe (GstVideoEncoder * enc,
+ GstVideoCodecFrame * frame, int current_subframe)
{
guint8 *data;
GstMapInfo map;
guint64 input_num;
- GstClockTimeDiff deadline;
-
- deadline = gst_video_encoder_get_max_encode_time (enc, frame);
- if (deadline < 0) {
- /* Calling finish_frame() with frame->output_buffer == NULL means to drop it */
- goto out;
+ GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
+
+ if (enc_tester->send_headers) {
+ GstBuffer *hdr;
+ GList *headers = NULL;
+ hdr = gst_buffer_new_and_alloc (0);
+ GST_BUFFER_FLAG_SET (hdr, GST_BUFFER_FLAG_HEADER);
+ headers = g_list_append (headers, hdr);
+ gst_video_encoder_set_headers (enc, headers);
+ enc_tester->send_headers = FALSE;
}
gst_buffer_map (frame->input_buffer, &map, GST_MAP_READ);
input_num = *((guint64 *) map.data);
gst_buffer_unmap (frame->input_buffer, &map);
+ if (!enc_tester->key_frame_sent) {
+ GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
+ enc_tester->key_frame_sent = TRUE;
+ }
+
data = g_malloc (sizeof (guint64));
*(guint64 *) data = input_num;
-
frame->output_buffer = gst_buffer_new_wrapped (data, sizeof (guint64));
frame->pts = GST_BUFFER_PTS (frame->input_buffer);
frame->duration = GST_BUFFER_DURATION (frame->input_buffer);
-out:
- return gst_video_encoder_finish_frame (enc, frame);
+ if (current_subframe < enc_tester->num_subframes - 1)
+ return gst_video_encoder_finish_subframe (enc, frame);
+ else
+ return gst_video_encoder_finish_frame (enc, frame);
+}
+
+/* Push up to @steps pending subframes of @frame downstream, starting from the
+ * tester's current subframe index.  Once all subframes of the frame have been
+ * produced, the index is reset and the reference taken on the frame in
+ * handle_frame() is released. */
+static GstFlowReturn
+gst_video_encoder_tester_output_step_by_step (GstVideoEncoder * enc,
+    GstVideoCodecFrame * frame, gint steps)
+{
+  GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
+  GstFlowReturn ret = GST_FLOW_OK;
+  int i;
+  for (i = enc_tester->current_subframe;
+      i < MIN (steps + enc_tester->current_subframe, enc_tester->num_subframes);
+      i++) {
+    ret = gst_video_encoder_push_subframe (enc, frame, i);
+  }
+  enc_tester->current_subframe = i;
+  if (enc_tester->current_subframe >= enc_tester->num_subframes) {
+    enc_tester->current_subframe = 0;
+    gst_video_codec_frame_unref (enc_tester->last_frame);
+    /* Clear the stale pointer so a later unref cannot touch freed memory. */
+    enc_tester->last_frame = NULL;
+  }
+
+  return ret;
+}
+
+/* handle_frame implementation of the tester encoder.  In step-by-step mode
+ * the frame is retained (last_frame) and nothing is pushed until the test
+ * explicitly calls gst_video_encoder_tester_output_step_by_step(); otherwise
+ * all subframes are produced immediately. */
+static GstFlowReturn
+gst_video_encoder_tester_handle_frame (GstVideoEncoder * enc,
+    GstVideoCodecFrame * frame)
+{
+  GstClockTimeDiff deadline;
+  GstVideoEncoderTester *enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
+
+  deadline = gst_video_encoder_get_max_encode_time (enc, frame);
+  if (deadline < 0) {
+    /* Calling finish_frame() with frame->output_buffer == NULL means to drop it */
+    return gst_video_encoder_finish_frame (enc, frame);
+  }
+
+  /* NOTE(review): any previously stored last_frame reference is overwritten
+   * here — assumes the test drains all subframes before pushing the next
+   * input buffer, otherwise the old reference would leak.  TODO confirm. */
+  enc_tester->last_frame = gst_video_codec_frame_ref (frame);
+  if (enc_tester->enable_step_by_step)
+    return GST_FLOW_OK;
+
+  return gst_video_encoder_tester_output_step_by_step (enc, frame,
+      enc_tester->num_subframes);
}
static GstFlowReturn
GstVideoCodecFrame * frame)
{
GstVideoEncoderTester *tester = (GstVideoEncoderTester *) enc;
-
return tester->pre_push_result;
}
videoencoder_class->handle_frame = gst_video_encoder_tester_handle_frame;
videoencoder_class->pre_push = gst_video_encoder_tester_pre_push;
videoencoder_class->set_format = gst_video_encoder_tester_set_format;
+
}
static void
gst_video_encoder_tester_init (GstVideoEncoderTester * tester)
{
tester->pre_push_result = GST_FLOW_OK;
+ /* One subframe is considered as a whole single frame. */
+ tester->num_subframes = 1;
}
static gboolean
}
static void
+setup_videoencodertester_with_subframes (int num_subframes)
+{
+  /* Same as setup_videoencodertester() but the tester element will emit
+   * @num_subframes output buffers per input frame and push a header buffer
+   * before the first output. */
+  GstVideoEncoderTester *enc_tester;
+  setup_videoencodertester ();
+  enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
+  enc_tester->num_subframes = num_subframes;
+  enc_tester->send_headers = TRUE;
+}
+
+static void
cleanup_videoencodertest (void)
{
gst_pad_set_active (mysrcpad, FALSE);
{
GstVideoEncoderTester *tester;
GstHarness *h;
+ GstFlowReturn ret;
tester = g_object_new (GST_VIDEO_ENCODER_TESTER_TYPE, NULL);
tester->pre_push_result = GST_FLOW_ERROR;
h = gst_harness_new_with_element (GST_ELEMENT (tester), "sink", "src");
gst_harness_set_src_caps (h, create_test_caps ());
- fail_unless_equals_int (gst_harness_push (h, create_test_buffer (0)),
- GST_FLOW_ERROR);
+ ret = gst_harness_push (h, create_test_buffer (0));
+ fail_unless_equals_int (ret, GST_FLOW_ERROR);
gst_harness_teardown (h);
gst_object_unref (tester);
GST_END_TEST;
+#define NUM_BUFFERS 100
+GST_START_TEST (videoencoder_playback_subframes)
+{
+ GstSegment segment;
+ GstBuffer *buffer;
+ guint64 i;
+ GList *iter;
+ int subframes = 4;
+
+ setup_videoencodertester_with_subframes (subframes);
+
+ gst_pad_set_active (mysrcpad, TRUE);
+ gst_element_set_state (enc, GST_STATE_PLAYING);
+ gst_pad_set_active (mysinkpad, TRUE);
+
+ send_startup_events ();
+
+ /* push a new segment */
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));
+
+ /* push buffers, the data is actually a number so we can track them */
+ for (i = 0; i < NUM_BUFFERS; i++) {
+ buffer = create_test_buffer (i);
+
+ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
+ }
+
+ fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));
+
+ /* check that all buffers (plus one header buffer) were received by our source pad */
+ fail_unless (g_list_length (buffers) == NUM_BUFFERS * subframes + 1);
+  /* check that the first buffer is a header */
+ buffer = buffers->data;
+ fail_unless (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER));
+ /* check the other buffers */
+ i = 0;
+ for (iter = g_list_next (buffers); iter; iter = g_list_next (iter)) {
+    /* the header was skipped above; none of these may be header buffers */
+ GstMapInfo map;
+ guint64 num;
+ buffer = iter->data;
+ fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER));
+ gst_buffer_map (buffer, &map, GST_MAP_READ);
+
+ num = *(guint64 *) map.data;
+ fail_unless (i / subframes == num);
+
+ if (i % subframes)
+ fail_unless (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT));
+
+ fail_unless (GST_BUFFER_PTS (buffer) ==
+ gst_util_uint64_scale_round (i / subframes,
+ GST_SECOND * TEST_VIDEO_FPS_D, TEST_VIDEO_FPS_N));
+ fail_unless (GST_BUFFER_DURATION (buffer) ==
+ gst_util_uint64_scale_round (GST_SECOND, TEST_VIDEO_FPS_D,
+ TEST_VIDEO_FPS_N));
+ gst_buffer_unmap (buffer, &map);
+
+
+ i++;
+ }
+
+ g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
+ buffers = NULL;
+
+ cleanup_videoencodertest ();
+}
+
+GST_END_TEST;
+
+GST_START_TEST (videoencoder_playback_events_subframes)
+{
+ GstSegment segment;
+ GstBuffer *buffer;
+ GList *iter;
+ gint subframes = 4;
+ gint i, header_found;
+ GstVideoEncoderTester *enc_tester;
+
+ setup_videoencodertester_with_subframes (subframes);
+
+ enc_tester = GST_VIDEO_ENCODER_TESTER (enc);
+ enc_tester->send_headers = TRUE;
+ enc_tester->enable_step_by_step = TRUE;
+
+ gst_pad_set_active (mysrcpad, TRUE);
+ gst_element_set_state (enc, GST_STATE_PLAYING);
+ gst_pad_set_active (mysinkpad, TRUE);
+
+ send_startup_events ();
+
+ /* push a new segment -> no new buffer and no new events (still pending two custom events) */
+ gst_segment_init (&segment, GST_FORMAT_TIME);
+ fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_segment (&segment)));
+ fail_unless (g_list_length (buffers) == 0 && g_list_length (events) == 0);
+
+ /* push a first buffer -> no new buffer and no new events (still pending two custom events) */
+ buffer = create_test_buffer (0);
+ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
+ fail_unless (g_list_length (buffers) == 0 && g_list_length (events) == 0);
+
+  /* output only one subframe -> 2 buffers (header + subframe) and 3 events (stream-start, caps, segment) */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 1);
+ fail_unless (g_list_length (buffers) == 2 && g_list_length (events) == 3);
+ fail_unless (GST_BUFFER_FLAG_IS_SET ((GstBuffer *) buffers->data,
+ GST_BUFFER_FLAG_HEADER));
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 0)->data)) == GST_EVENT_STREAM_START);
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 1)->data)) == GST_EVENT_CAPS);
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 2)->data)) == GST_EVENT_SEGMENT);
+
+ /* output 3 last subframes -> 2 more buffers and no new events */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 3);
+ fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
+
+ /* push a new buffer -> no new buffer and no new events */
+ buffer = create_test_buffer (1);
+ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
+ fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
+
+ /* push an event in between -> no new buffer and no new event */
+ fail_unless (gst_pad_push_event (mysrcpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new_empty ("custom1"))));
+ fail_unless (g_list_length (buffers) == 5 && g_list_length (events) == 3);
+
+ /* output 1 subframe -> one new buffer and no new events */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 1);
+ fail_unless (g_list_length (buffers) == 6 && g_list_length (events) == 3);
+
+ /* push another custom event in between , no new event should appear until the next frame is handled */
+ fail_unless (gst_pad_push_event (mysrcpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new_empty ("custom2"))));
+ fail_unless (g_list_length (buffers) == 6 && g_list_length (events) == 3);
+
+ /* output 2 subframes -> 2 new buffers and no new events */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 2);
+ fail_unless (g_list_length (buffers) == 8 && g_list_length (events) == 3);
+
+ /* output 1 last subframe -> 1 new buffers and no new events */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 1);
+ fail_unless (g_list_length (buffers) == 9 && g_list_length (events) == 3);
+
+ /* push a third buffer -> no new buffer and no new events (still pending two custom events) */
+ buffer = create_test_buffer (2);
+ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
+ fail_unless (g_list_length (buffers) == 9 && g_list_length (events) == 3);
+
+ /* output 1 subframes -> 1 new buffer and 2 custom events from the last input frame */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 1);
+ fail_unless (g_list_length (buffers) == 10 && g_list_length (events) == 5);
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 3)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 4)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
+
+ /* push another custom event in between , no new event should appear until eos */
+ fail_unless (gst_pad_push_event (mysrcpad,
+ gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
+ gst_structure_new_empty ("custom3"))));
+ fail_unless (g_list_length (buffers) == 10 && g_list_length (events) == 5);
+
+ /* output 3 subframes -> 3 new buffer and no new events */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 3);
+ fail_unless (g_list_length (buffers) == 13 && g_list_length (events) == 5);
+
+ /* push a force key-unit event */
+ enc_tester->key_frame_sent = FALSE;
+ fail_unless (gst_pad_push_event (mysrcpad,
+ gst_video_event_new_downstream_force_key_unit (GST_CLOCK_TIME_NONE,
+ GST_CLOCK_TIME_NONE, GST_CLOCK_TIME_NONE, TRUE, 1)));
+
+ /* Create a new buffer which should be a key unit -> no new buffer and no new event */
+ buffer = create_test_buffer (3);
+ fail_unless (gst_pad_push (mysrcpad, buffer) == GST_FLOW_OK);
+ fail_unless (g_list_length (buffers) == 13 && g_list_length (events) == 5);
+
+  /* output 2 subframes -> 3 new buffers (one header and two subframes) and two events (key-unit and custom3) */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 2);
+ fail_unless (g_list_length (buffers) == 16 && g_list_length (events) == 7);
+
+  /* output 2 subframes -> 2 new buffers corresponding to the two last subframes */
+ gst_video_encoder_tester_output_step_by_step (GST_VIDEO_ENCODER (enc),
+ enc_tester->last_frame, 2);
+ fail_unless (g_list_length (buffers) == 18 && g_list_length (events) == 7);
+
+ /* push eos event -> 1 new event ( eos) */
+ fail_unless (gst_pad_push_event (mysrcpad, gst_event_new_eos ()));
+ fail_unless (g_list_length (buffers) == 18 && g_list_length (events) == 8);
+
+ /* check the order of the last events received */
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 6)->data)) == GST_EVENT_CUSTOM_DOWNSTREAM);
+ fail_unless (GST_EVENT_TYPE ((GstEvent *) (g_list_nth (events,
+ 7)->data)) == GST_EVENT_EOS);
+
+ /* check that only last subframe owns the GST_VIDEO_BUFFER_FLAG_MARKER flag */
+ i = 0;
+ header_found = 0;
+ for (iter = g_list_next (buffers); iter; iter = g_list_next (iter)) {
+ buffer = (GstBuffer *) (iter->data);
+ if (!GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_HEADER)) {
+ if ((i - header_found) % subframes == (subframes - 1))
+ fail_unless (GST_BUFFER_FLAG_IS_SET (buffer,
+ GST_VIDEO_BUFFER_FLAG_MARKER));
+ else
+ fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer,
+ GST_VIDEO_BUFFER_FLAG_MARKER));
+ } else {
+ fail_unless (!GST_BUFFER_FLAG_IS_SET (buffer,
+ GST_VIDEO_BUFFER_FLAG_MARKER));
+ header_found++;
+ }
+ i++;
+ }
+
+ g_list_free_full (buffers, (GDestroyNotify) gst_buffer_unref);
+ buffers = NULL;
+
+ cleanup_videoencodertest ();
+}
+
+GST_END_TEST;
+
static Suite *
gst_videoencoder_suite (void)
{
tcase_add_test (tc, videoencoder_flush_events);
tcase_add_test (tc, videoencoder_pre_push_fails);
tcase_add_test (tc, videoencoder_qos);
+ tcase_add_test (tc, videoencoder_playback_subframes);
+ tcase_add_test (tc, videoencoder_playback_events_subframes);
return s;
}