 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
 *   Contact: Stefan Kost <stefan.kost@nokia.com>
 * Copyright (C) 2012 Collabora Ltd.
 *   Author : Edward Hervey <edward@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/**
 * SECTION:gstvideoencoder
 * @short_description: Base class for video encoders
 *
 * This base class is for video encoders turning raw video into
 * encoded video data.
 *
 * GstVideoEncoder and subclass should cooperate as follows.
 *
 * <itemizedlist><title>Configuration</title>
 *
 *   Initially, GstVideoEncoder calls @start when the encoder element
 *   is activated, which allows the subclass to perform any global setup.
 *
 *   GstVideoEncoder calls @set_format to inform the subclass of the format
 *   of input video data that it is about to receive. The subclass should
 *   set up for encoding and configure the base class as appropriate
 *   (e.g. latency). While unlikely, it might be called more than once,
 *   if changing input parameters requires reconfiguration. The base class
 *   will ensure that processing of the current configuration is finished.
 *
 *   GstVideoEncoder calls @stop at the end of all processing.
 *
 * <title>Data processing</title>
 *
 *     The base class collects input data and metadata into a frame and hands
 *     this to the subclass' @handle_frame.
 *
 *     If codec processing results in encoded data, the subclass should call
 *     @gst_video_encoder_finish_frame to have the encoded data pushed
 *     downstream.
 *
 *     If implemented, the base class calls the subclass @pre_push just prior
 *     to pushing, to allow subclasses to modify some metadata on the buffer.
 *     If it returns GST_FLOW_OK, the buffer is pushed downstream.
 *
 *     GstVideoEncoderClass will handle both srcpad and sinkpad events.
 *     Sink events will be passed to the subclass if the @event callback has
 *     been installed.
 *
 * <itemizedlist><title>Shutdown phase</title>
 *
 *   GstVideoEncoder calls @stop to inform the subclass that processing
 *   will be stopped.
 *
 * The subclass is responsible for providing pad template caps for
 * source and sink pads. The pads need to be named "sink" and "src". It should
 * also be able to provide fixed src pad caps in @getcaps by the time it calls
 * @gst_video_encoder_finish_frame.
 *
 * Things that the subclass needs to take care of:
 *
 *   <listitem><para>Provide pad templates</para></listitem>
 *
 *      Provide source pad caps before pushing the first buffer
 *
 *      Accept data in @handle_frame and provide encoded results to
 *      @gst_video_encoder_finish_frame.
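 *
 * A minimal subclass could look roughly like the sketch below (MyEnc, the
 * my_enc_* helpers and the "video/x-mycodec" caps are hypothetical
 * placeholders, not part of this API):
 * |[
 * static gboolean
 * my_enc_set_format (GstVideoEncoder * encoder, GstVideoCodecState * state)
 * {
 *   GstVideoCodecState *output_state;
 *
 *   // configure the codec for state->info here, then set the output caps
 *   output_state = gst_video_encoder_set_output_state (encoder,
 *       gst_caps_new_empty_simple ("video/x-mycodec"), state);
 *   gst_video_codec_state_unref (output_state);
 *
 *   return TRUE;
 * }
 *
 * static GstFlowReturn
 * my_enc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
 * {
 *   // run the codec on frame->input_buffer (my_enc_encode is a placeholder)
 *   frame->output_buffer = my_enc_encode (encoder, frame->input_buffer);
 *   if (frame->output_buffer == NULL)
 *     return GST_FLOW_ERROR;
 *   if (my_enc_produced_keyframe (encoder))
 *     GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
 *
 *   return gst_video_encoder_finish_frame (encoder, frame);
 * }
 * ]|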
 */

/* TODO
 *
 * * Change _set_output_format() to steal the reference of the provided caps
 * * Calculate actual latency based on input/output timestamp/frame_number
 *   and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
 */

/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
 * with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS

#include "gstvideoencoder.h"
#include "gstvideoutils.h"

#include <gst/video/gstvideometa.h>
GST_DEBUG_CATEGORY (videoencoder_debug);
#define GST_CAT_DEFAULT videoencoder_debug

#define GST_VIDEO_ENCODER_GET_PRIVATE(obj)  \
    (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_ENCODER, \
        GstVideoEncoderPrivate))
struct _GstVideoEncoderPrivate
{
  guint64 presentation_frame_number;
  int distance_from_sync;

  /* FIXME : (and introduce a context ?) */

  GList *current_frame_events;

  gboolean new_headers;         /* Whether new headers were just set */

  GList *force_key_unit;        /* List of pending forced keyunits */

  guint64 system_frame_number;

  GList *frames;                /* Protected with OBJECT_LOCK */
  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;
  gboolean output_state_changed;
typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
struct _ForcedKeyUnitEvent
{
  GstClockTime running_time;
  gboolean pending;             /* TRUE if this was requested already */
  gboolean all_headers;

forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
{
  g_slice_free (ForcedKeyUnitEvent, evt);

static ForcedKeyUnitEvent *
forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
  ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);

  evt->running_time = running_time;
  evt->all_headers = all_headers;
static GstElementClass *parent_class = NULL;
static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
static void gst_video_encoder_init (GstVideoEncoder * enc,
    GstVideoEncoderClass * klass);

static void gst_video_encoder_finalize (GObject * object);

static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
    element, GstStateChange transition);
static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
    encoder, GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);

static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
    encoder, GstQuery * query);

/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
 * method to get to the padtemplates */
gst_video_encoder_get_type (void)
{
  static volatile gsize type = 0;

  if (g_once_init_enter (&type)) {
    static const GTypeInfo info = {
      sizeof (GstVideoEncoderClass),
      (GClassInitFunc) gst_video_encoder_class_init,
      sizeof (GstVideoEncoder),
      (GInstanceInitFunc) gst_video_encoder_init,
    const GInterfaceInfo preset_interface_info = {
      NULL,                     /* interface_init */
      NULL,                     /* interface_finalize */
      NULL                      /* interface_data */
    _type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
    g_type_add_interface_static (_type, GST_TYPE_PRESET,
        &preset_interface_info);
    g_once_init_leave (&type, _type);
gst_video_encoder_class_init (GstVideoEncoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = G_OBJECT_CLASS (klass);
  gstelement_class = GST_ELEMENT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
      "Base Video Encoder");

  parent_class = g_type_class_peek_parent (klass);

  g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate));

  gobject_class->finalize = gst_video_encoder_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);

  klass->sink_event = gst_video_encoder_sink_event_default;
  klass->src_event = gst_video_encoder_src_event_default;
  klass->propose_allocation = gst_video_encoder_propose_allocation_default;
gst_video_encoder_reset (GstVideoEncoder * encoder)
{
  GstVideoEncoderPrivate *priv = encoder->priv;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  priv->presentation_frame_number = 0;
  priv->distance_from_sync = 0;

  g_list_foreach (priv->force_key_unit, (GFunc) forced_key_unit_event_free,
  g_list_free (priv->force_key_unit);
  priv->force_key_unit = NULL;

  priv->drained = TRUE;
  priv->min_latency = 0;
  priv->max_latency = 0;

  g_list_foreach (priv->headers, (GFunc) gst_buffer_unref, NULL);
  g_list_free (priv->headers);
  priv->headers = NULL;
  priv->new_headers = FALSE;

  g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
  g_list_free (priv->current_frame_events);
  priv->current_frame_events = NULL;

  for (g = priv->frames; g; g = g->next) {
    gst_video_codec_frame_unref ((GstVideoCodecFrame *) g->data);
  g_list_free (priv->frames);

  if (priv->input_state)
    gst_video_codec_state_unref (priv->input_state);
  priv->input_state = NULL;
  if (priv->output_state)
    gst_video_codec_state_unref (priv->output_state);
  priv->output_state = NULL;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
{
  GstVideoEncoderPrivate *priv;
  GstPadTemplate *pad_template;

  GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");

  priv = encoder->priv = GST_VIDEO_ENCODER_GET_PRIVATE (encoder);

      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
  g_return_if_fail (pad_template != NULL);

  encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");

  gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);

      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  g_return_if_fail (pad_template != NULL);

  encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");

  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
  gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);

  gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
  gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);

  g_rec_mutex_init (&encoder->stream_lock);

  priv->at_eos = FALSE;
  priv->headers = NULL;
  priv->new_headers = FALSE;

  gst_video_encoder_reset (encoder);
gst_video_encoded_video_convert (gint64 bytes, gint64 time,
    GstFormat src_format, gint64 src_value, GstFormat * dest_format,
  gboolean res = FALSE;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  if (G_UNLIKELY (src_format == *dest_format || src_value == 0 ||
    *dest_value = src_value;

  if (bytes <= 0 || time <= 0) {
    GST_DEBUG ("not enough metadata yet to convert");

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (src_value, time, bytes);
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = gst_util_uint64_scale (src_value, bytes, time);
      GST_DEBUG ("unhandled conversion from %d to %d", src_format,
/**
 * gst_video_encoder_set_headers:
 * @encoder: a #GstVideoEncoder
 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
 *
 * Set the codec headers to be sent downstream whenever requested.
 */
gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
{
  GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);

  GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
  if (video_encoder->priv->headers) {
    g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
    g_list_free (video_encoder->priv->headers);
  video_encoder->priv->headers = headers;
  video_encoder->priv->new_headers = TRUE;

  GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
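
/* A rough usage sketch: a (hypothetical) subclass that produced its stream
 * headers in set_format would hand them to the base class like this; the
 * list and its buffers are owned by the base class afterwards (transfer
 * full). sps_buf, pps_buf and enc are placeholders:
 *
 *   GList *headers = NULL;
 *
 *   headers = g_list_append (headers, sps_buf);
 *   headers = g_list_append (headers, pps_buf);
 *   gst_video_encoder_set_headers (GST_VIDEO_ENCODER (enc), headers);
 */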
gst_video_encoder_drain (GstVideoEncoder * enc)
{
  GstVideoEncoderPrivate *priv;
  GstVideoEncoderClass *enc_class;

  enc_class = GST_VIDEO_ENCODER_GET_CLASS (enc);

  GST_DEBUG_OBJECT (enc, "draining");

    GST_DEBUG_OBJECT (enc, "already drained");

  if (enc_class->reset) {
    GST_DEBUG_OBJECT (enc, "requesting subclass to finish");
    ret = enc_class->reset (enc, TRUE);

  /* everything should be away now */
    /* not fatal/impossible though if subclass/enc eats stuff */
    g_list_foreach (priv->frames, (GFunc) gst_video_codec_frame_unref, NULL);
    g_list_free (priv->frames);
static GstVideoCodecState *
_new_output_state (GstCaps * caps, GstVideoCodecState * reference)
{
  GstVideoCodecState *state;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);
  gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0);

    GstVideoInfo *tgt, *ref;

    ref = &reference->info;

    /* Copy over extra fields from reference state */
    tgt->interlace_mode = ref->interlace_mode;
    tgt->flags = ref->flags;
    tgt->width = ref->width;
    tgt->height = ref->height;
    tgt->chroma_site = ref->chroma_site;
    tgt->colorimetry = ref->colorimetry;
    tgt->par_n = ref->par_n;
    tgt->par_d = ref->par_d;
    tgt->fps_n = ref->fps_n;
    tgt->fps_d = ref->fps_d;
static GstVideoCodecState *
_new_input_state (GstCaps * caps)
{
  GstVideoCodecState *state;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);
  if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
  state->caps = gst_caps_ref (caps);

    g_slice_free (GstVideoCodecState, state);
gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
{
  GstVideoEncoderClass *encoder_class;
  GstVideoCodecState *state;
  gboolean samecaps = FALSE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  /* subclass should do something here ... */
  g_return_val_if_fail (encoder_class->set_format != NULL, FALSE);

  GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);

  state = _new_input_state (caps);
  if (G_UNLIKELY (!state))

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  if (encoder->priv->input_state)
        gst_video_info_is_equal (&state->info,
        &encoder->priv->input_state->info);

    /* arrange to drain pending frames */
    gst_video_encoder_drain (encoder);

    /* and subclass should be ready to configure format at any time */
    ret = encoder_class->set_format (encoder, state);

    if (encoder->priv->input_state)
      gst_video_codec_state_unref (encoder->priv->input_state);
    encoder->priv->input_state = state;

    gst_video_codec_state_unref (state);

    /* no need to stir things up */
    GST_DEBUG_OBJECT (encoder,
        "new video format identical to configured format");
    gst_video_codec_state_unref (state);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);

  GST_WARNING_OBJECT (encoder, "Failed to parse caps");
/**
 * gst_video_encoder_proxy_getcaps:
 * @enc: a #GstVideoEncoder
 * @caps: initial caps
 *
 * Returns caps that express @caps (or sink template caps if @caps == NULL)
 * restricted to resolution/format/... combinations supported by downstream
 * elements (e.g. muxers).
 *
 * Returns: a #GstCaps owned by caller
 */
gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
  GstCaps *fcaps, *filter_caps;

  /* Allow downstream to specify width/height/framerate/PAR constraints
   * and forward them upstream for video converters to handle
   */
      caps ? gst_caps_ref (caps) :
      gst_pad_get_pad_template_caps (encoder->sinkpad);
  allowed = gst_pad_get_allowed_caps (encoder->srcpad);

  if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) {

  GST_LOG_OBJECT (encoder, "template caps %" GST_PTR_FORMAT, templ_caps);
  GST_LOG_OBJECT (encoder, "allowed caps %" GST_PTR_FORMAT, allowed);

  filter_caps = gst_caps_new_empty ();

  for (i = 0; i < gst_caps_get_size (templ_caps); i++) {
        gst_structure_get_name_id (gst_caps_get_structure (templ_caps, i));

    for (j = 0; j < gst_caps_get_size (allowed); j++) {
      const GstStructure *allowed_s = gst_caps_get_structure (allowed, j);

      s = gst_structure_new_id_empty (q_name);
      if ((val = gst_structure_get_value (allowed_s, "width")))
        gst_structure_set_value (s, "width", val);
      if ((val = gst_structure_get_value (allowed_s, "height")))
        gst_structure_set_value (s, "height", val);
      if ((val = gst_structure_get_value (allowed_s, "framerate")))
        gst_structure_set_value (s, "framerate", val);
      if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio")))
        gst_structure_set_value (s, "pixel-aspect-ratio", val);

      filter_caps = gst_caps_merge_structure (filter_caps, s);

  fcaps = gst_caps_intersect (filter_caps, templ_caps);
  gst_caps_unref (filter_caps);
  gst_caps_unref (templ_caps);

    GST_LOG_OBJECT (encoder, "intersecting with %" GST_PTR_FORMAT, filter);
    filter_caps = gst_caps_intersect (fcaps, filter);
    gst_caps_unref (fcaps);

  gst_caps_replace (&allowed, NULL);

  GST_LOG_OBJECT (encoder, "proxy caps %" GST_PTR_FORMAT, fcaps);
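
/* A subclass would typically call the above from its @getcaps vfunc, roughly
 * like the hypothetical sketch below, where my_enc_supported_raw_caps() is a
 * placeholder returning the raw formats the codec itself accepts:
 *
 *   static GstCaps *
 *   my_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
 *   {
 *     GstCaps *caps = my_enc_supported_raw_caps (encoder);
 *     GstCaps *res = gst_video_encoder_proxy_getcaps (encoder, caps, filter);
 *
 *     gst_caps_unref (caps);
 *     return res;
 *   }
 */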
gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
{
  GstVideoEncoderClass *klass;

  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

    caps = klass->getcaps (encoder, filter);
    caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);

  GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,

gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
  GstVideoEncoder *encoder;
  gboolean res = FALSE;

  encoder = GST_VIDEO_ENCODER (parent);

  switch (GST_QUERY_TYPE (query)) {
      GstCaps *filter, *caps;

      gst_query_parse_caps (query, &filter);
      caps = gst_video_encoder_sink_getcaps (encoder, filter);
      gst_query_set_caps_result (query, caps);
      gst_caps_unref (caps);

    case GST_QUERY_ALLOCATION:
      GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

      if (klass->propose_allocation)
        res = klass->propose_allocation (encoder, query);

      res = gst_pad_query_default (pad, parent, query);
gst_video_encoder_finalize (GObject * object)
{
  GstVideoEncoder *encoder;

  GST_DEBUG_OBJECT (object, "finalize");

  encoder = GST_VIDEO_ENCODER (object);
  if (encoder->priv->headers) {
    g_list_foreach (encoder->priv->headers, (GFunc) gst_buffer_unref, NULL);
    g_list_free (encoder->priv->headers);

  g_rec_mutex_clear (&encoder->stream_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non-TIME segment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      encoder->output_segment = segment;
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return gst_pad_push_event (encoder->srcpad, event);
gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
  GstVideoEncoderClass *encoder_class;
  gboolean ret = FALSE;

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  switch (GST_EVENT_TYPE (event)) {

      gst_event_parse_caps (event, &caps);
      ret = gst_video_encoder_setcaps (encoder, caps);
      gst_event_unref (event);

      GstFlowReturn flow_ret;

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
      encoder->priv->at_eos = TRUE;

      if (encoder_class->finish) {
        flow_ret = encoder_class->finish (encoder);
        flow_ret = GST_FLOW_OK;

      ret = (flow_ret == GST_FLOW_OK);
      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    case GST_EVENT_SEGMENT:

      GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (encoder, "received non-TIME segment");
        GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

      encoder->priv->at_eos = FALSE;

      encoder->input_segment = segment;

      GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    case GST_EVENT_CUSTOM_DOWNSTREAM:

      if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;
        gboolean all_headers;

        if (gst_video_event_parse_downstream_force_key_unit (event,
                NULL, NULL, &running_time, &all_headers, &count)) {
          ForcedKeyUnitEvent *fevt;

          GST_OBJECT_LOCK (encoder);
          fevt = forced_key_unit_event_new (running_time, all_headers, count);
          encoder->priv->force_key_unit =
              g_list_append (encoder->priv->force_key_unit, fevt);
          GST_OBJECT_UNLOCK (encoder);

          GST_DEBUG_OBJECT (encoder,
              "force-key-unit event: running-time %" GST_TIME_FORMAT
              ", all_headers %d, count %u",
              GST_TIME_ARGS (running_time), all_headers, count);

        gst_event_unref (event);

  /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
   * For EOS this is required because no buffer or serialized event
   * will come after EOS and nothing could trigger another
   * _finish_frame() call.
   *
   * If the subclass handles sending of EOS manually it can simply
   * not chain up to the parent class' event handler.
   *
   * For FLUSH_STOP this is required because it is expected
   * to be forwarded immediately and no buffers are queued anyway.
   */
  if (!GST_EVENT_IS_SERIALIZED (event)
      || GST_EVENT_TYPE (event) == GST_EVENT_EOS
      || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    ret = gst_video_encoder_push_event (encoder, event);

    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
    encoder->priv->current_frame_events =
        g_list_prepend (encoder->priv->current_frame_events, event);
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
  GstVideoEncoder *enc;
  GstVideoEncoderClass *klass;

  enc = GST_VIDEO_ENCODER (parent);
  klass = GST_VIDEO_ENCODER_GET_CLASS (enc);

  GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
      GST_EVENT_TYPE_NAME (event));

  if (klass->sink_event)
    ret = klass->sink_event (enc, event);
gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
  gboolean ret = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_UPSTREAM:

      if (gst_video_event_is_force_key_unit (event)) {
        GstClockTime running_time;
        gboolean all_headers;

        if (gst_video_event_parse_upstream_force_key_unit (event,
                &running_time, &all_headers, &count)) {
          ForcedKeyUnitEvent *fevt;

          GST_OBJECT_LOCK (encoder);
          fevt = forced_key_unit_event_new (running_time, all_headers, count);
          encoder->priv->force_key_unit =
              g_list_append (encoder->priv->force_key_unit, fevt);
          GST_OBJECT_UNLOCK (encoder);

          GST_DEBUG_OBJECT (encoder,
              "force-key-unit event: running-time %" GST_TIME_FORMAT
              ", all_headers %d, count %u",
              GST_TIME_ARGS (running_time), all_headers, count);

          gst_event_unref (event);

      gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderClass *klass;
  gboolean ret = FALSE;

  encoder = GST_VIDEO_ENCODER (parent);
  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);

  if (klass->src_event)
    ret = klass->src_event (encoder, event);
gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
{
  GstVideoEncoderPrivate *priv;
  GstVideoEncoder *enc;

  enc = GST_VIDEO_ENCODER (parent);

  GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONVERT:
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
          gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
          src_val, &dest_fmt, &dest_val);
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);

    case GST_QUERY_LATENCY:
      GstClockTime min_latency, max_latency;

      res = gst_pad_peer_query (enc->sinkpad, query);
        gst_query_parse_latency (query, &live, &min_latency, &max_latency);
        GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
            GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
            GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));

        GST_OBJECT_LOCK (enc);
        min_latency += priv->min_latency;
        if (enc->priv->max_latency == GST_CLOCK_TIME_NONE) {
          max_latency = GST_CLOCK_TIME_NONE;
        } else if (max_latency != GST_CLOCK_TIME_NONE) {
          max_latency += enc->priv->max_latency;
        GST_OBJECT_UNLOCK (enc);

        gst_query_set_latency (query, live, min_latency, max_latency);

      res = gst_pad_query_default (pad, parent, query);

  GST_DEBUG_OBJECT (enc, "query failed");
static GstVideoCodecFrame *
gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
    GstClockTime timestamp, GstClockTime duration)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstVideoCodecFrame *frame;

  frame = g_slice_new0 (GstVideoCodecFrame);

  frame->ref_count = 1;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  frame->system_frame_number = priv->system_frame_number;
  priv->system_frame_number++;

  frame->presentation_frame_number = priv->presentation_frame_number;
  priv->presentation_frame_number++;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  frame->events = priv->current_frame_events;
  priv->current_frame_events = NULL;
  frame->input_buffer = buf;
  frame->pts = timestamp;
  frame->duration = duration;
static GstFlowReturn
gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderPrivate *priv;
  GstVideoEncoderClass *klass;
  GstVideoCodecFrame *frame;
  GstFlowReturn ret = GST_FLOW_OK;
  guint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop;

  encoder = GST_VIDEO_ENCODER (parent);
  priv = encoder->priv;
  klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  start = GST_BUFFER_TIMESTAMP (buf);
  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
    stop = start + GST_BUFFER_DURATION (buf);

  GST_LOG_OBJECT (encoder,
      "received buffer of size %" G_GSIZE_FORMAT " with ts %" GST_TIME_FORMAT
      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
      GST_TIME_ARGS (start), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));

  /* Drop buffers outside of segment */
  if (!gst_segment_clip (&encoder->output_segment,
          GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
    GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
    gst_buffer_unref (buf);

  frame = gst_video_encoder_new_frame (encoder, buf, cstart, cstop - cstart);

  GST_OBJECT_LOCK (encoder);
  if (priv->force_key_unit) {
    ForcedKeyUnitEvent *fevt = NULL;
    GstClockTime running_time;

        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
        GST_BUFFER_TIMESTAMP (buf));

    for (l = priv->force_key_unit; l; l = l->next) {
      ForcedKeyUnitEvent *tmp = l->data;

      /* Skip pending keyunits */

      /* Simple case, keyunit ASAP */
      if (tmp->running_time == GST_CLOCK_TIME_NONE) {

      /* Event for before this frame */
      if (tmp->running_time <= running_time) {

      GST_DEBUG_OBJECT (encoder,
          "Forcing a key unit at running time %" GST_TIME_FORMAT,
          GST_TIME_ARGS (running_time));
      GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
      if (fevt->all_headers)
        GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
      fevt->pending = TRUE;

  GST_OBJECT_UNLOCK (encoder);

  priv->frames = g_list_append (priv->frames, frame);

  /* new data, more finish needed */
  priv->drained = FALSE;

  GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
      frame->presentation_frame_number);

  ret = klass->handle_frame (encoder, frame);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
static GstStateChangeReturn
gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
{
  GstVideoEncoder *encoder;
  GstVideoEncoderClass *encoder_class;
  GstStateChangeReturn ret;

  encoder = GST_VIDEO_ENCODER (element);
  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* open device/library if needed */
      if (encoder_class->open && !encoder_class->open (encoder))
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* Initialize device/library if needed */
      if (encoder_class->start && !encoder_class->start (encoder))

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_video_encoder_reset (encoder);
      if (encoder_class->stop && !encoder_class->stop (encoder))
    case GST_STATE_CHANGE_READY_TO_NULL:
      /* close device/library if needed */
      if (encoder_class->close && !encoder_class->close (encoder))

    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to open encoder"));
    return GST_STATE_CHANGE_FAILURE;

    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to start encoder"));
    return GST_STATE_CHANGE_FAILURE;

    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to stop encoder"));
    return GST_STATE_CHANGE_FAILURE;

    GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
        ("Failed to close encoder"));
    return GST_STATE_CHANGE_FAILURE;
gst_video_encoder_set_src_caps (GstVideoEncoder * encoder)
{
  GstVideoCodecState *state = encoder->priv->output_state;
  GstVideoInfo *info = &state->info;

  g_return_val_if_fail (state->caps != NULL, FALSE);

  if (encoder->priv->output_state_changed) {
    state->caps = gst_caps_make_writable (state->caps);

    gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
        "height", G_TYPE_INT, info->height,
        "pixel-aspect-ratio", GST_TYPE_FRACTION,
        info->par_n, info->par_d, NULL);
    if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
      /* variable fps with a max-framerate */
      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
          "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
      /* no variable fps or no max-framerate */
      gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
          info->fps_n, info->fps_d, NULL);
    if (state->codec_data)
      gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
          state->codec_data, NULL);
    encoder->priv->output_state_changed = FALSE;

  ret = gst_pad_set_caps (encoder->srcpad, state->caps);
/**
 * gst_video_encoder_finish_frame:
 * @encoder: a #GstVideoEncoder
 * @frame: (transfer full): an encoded #GstVideoCodecFrame
 *
 * @frame must have a valid encoded data buffer, whose metadata fields
 * are then appropriately set according to frame data, or no buffer at
 * all if the frame should be dropped.
 * It is subsequently pushed downstream or provided to @pre_push.
 * In any case, the frame is considered finished and released.
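 *
 * A rough (hypothetical) sketch of both cases from a subclass @handle_frame,
 * where encoded_buf and is_keyframe are placeholders:
 * |[
 * // produced data: attach it, flag keyframes, then finish the frame
 * frame->output_buffer = encoded_buf;
 * if (is_keyframe)
 *   GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
 * ret = gst_video_encoder_finish_frame (encoder, frame);
 *
 * // dropping instead: leave frame->output_buffer at NULL
 * ret = gst_video_encoder_finish_frame (encoder, frame);
 * ]|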
 *
 * Returns: a #GstFlowReturn resulting from sending data downstream
 */
gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstFlowReturn ret = GST_FLOW_OK;
  GstVideoEncoderClass *encoder_class;
  gboolean send_headers = FALSE;
  gboolean discont = (frame->presentation_frame_number == 0);

  encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);

  GST_LOG_OBJECT (encoder,
      "finish frame fpn %d", frame->presentation_frame_number);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  if (G_UNLIKELY (priv->output_state_changed))
    gst_video_encoder_set_src_caps (encoder);

  if (G_UNLIKELY (priv->output_state == NULL))
    goto no_output_state;

  /* Push all pending events that arrived before this frame */
  for (l = priv->frames; l; l = l->next) {
    GstVideoCodecFrame *tmp = l->data;

      for (k = g_list_last (tmp->events); k; k = k->prev)
        gst_video_encoder_push_event (encoder, k->data);
      g_list_free (tmp->events);

  /* no buffer data means this frame is skipped/dropped */
  if (!frame->output_buffer) {
    GST_DEBUG_OBJECT (encoder, "skipping frame %" GST_TIME_FORMAT,
        GST_TIME_ARGS (frame->pts));

  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
    GstClockTime stream_time, running_time;
    ForcedKeyUnitEvent *fevt = NULL;

        gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,

    GST_OBJECT_LOCK (encoder);
    for (l = priv->force_key_unit; l; l = l->next) {
      ForcedKeyUnitEvent *tmp = l->data;

      /* Skip non-pending keyunits */

      /* Simple case, keyunit ASAP */
      if (tmp->running_time == GST_CLOCK_TIME_NONE) {

      /* Event for before this frame */
      if (tmp->running_time <= running_time) {

      priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);

    GST_OBJECT_UNLOCK (encoder);

          gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,

      ev = gst_video_event_new_downstream_force_key_unit
          (frame->pts, stream_time, running_time,
          fevt->all_headers, fevt->count);

      gst_video_encoder_push_event (encoder, ev);

      if (fevt->all_headers)
        send_headers = TRUE;

      GST_DEBUG_OBJECT (encoder,
          "Forced key unit: running-time %" GST_TIME_FORMAT
          ", all_headers %d, count %u",
          GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
      forced_key_unit_event_free (fevt);

  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
    priv->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
    /* For keyframes, DTS = PTS */
    frame->dts = frame->pts;

    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);

  frame->distance_from_sync = priv->distance_from_sync;
  priv->distance_from_sync++;

  GST_BUFFER_TIMESTAMP (frame->output_buffer) = frame->pts;
  GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;

  /* update rate estimate */
  priv->bytes += gst_buffer_get_size (frame->output_buffer);
  if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
    priv->time += frame->duration;
    /* better none than nothing valid */
    priv->time = GST_CLOCK_TIME_NONE;

  if (G_UNLIKELY (send_headers || priv->new_headers)) {
    GList *tmp, *copy = NULL;

    GST_DEBUG_OBJECT (encoder, "Sending headers");

    /* First make all buffers metadata-writable */
    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);

      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
    g_list_free (priv->headers);
    priv->headers = copy;

    for (tmp = priv->headers; tmp; tmp = tmp->next) {
      GstBuffer *tmpbuf = GST_BUFFER (tmp->data);

      gst_buffer_ref (tmpbuf);
      priv->bytes += gst_buffer_get_size (tmpbuf);
      if (G_UNLIKELY (discont)) {
        GST_LOG_OBJECT (encoder, "marking discont");
        GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);

      gst_pad_push (encoder->srcpad, tmpbuf);
    priv->new_headers = FALSE;

  if (G_UNLIKELY (discont)) {
    GST_LOG_OBJECT (encoder, "marking discont");
    GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);

  if (encoder_class->pre_push)
    ret = encoder_class->pre_push (encoder, frame);

  if (ret == GST_FLOW_OK)
    ret = gst_pad_push (encoder->srcpad, frame->output_buffer);

  frame->output_buffer = NULL;

  priv->frames = g_list_remove (priv->frames, frame);

  gst_video_codec_frame_unref (frame);

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    GST_ERROR_OBJECT (encoder, "Output state was not configured");
    return GST_FLOW_ERROR;
/**
 * gst_video_encoder_get_output_state:
 * @encoder: a #GstVideoEncoder
 *
 * Get the current #GstVideoCodecState
 *
 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
 */
GstVideoCodecState *
gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
{
  GstVideoCodecState *state;

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  state = gst_video_codec_state_ref (encoder->priv->output_state);
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/**
 * gst_video_encoder_set_output_state:
 * @encoder: a #GstVideoEncoder
 * @caps: (transfer full): the #GstCaps to use for the output
 * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState
 *
 * Creates a new #GstVideoCodecState with the specified caps as the output state
 * for the encoder.
 * Any previously set output state on @encoder will be replaced by the newly
 * created one.
 *
 * The specified @caps should not contain any resolution, pixel-aspect-ratio,
 * framerate, codec-data, etc. Those should be specified instead in the returned
 * #GstVideoCodecState.
 *
 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
 * reference.
 *
 * If the subclass wishes to override some fields from the output state (like
 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
 *
 * The new output state will only take effect (set on pads and buffers) starting
 * from the next call to #gst_video_encoder_finish_frame().
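 *
 * A typical call from a (hypothetical) subclass @set_format could look like
 * this, with "video/x-mycodec" standing in for the real output media type:
 * |[
 * GstVideoCodecState *output_state;
 *
 * output_state = gst_video_encoder_set_output_state (encoder,
 *     gst_caps_new_empty_simple ("video/x-mycodec"), state);
 * // optionally tweak fields such as output_state->info.fps_n here
 * gst_video_codec_state_unref (output_state);
 * ]|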
 *
 * Returns: (transfer full): the newly configured output state.
 */
GstVideoCodecState *
gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
    GstVideoCodecState * reference)
{
  GstVideoEncoderPrivate *priv = encoder->priv;
  GstVideoCodecState *state;

  g_return_val_if_fail (caps != NULL, NULL);

  state = _new_output_state (caps, reference);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  if (priv->output_state)
    gst_video_codec_state_unref (priv->output_state);
  priv->output_state = gst_video_codec_state_ref (state);

  priv->output_state_changed = TRUE;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/**
 * gst_video_encoder_set_latency:
 * @encoder: a #GstVideoEncoder
 * @min_latency: minimum latency
 * @max_latency: maximum latency
 *
 * Informs the base class of the encoder's encoding latency.
 */
gst_video_encoder_set_latency (GstVideoEncoder * encoder,
    GstClockTime min_latency, GstClockTime max_latency)
{
  g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
  g_return_if_fail (max_latency >= min_latency);

  GST_OBJECT_LOCK (encoder);
  encoder->priv->min_latency = min_latency;
  encoder->priv->max_latency = max_latency;
  GST_OBJECT_UNLOCK (encoder);

  gst_element_post_message (GST_ELEMENT_CAST (encoder),
      gst_message_new_latency (GST_OBJECT_CAST (encoder)));
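
/* As a (hypothetical) example, an encoder with two frames of algorithmic
 * delay might report its latency from @set_format once the framerate is
 * known:
 *
 *   GstClockTime latency;
 *
 *   latency = gst_util_uint64_scale (2 * GST_SECOND, info->fps_d, info->fps_n);
 *   gst_video_encoder_set_latency (encoder, latency, latency);
 */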
/**
 * gst_video_encoder_get_latency:
 * @encoder: a #GstVideoEncoder
 * @min_latency: (out) (allow-none): the configured minimum latency
 * @max_latency: (out) (allow-none): the configured maximum latency
 *
 * Queries the configured encoding latency.
 */
gst_video_encoder_get_latency (GstVideoEncoder * encoder,
    GstClockTime * min_latency, GstClockTime * max_latency)
{
  GST_OBJECT_LOCK (encoder);
    *min_latency = encoder->priv->min_latency;
    *max_latency = encoder->priv->max_latency;
  GST_OBJECT_UNLOCK (encoder);
/**
 * gst_video_encoder_get_oldest_frame:
 * @encoder: a #GstVideoEncoder
 *
 * Get the oldest unfinished pending #GstVideoCodecFrame
 *
 * Returns: oldest unfinished pending #GstVideoCodecFrame
 */
GstVideoCodecFrame *
gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
{
  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  g = encoder->priv->frames;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  return (GstVideoCodecFrame *) (g->data);

/**
 * gst_video_encoder_get_frame:
 * @encoder: a #GstVideoEncoder
 * @frame_number: system_frame_number of a frame
 *
 * Get a pending unfinished #GstVideoCodecFrame
 *
 * Returns: (transfer none): pending unfinished #GstVideoCodecFrame identified by @frame_number.
 */
GstVideoCodecFrame *
gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
{
  GstVideoCodecFrame *frame = NULL;

  GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  for (g = encoder->priv->frames; g; g = g->next) {
    GstVideoCodecFrame *tmp = g->data;

    if (tmp->system_frame_number == frame_number) {

  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);