2 * Copyright (C) 2008 David Schleef <ds@schleef.org>
3 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
4 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
5 * Contact: Stefan Kost <stefan.kost@nokia.com>
6 * Copyright (C) 2012 Collabora Ltd.
7 * Author : Edward Hervey <edward@collabora.com>
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
26 * SECTION:gstvideoencoder
27 * @title: GstVideoEncoder
28 * @short_description: Base class for video encoders
30 * This base class is for video encoders turning raw video into
33 * GstVideoEncoder and subclass should cooperate as follows.
37 * * Initially, GstVideoEncoder calls @start when the encoder element
38 * is activated, which allows subclass to perform any global setup.
39 * * GstVideoEncoder calls @set_format to inform subclass of the format
40 * of input video data that it is about to receive. Subclass should
41 * setup for encoding and configure base class as appropriate
42 * (e.g. latency). While unlikely, it might be called more than once,
43 * if changing input parameters require reconfiguration. Baseclass
44 * will ensure that processing of current configuration is finished.
45 * * GstVideoEncoder calls @stop at end of all processing.
49 * * Base class collects input data and metadata into a frame and hands
50 * this to subclass' @handle_frame.
52 * * If codec processing results in encoded data, subclass should call
53 * @gst_video_encoder_finish_frame to have encoded data pushed
56 * * If implemented, baseclass calls subclass @pre_push just prior to
57 * pushing to allow subclasses to modify some metadata on the buffer.
58 * If it returns GST_FLOW_OK, the buffer is pushed downstream.
60 * * GstVideoEncoderClass will handle both srcpad and sinkpad events.
61 * Sink events will be passed to subclass if @event callback has been
66 * * GstVideoEncoder class calls @stop to inform the subclass that data
67 * parsing will be stopped.
69 * Subclass is responsible for providing pad template caps for
70 * source and sink pads. The pads need to be named "sink" and "src". It should
71 * also be able to provide fixed src pad caps in @getcaps by the time it calls
72 * @gst_video_encoder_finish_frame.
74 * Things that the subclass needs to take care of:
76 * * Provide pad templates
77 * * Provide source pad caps before pushing the first buffer
78 * * Accept data in @handle_frame and provide encoded results to
79 * @gst_video_encoder_finish_frame.
82 * The #GstVideoEncoder:qos property will enable the Quality-of-Service
83 * features of the encoder which gather statistics about the real-time
84 * performance of the downstream elements. If enabled, subclasses can
85 * use gst_video_encoder_get_max_encode_time() to check if input frames
86 * are already late and drop them right away to give a chance to the
87 * pipeline to catch up.
96 * * Calculate actual latency based on input/output timestamp/frame_number
97 * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
100 #include <gst/video/video.h>
101 #include "gstvideoencoder.h"
102 #include "gstvideoutils.h"
103 #include "gstvideoutilsprivate.h"
105 #include <gst/video/gstvideometa.h>
106 #include <gst/video/gstvideopool.h>
/* Debug category used by all GST_DEBUG/GST_LOG calls in this file. */
110 GST_DEBUG_CATEGORY (videoencoder_debug);
111 #define GST_CAT_DEFAULT videoencoder_debug
/* Default property values: QoS handling off, no minimum spacing between
 * force-keyunit requests. */
115 #define DEFAULT_QOS FALSE
116 #define DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL 0
/* Property id for the "min-force-key-unit-interval" property (the enclosing
 * enum's other members are not visible in this excerpt). */
122 PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
/* Instance-private state of GstVideoEncoder.
 * Locking: fields carry inline annotations where a specific lock protects
 * them (OBJECT_LOCK vs. the encoder stream lock); unannotated fields are
 * accessed with the stream lock held. */
126 struct _GstVideoEncoderPrivate
128 guint64 presentation_frame_number;
129 int distance_from_sync;
131 /* FIXME : (and introduce a context ?) */
137 /* FIXME 2.0: Use a GQueue or similar, see GstVideoCodecFrame::events */
/* Serialized events received before/with the next frame; attached to that
 * frame and pushed just before its output buffer. */
138 GList *current_frame_events;
141 gboolean new_headers; /* Whether new headers were just set */
143 GQueue force_key_unit; /* List of pending forced keyunits */
144 GstClockTime min_force_key_unit_interval;
145 GstClockTime last_force_key_unit_request;
146 GstClockTime last_key_unit;
/* Monotonic counter assigned to incoming frames. */
148 guint32 system_frame_number;
150 GQueue frames; /* Protected with OBJECT_LOCK */
151 GstVideoCodecState *input_state;
152 GstVideoCodecState *output_state;
153 gboolean output_state_changed;
/* Allocator/params negotiated via the ALLOCATION query. */
158 GstAllocator *allocator;
159 GstAllocationParams params;
161 /* upstream stream tags (global tags are passed through as-is) */
162 GstTagList *upstream_tags;
166 GstTagMergeMode tags_merge_mode;
168 gboolean tags_changed;
170 GstClockTime min_pts;
171 /* adjustment needed on pts, dts, segment start and stop to accommodate
173 GstClockTime time_adjustment;
/* QoS state gathered from downstream QOS events. */
176 gint qos_enabled; /* ATOMIC */
177 gdouble proportion; /* OBJECT_LOCK */
178 GstClockTime earliest_time; /* OBJECT_LOCK */
179 GstClockTime qos_frame_duration; /* OBJECT_LOCK */
180 /* qos messages: frames dropped/processed */
/* Bookkeeping for one queued force-key-unit request; entries live in
 * priv->force_key_unit until the matching frame is handled. */
185 typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
186 struct _ForcedKeyUnitEvent
/* Running time at which the keyframe was requested; GST_CLOCK_TIME_NONE
 * means "as soon as possible". */
188 GstClockTime running_time;
189 gboolean pending; /* TRUE if this was requested already */
190 gboolean all_headers;
/* Release a ForcedKeyUnitEvent allocated with g_slice_new0(). */
196 forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
198 g_slice_free (ForcedKeyUnitEvent, evt);
/* Allocate a ForcedKeyUnitEvent for @running_time. Caller owns the result
 * and frees it with forced_key_unit_event_free().
 * NOTE(review): only running_time and all_headers assignments are visible
 * in this excerpt; the remaining field initialization relies on
 * g_slice_new0() zeroing. */
201 static ForcedKeyUnitEvent *
202 forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
205 ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
207 evt->running_time = running_time;
208 evt->all_headers = all_headers;
/* GCompareDataFunc used to keep priv->force_key_unit sorted: requests are
 * ordered by ascending running time, with GST_CLOCK_TIME_NONE ("ASAP")
 * entries first, and — at equal times — pending requests before
 * not-yet-pending ones. */
215 forced_key_unit_event_compare (const ForcedKeyUnitEvent * a,
216 const ForcedKeyUnitEvent * b, gpointer user_data)
218 if (a->running_time == b->running_time) {
219 /* Sort pending ones before non-pending ones */
220 if (a->pending && !b->pending)
222 if (!a->pending && b->pending)
/* NONE sorts before any valid running time. */
227 if (a->running_time == GST_CLOCK_TIME_NONE)
229 if (b->running_time == GST_CLOCK_TIME_NONE)
231 if (a->running_time < b->running_time)
/* GType plumbing: parent class pointer and the adjusted private-data
 * offset, both filled in during type/class initialization. */
236 static GstElementClass *parent_class = NULL;
237 static gint private_offset = 0;
239 /* cached quark to avoid contention on the global quark table lock */
240 #define META_TAG_VIDEO meta_tag_video_quark
241 static GQuark meta_tag_video_quark;
/* Forward declarations for all static functions defined below. */
243 static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
244 static void gst_video_encoder_init (GstVideoEncoder * enc,
245 GstVideoEncoderClass * klass);
247 static void gst_video_encoder_finalize (GObject * object);
249 static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
251 static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
253 static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
255 static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
257 static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
259 static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
260 element, GstStateChange transition);
261 static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
263 static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
265 static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
266 encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
267 GstClockTime duration);
269 static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
271 static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
273 static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
274 encoder, GstQuery * query);
275 static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
276 encoder, GstQuery * query);
277 static gboolean gst_video_encoder_negotiate_default (GstVideoEncoder * encoder);
278 static gboolean gst_video_encoder_negotiate_unlocked (GstVideoEncoder *
281 static gboolean gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
283 static gboolean gst_video_encoder_src_query_default (GstVideoEncoder * encoder,
286 static gboolean gst_video_encoder_transform_meta_default (GstVideoEncoder *
287 encoder, GstVideoCodecFrame * frame, GstMeta * meta);
289 /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
290 * method to get to the padtemplates */
/* Register and return the GstVideoEncoder GType (thread-safe one-time
 * registration via g_once_init_enter/leave). The type is abstract, carries
 * the private struct, and implements the GstPreset interface. */
292 gst_video_encoder_get_type (void)
294 static gsize type = 0;
296 if (g_once_init_enter (&type)) {
298 static const GTypeInfo info = {
299 sizeof (GstVideoEncoderClass),
302 (GClassInitFunc) gst_video_encoder_class_init,
305 sizeof (GstVideoEncoder),
307 (GInstanceInitFunc) gst_video_encoder_init,
/* GstPreset is implemented with no custom vfuncs. */
309 const GInterfaceInfo preset_interface_info = {
310 NULL, /* interface_init */
311 NULL, /* interface_finalize */
312 NULL /* interface_data */
315 _type = g_type_register_static (GST_TYPE_ELEMENT,
316 "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
318 g_type_add_instance_private (_type, sizeof (GstVideoEncoderPrivate));
319 g_type_add_interface_static (_type, GST_TYPE_PRESET,
320 &preset_interface_info);
321 g_once_init_leave (&type, _type);
/* Return a pointer to the instance-private struct, located at the offset
 * recorded by g_type_add_instance_private()/class-init adjustment. */
326 static inline GstVideoEncoderPrivate *
327 gst_video_encoder_get_instance_private (GstVideoEncoder * self)
329 return (G_STRUCT_MEMBER_P (self, private_offset));
/* GObject set_property vfunc: dispatches "qos" and
 * "min-force-key-unit-interval" to their dedicated setters. */
333 gst_video_encoder_set_property (GObject * object, guint prop_id,
334 const GValue * value, GParamSpec * pspec)
336 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
340 gst_video_encoder_set_qos_enabled (sink, g_value_get_boolean (value));
342 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
343 gst_video_encoder_set_min_force_key_unit_interval (sink,
344 g_value_get_uint64 (value));
347 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject get_property vfunc: mirrors set_property via the public
 * getters. */
353 gst_video_encoder_get_property (GObject * object, guint prop_id, GValue * value,
356 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
360 g_value_set_boolean (value, gst_video_encoder_is_qos_enabled (sink));
362 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
363 g_value_set_uint64 (value,
364 gst_video_encoder_get_min_force_key_unit_interval (sink));
367 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Class initializer: wires GObject property/finalize handling, the element
 * state-change vfunc, the default implementations of all subclass hooks,
 * installs the "qos" and "min-force-key-unit-interval" properties, and
 * caches the video meta-tag quark. */
373 gst_video_encoder_class_init (GstVideoEncoderClass * klass)
375 GObjectClass *gobject_class;
376 GstElementClass *gstelement_class;
378 gobject_class = G_OBJECT_CLASS (klass);
379 gstelement_class = GST_ELEMENT_CLASS (klass);
381 GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
382 "Base Video Encoder");
384 parent_class = g_type_class_peek_parent (klass);
/* Resolve the private-struct offset registered in get_type(). */
386 if (private_offset != 0)
387 g_type_class_adjust_private_offset (klass, &private_offset);
389 gobject_class->set_property = gst_video_encoder_set_property;
390 gobject_class->get_property = gst_video_encoder_get_property;
391 gobject_class->finalize = gst_video_encoder_finalize;
393 gstelement_class->change_state =
394 GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);
/* Default subclass hooks; subclasses may override any of these. */
396 klass->sink_event = gst_video_encoder_sink_event_default;
397 klass->src_event = gst_video_encoder_src_event_default;
398 klass->propose_allocation = gst_video_encoder_propose_allocation_default;
399 klass->decide_allocation = gst_video_encoder_decide_allocation_default;
400 klass->negotiate = gst_video_encoder_negotiate_default;
401 klass->sink_query = gst_video_encoder_sink_query_default;
402 klass->src_query = gst_video_encoder_src_query_default;
403 klass->transform_meta = gst_video_encoder_transform_meta_default;
405 g_object_class_install_property (gobject_class, PROP_QOS,
406 g_param_spec_boolean ("qos", "Qos",
407 "Handle Quality-of-Service events from downstream", DEFAULT_QOS,
408 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
411 * GstVideoEncoder:min-force-key-unit-interval:
413 * Minimum interval between force-keyunit requests in nanoseconds. See
414 * gst_video_encoder_set_min_force_key_unit_interval() for more details.
418 g_object_class_install_property (gobject_class,
419 PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
420 g_param_spec_uint64 ("min-force-key-unit-interval",
421 "Minimum Force Keyunit Interval",
422 "Minimum interval between force-keyunit requests in nanoseconds", 0,
423 G_MAXUINT64, DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL,
424 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
426 meta_tag_video_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_STR);
/* Dispose of a list of pending serialized events on flush: sticky events
 * other than EOS/SEGMENT are re-stored on @pad so they survive the flush,
 * then every event is unreffed and the list freed. */
430 _flush_events (GstPad * pad, GList * events)
434 for (tmp = events; tmp; tmp = tmp->next) {
435 if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
436 GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
437 GST_EVENT_IS_STICKY (tmp->data)) {
438 gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
440 gst_event_unref (tmp->data);
442 g_list_free (events);
/* Compatibility shim: g_queue_clear_full() only exists since GLib 2.60;
 * on older GLib, pop every element and free it with @free_func. */
447 #if !GLIB_CHECK_VERSION(2, 60, 0)
448 #define g_queue_clear_full queue_clear_full
450 queue_clear_full (GQueue * queue, GDestroyNotify free_func)
454 while ((data = g_queue_pop_head (queue)) != NULL)
/* Reset encoder state; @hard selects a full reset (state change) versus a
 * soft one (flush). Takes the stream lock for the whole operation and the
 * object lock around the OBJECT_LOCK-protected fields. */
460 gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard)
462 GstVideoEncoderPrivate *priv = encoder->priv;
465 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
467 priv->presentation_frame_number = 0;
468 priv->distance_from_sync = 0;
/* Drop all queued force-keyunit requests. */
470 g_queue_clear_full (&priv->force_key_unit,
471 (GDestroyNotify) forced_key_unit_event_free);
472 priv->last_force_key_unit_request = GST_CLOCK_TIME_NONE;
473 priv->last_key_unit = GST_CLOCK_TIME_NONE;
475 priv->drained = TRUE;
477 GST_OBJECT_LOCK (encoder);
480 GST_OBJECT_UNLOCK (encoder);
482 priv->time_adjustment = GST_CLOCK_TIME_NONE;
485 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
486 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
488 if (priv->input_state)
489 gst_video_codec_state_unref (priv->input_state);
490 priv->input_state = NULL;
491 if (priv->output_state)
492 gst_video_codec_state_unref (priv->output_state);
493 priv->output_state = NULL;
495 if (priv->upstream_tags) {
496 gst_tag_list_unref (priv->upstream_tags);
497 priv->upstream_tags = NULL;
500 gst_tag_list_unref (priv->tags);
502 priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
503 priv->tags_changed = FALSE;
/* NOTE(review): priv->headers holds GstBuffers (see
 * gst_video_encoder_set_headers(), which frees them with
 * gst_buffer_unref); freeing them here with gst_event_unref looks wrong —
 * confirm against upstream, which uses gst_buffer_unref. */
505 g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL);
506 g_list_free (priv->headers);
507 priv->headers = NULL;
508 priv->new_headers = FALSE;
510 if (priv->allocator) {
511 gst_object_unref (priv->allocator);
512 priv->allocator = NULL;
/* Pending per-frame events are flushed, not lost: sticky ones are
 * re-stored on the srcpad by _flush_events(). */
515 g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
516 g_list_free (priv->current_frame_events);
517 priv->current_frame_events = NULL;
519 GST_OBJECT_LOCK (encoder);
520 priv->proportion = 0.5;
521 priv->earliest_time = GST_CLOCK_TIME_NONE;
522 priv->qos_frame_duration = 0;
523 GST_OBJECT_UNLOCK (encoder);
530 for (l = priv->frames.head; l; l = l->next) {
531 GstVideoCodecFrame *frame = l->data;
533 frame->events = _flush_events (encoder->srcpad, frame->events);
535 priv->current_frame_events = _flush_events (encoder->srcpad,
536 encoder->priv->current_frame_events);
539 g_queue_clear_full (&priv->frames,
540 (GDestroyNotify) gst_video_codec_frame_unref);
542 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
547 /* Always call reset() in one way or another after this */
/* Invoke the subclass' flush vfunc (if any) so it can discard its
 * internal codec state. */
549 gst_video_encoder_flush (GstVideoEncoder * encoder)
551 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
555 ret = klass->flush (encoder);
/* Instance initializer: creates sink/src pads from the subclass' pad
 * templates (which must be named "sink" and "src"), installs the pad
 * functions, initializes segments/locks/queues and performs an initial
 * hard reset. */
561 gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
563 GstVideoEncoderPrivate *priv;
564 GstPadTemplate *pad_template;
567 GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");
569 priv = encoder->priv = gst_video_encoder_get_instance_private (encoder);
/* Sink pad: subclass must have provided a "sink" template. */
572 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
573 g_return_if_fail (pad_template != NULL);
575 encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");
577 gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
578 gst_pad_set_event_function (pad,
579 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
580 gst_pad_set_query_function (pad,
581 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
582 gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
/* Src pad: subclass must have provided a "src" template. */
585 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
586 g_return_if_fail (pad_template != NULL);
588 encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
590 gst_pad_set_query_function (pad,
591 GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
592 gst_pad_set_event_function (pad,
593 GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
594 gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);
596 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
597 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
599 g_rec_mutex_init (&encoder->stream_lock);
601 priv->headers = NULL;
602 priv->new_headers = FALSE;
604 g_queue_init (&priv->frames);
605 g_queue_init (&priv->force_key_unit);
607 priv->min_latency = 0;
608 priv->max_latency = 0;
609 priv->min_pts = GST_CLOCK_TIME_NONE;
610 priv->time_adjustment = GST_CLOCK_TIME_NONE;
/* Bring all remaining state to its initial values. */
612 gst_video_encoder_reset (encoder, TRUE);
616 * gst_video_encoder_set_headers:
617 * @encoder: a #GstVideoEncoder
618 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
620 * Set the codec headers to be sent downstream whenever requested.
/* Takes ownership of @headers, replacing (and unreffing) any previously
 * set header buffers; marks them as new so they get pushed again. */
623 gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
625 GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);
627 GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
628 if (video_encoder->priv->headers) {
629 g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
631 g_list_free (video_encoder->priv->headers);
633 video_encoder->priv->headers = headers;
634 video_encoder->priv->new_headers = TRUE;
636 GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
/* Build a fresh output GstVideoCodecState for @caps. The video info is set
 * to GST_VIDEO_FORMAT_ENCODED and display-related fields (geometry,
 * framerate, colorimetry, interlacing, multiview, HDR metadata) are copied
 * from the input @reference state when one is given. */
639 static GstVideoCodecState *
640 _new_output_state (GstCaps * caps, GstVideoCodecState * reference)
642 GstVideoCodecState *state;
644 state = g_slice_new0 (GstVideoCodecState);
645 state->ref_count = 1;
646 gst_video_info_init (&state->info);
/* Encoded output has no raw-frame layout; failure here aborts state
 * creation. */
648 if (!gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0)) {
649 g_slice_free (GstVideoCodecState, state);
656 GstVideoInfo *tgt, *ref;
659 ref = &reference->info;
661 /* Copy over extra fields from reference state */
662 tgt->interlace_mode = ref->interlace_mode;
663 tgt->flags = ref->flags;
664 tgt->width = ref->width;
665 tgt->height = ref->height;
666 tgt->chroma_site = ref->chroma_site;
667 tgt->colorimetry = ref->colorimetry;
668 tgt->par_n = ref->par_n;
669 tgt->par_d = ref->par_d;
670 tgt->fps_n = ref->fps_n;
671 tgt->fps_d = ref->fps_d;
673 GST_VIDEO_INFO_FIELD_ORDER (tgt) = GST_VIDEO_INFO_FIELD_ORDER (ref);
675 GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref);
676 GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref);
/* HDR metadata is deep-copied so the new state owns its own copies. */
678 if (reference->mastering_display_info) {
679 state->mastering_display_info = g_slice_dup (GstVideoMasteringDisplayInfo,
680 reference->mastering_display_info);
682 if (reference->content_light_level) {
683 state->content_light_level = g_slice_dup (GstVideoContentLightLevel,
684 reference->content_light_level);
/* Build an input GstVideoCodecState from sink @caps: parses the caps into
 * a GstVideoInfo and extracts optional HDR metadata
 * ("mastering-display-info", "content-light-level") from the first caps
 * structure. Unparseable caps free the state (error path at the end). */
691 static GstVideoCodecState *
692 _new_input_state (GstCaps * caps)
694 GstVideoCodecState *state;
695 GstStructure *c_struct;
698 state = g_slice_new0 (GstVideoCodecState);
699 state->ref_count = 1;
700 gst_video_info_init (&state->info);
701 if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
703 state->caps = gst_caps_ref (caps);
705 c_struct = gst_caps_get_structure (caps, 0);
707 if ((s = gst_structure_get_string (c_struct, "mastering-display-info"))) {
708 state->mastering_display_info = g_slice_new (GstVideoMasteringDisplayInfo);
709 gst_video_mastering_display_info_from_string (state->mastering_display_info,
712 if ((s = gst_structure_get_string (c_struct, "content-light-level"))) {
713 state->content_light_level = g_slice_new (GstVideoContentLightLevel);
714 gst_video_content_light_level_from_string (state->content_light_level, s);
/* Error path: caps could not be parsed into a video info. */
721 g_slice_free (GstVideoCodecState, state);
/* Handle new sink caps: short-circuits when caps (or the parsed video
 * info) are unchanged, otherwise builds a new input state, gives the
 * deprecated reset() vfunc a chance to run, and hands the state to the
 * subclass' set_format(). On acceptance the new state replaces
 * priv->input_state; on rejection it is dropped and a warning logged. */
727 gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
729 GstVideoEncoderClass *encoder_class;
730 GstVideoCodecState *state;
733 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
735 GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);
737 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* Fast path: identical caps need no reconfiguration. */
739 if (encoder->priv->input_state) {
740 GST_DEBUG_OBJECT (encoder,
741 "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
742 encoder->priv->input_state->caps, caps);
743 if (gst_caps_is_equal (encoder->priv->input_state->caps, caps))
744 goto caps_not_changed;
747 state = _new_input_state (caps);
748 if (G_UNLIKELY (!state))
/* Caps differ textually but describe the same video format. */
751 if (encoder->priv->input_state
752 && gst_video_info_is_equal (&state->info,
753 &encoder->priv->input_state->info)) {
754 gst_video_codec_state_unref (state);
755 goto caps_not_changed;
758 if (encoder_class->reset) {
759 GST_FIXME_OBJECT (encoder, "GstVideoEncoder::reset() is deprecated");
760 encoder_class->reset (encoder, TRUE);
763 /* and subclass should be ready to configure format at any time around */
764 if (encoder_class->set_format != NULL)
765 ret = encoder_class->set_format (encoder, state);
768 if (encoder->priv->input_state)
769 gst_video_codec_state_unref (encoder->priv->input_state);
770 encoder->priv->input_state = state;
772 gst_video_codec_state_unref (state);
775 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
778 GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);
/* caps_not_changed: nothing to do, success. */
784 GST_DEBUG_OBJECT (encoder, "Caps did not change - ignore");
785 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* parse failure path */
792 GST_WARNING_OBJECT (encoder, "Failed to parse caps");
793 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
799 * gst_video_encoder_proxy_getcaps:
800 * @enc: a #GstVideoEncoder
801 * @caps: (allow-none): initial caps
802 * @filter: (allow-none): filter caps
804 * Returns caps that express @caps (or sink template caps if @caps == NULL)
805 * restricted to resolution/format/... combinations supported by downstream
806 * elements (e.g. muxers).
808 * Returns: (transfer full): a #GstCaps owned by caller
/* Thin wrapper around the shared video-element getcaps proxy helper. */
811 gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
814 return __gst_video_element_proxy_getcaps (GST_ELEMENT_CAST (encoder),
815 GST_VIDEO_ENCODER_SINK_PAD (encoder),
816 GST_VIDEO_ENCODER_SRC_PAD (encoder), caps, filter);
/* Compute sink caps: use the subclass' getcaps vfunc when provided,
 * otherwise fall back to the proxy-getcaps helper (template caps
 * restricted by downstream). Returns caps owned by the caller. */
820 gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
822 GstVideoEncoderClass *klass;
825 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
828 caps = klass->getcaps (encoder, filter);
830 caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
832 GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
/* Default decide_allocation: reuse the first allocator/params the peer
 * proposed in the ALLOCATION query (updating that slot), or add default
 * allocation params when the peer proposed none.
 * NOTE(review): every "¶ms" below is a mojibake of "&params" introduced
 * by a broken text conversion — restore before compiling. */
838 gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
841 GstAllocator *allocator = NULL;
842 GstAllocationParams params;
843 gboolean update_allocator;
845 /* we got configuration from our peer or the decide_allocation method,
847 if (gst_query_get_n_allocation_params (query) > 0) {
848 /* try the allocator */
849 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
850 update_allocator = TRUE;
853 gst_allocation_params_init (¶ms);
854 update_allocator = FALSE;
857 if (update_allocator)
858 gst_query_set_nth_allocation_param (query, 0, allocator, ¶ms);
860 gst_query_add_allocation_param (query, allocator, ¶ms);
862 gst_object_unref (allocator);
/* Default propose_allocation: when upstream proposed no pool, offer a
 * GstVideoBufferPool sized from the negotiated caps (with 16-byte-aligned
 * default params) and advertise GstVideoMeta support.
 * NOTE(review): every "¶ms" below is a mojibake of "&params" introduced
 * by a broken text conversion — restore before compiling. */
868 gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
876 gst_query_parse_allocation (query, &caps, NULL);
881 if (!gst_video_info_from_caps (&info, caps))
884 size = GST_VIDEO_INFO_SIZE (&info);
886 if (gst_query_get_n_allocation_pools (query) == 0) {
887 GstStructure *structure;
888 GstAllocator *allocator = NULL;
/* align = 15 => 16-byte alignment for the default raw-video params. */
889 GstAllocationParams params = { 0, 15, 0, 0 };
891 if (gst_query_get_n_allocation_params (query) > 0)
892 gst_query_parse_nth_allocation_param (query, 0, &allocator, ¶ms);
894 gst_query_add_allocation_param (query, allocator, ¶ms);
896 pool = gst_video_buffer_pool_new ();
898 structure = gst_buffer_pool_get_config (pool);
899 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
900 gst_buffer_pool_config_set_allocator (structure, allocator, ¶ms);
903 gst_object_unref (allocator);
905 if (!gst_buffer_pool_set_config (pool, structure))
908 gst_query_add_allocation_pool (query, pool, size, 0, 0);
909 gst_object_unref (pool);
910 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
/* config_failed error path */
918 GST_ERROR_OBJECT (encoder, "failed to set config");
919 gst_object_unref (pool);
/* Default sink-pad query handler: answers CAPS via sink_getcaps, CONVERT
 * via the raw-video conversion helper (only when an input state exists),
 * ALLOCATION via the subclass' propose_allocation vfunc, and forwards
 * everything else to the default pad query handler. */
925 gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
928 GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
929 gboolean res = FALSE;
931 switch (GST_QUERY_TYPE (query)) {
934 GstCaps *filter, *caps;
936 gst_query_parse_caps (query, &filter);
937 caps = gst_video_encoder_sink_getcaps (encoder, filter);
938 gst_query_set_caps_result (query, caps);
939 gst_caps_unref (caps);
943 case GST_QUERY_CONVERT:
945 GstFormat src_fmt, dest_fmt;
946 gint64 src_val, dest_val;
948 GST_DEBUG_OBJECT (encoder, "convert query");
950 gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
/* input_state is read under the object lock. */
951 GST_OBJECT_LOCK (encoder);
952 if (encoder->priv->input_state != NULL)
953 res = __gst_video_rawvideo_convert (encoder->priv->input_state,
954 src_fmt, src_val, &dest_fmt, &dest_val);
957 GST_OBJECT_UNLOCK (encoder);
960 gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
963 case GST_QUERY_ALLOCATION:
965 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
967 if (klass->propose_allocation)
968 res = klass->propose_allocation (encoder, query);
972 res = gst_pad_query_default (pad, GST_OBJECT (encoder), query);
/* error label: query could not be answered */
978 GST_DEBUG_OBJECT (encoder, "query failed");
/* Sink-pad query function installed on the pad: logs the query and
 * delegates to the subclass' sink_query vfunc (the default implementation
 * is gst_video_encoder_sink_query_default). */
983 gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
986 GstVideoEncoder *encoder;
987 GstVideoEncoderClass *encoder_class;
988 gboolean ret = FALSE;
990 encoder = GST_VIDEO_ENCODER (parent);
991 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
993 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
994 GST_QUERY_TYPE_NAME (query));
996 if (encoder_class->sink_query)
997 ret = encoder_class->sink_query (encoder, query);
/* GObject finalize: tears down the stream lock, drops the negotiated
 * allocator if still held, then chains up to the parent class. */
1003 gst_video_encoder_finalize (GObject * object)
1005 GstVideoEncoder *encoder;
1007 GST_DEBUG_OBJECT (object, "finalize");
1009 encoder = GST_VIDEO_ENCODER (object);
1010 g_rec_mutex_clear (&encoder->stream_lock);
1012 if (encoder->priv->allocator) {
1013 gst_object_unref (encoder->priv->allocator);
1014 encoder->priv->allocator = NULL;
1017 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Push an event on the source pad. SEGMENT events get special handling:
 * the segment must be in TIME format, priv->time_adjustment (when valid)
 * is applied to start/position/stop, the adjusted segment is stored as
 * the encoder's output segment, and a fresh SEGMENT event is pushed in
 * place of the original. */
1021 gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
1023 switch (GST_EVENT_TYPE (event)) {
1024 case GST_EVENT_SEGMENT:
1028 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1030 gst_event_copy_segment (event, &segment);
1032 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
1034 if (segment.format != GST_FORMAT_TIME) {
1035 GST_DEBUG_OBJECT (encoder, "received non TIME segment");
1036 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Shift the segment by the same adjustment applied to timestamps. */
1040 if (encoder->priv->time_adjustment != GST_CLOCK_TIME_NONE) {
1041 segment.start += encoder->priv->time_adjustment;
1042 if (GST_CLOCK_TIME_IS_VALID (segment.position)) {
1043 segment.position += encoder->priv->time_adjustment;
1045 if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
1046 segment.stop += encoder->priv->time_adjustment;
1050 encoder->output_segment = segment;
1051 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Replace the original event with one built from the stored segment. */
1053 gst_event_unref (event);
1054 event = gst_event_new_segment (&encoder->output_segment);
1062 return gst_pad_push_event (encoder->srcpad, event);
/* Build a TAG event from upstream stream tags merged with encoder tags
 * using the configured merge mode. Returns NULL (no event needed) when
 * the merge yields nothing or an empty tag list. */
1066 gst_video_encoder_create_merged_tags_event (GstVideoEncoder * enc)
1068 GstTagList *merged_tags;
1070 GST_LOG_OBJECT (enc, "upstream : %" GST_PTR_FORMAT, enc->priv->upstream_tags);
1071 GST_LOG_OBJECT (enc, "encoder : %" GST_PTR_FORMAT, enc->priv->tags);
1072 GST_LOG_OBJECT (enc, "mode : %d", enc->priv->tags_merge_mode);
1075 gst_tag_list_merge (enc->priv->upstream_tags, enc->priv->tags,
1076 enc->priv->tags_merge_mode);
1078 GST_DEBUG_OBJECT (enc, "merged : %" GST_PTR_FORMAT, merged_tags);
1080 if (merged_tags == NULL)
1083 if (gst_tag_list_is_empty (merged_tags)) {
1084 gst_tag_list_unref (merged_tags);
/* gst_event_new_tag takes ownership of merged_tags. */
1088 return gst_event_new_tag (merged_tags);
/* If tags changed since the last push, emit a freshly merged TAG event
 * downstream and clear the dirty flag. */
1092 gst_video_encoder_check_and_push_tags (GstVideoEncoder * encoder)
1094 if (encoder->priv->tags_changed) {
1095 GstEvent *tags_event;
1097 tags_event = gst_video_encoder_create_merged_tags_event (encoder);
1099 if (tags_event != NULL)
1100 gst_video_encoder_push_event (encoder, tags_event);
1102 encoder->priv->tags_changed = FALSE;
1107 gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
1110 GstVideoEncoderClass *encoder_class;
1111 gboolean ret = FALSE;
1113 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1115 switch (GST_EVENT_TYPE (event)) {
1116 case GST_EVENT_CAPS:
1120 gst_event_parse_caps (event, &caps);
1121 ret = gst_video_encoder_setcaps (encoder, caps);
1123 gst_event_unref (event);
1129 GstFlowReturn flow_ret;
1131 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1133 if (encoder_class->finish) {
1134 flow_ret = encoder_class->finish (encoder);
1136 flow_ret = GST_FLOW_OK;
1139 if (encoder->priv->current_frame_events) {
1142 for (l = g_list_last (encoder->priv->current_frame_events); l;
1143 l = g_list_previous (l)) {
1144 GstEvent *event = GST_EVENT (l->data);
1146 gst_video_encoder_push_event (encoder, event);
1149 g_list_free (encoder->priv->current_frame_events);
1150 encoder->priv->current_frame_events = NULL;
1152 gst_video_encoder_check_and_push_tags (encoder);
1154 ret = (flow_ret == GST_FLOW_OK);
1155 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1158 case GST_EVENT_SEGMENT:
1162 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1164 gst_event_copy_segment (event, &segment);
1166 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
1168 if (segment.format != GST_FORMAT_TIME) {
1169 GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
1170 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1174 encoder->input_segment = segment;
1176 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1179 case GST_EVENT_CUSTOM_DOWNSTREAM:
1181 if (gst_video_event_is_force_key_unit (event)) {
1182 GstClockTime running_time;
1183 gboolean all_headers;
1186 if (gst_video_event_parse_downstream_force_key_unit (event,
1187 NULL, NULL, &running_time, &all_headers, &count)) {
1188 ForcedKeyUnitEvent *fevt;
1190 GST_OBJECT_LOCK (encoder);
1191 fevt = forced_key_unit_event_new (running_time, all_headers, count);
1192 g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
1193 (GCompareDataFunc) forced_key_unit_event_compare, NULL);
1194 GST_OBJECT_UNLOCK (encoder);
1196 GST_DEBUG_OBJECT (encoder,
1197 "force-key-unit event: running-time %" GST_TIME_FORMAT
1198 ", all_headers %d, count %u",
1199 GST_TIME_ARGS (running_time), all_headers, count);
1201 gst_event_unref (event);
1207 case GST_EVENT_STREAM_START:
1209 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1210 /* Flush upstream tags after a STREAM_START */
1211 GST_DEBUG_OBJECT (encoder, "STREAM_START, clearing upstream tags");
1212 if (encoder->priv->upstream_tags) {
1213 gst_tag_list_unref (encoder->priv->upstream_tags);
1214 encoder->priv->upstream_tags = NULL;
1215 encoder->priv->tags_changed = TRUE;
1217 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1224 gst_event_parse_tag (event, &tags);
1226 if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
1227 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1228 if (encoder->priv->upstream_tags != tags) {
1229 tags = gst_tag_list_copy (tags);
1231 /* FIXME: make generic based on GST_TAG_FLAG_ENCODED */
1232 gst_tag_list_remove_tag (tags, GST_TAG_CODEC);
1233 gst_tag_list_remove_tag (tags, GST_TAG_AUDIO_CODEC);
1234 gst_tag_list_remove_tag (tags, GST_TAG_VIDEO_CODEC);
1235 gst_tag_list_remove_tag (tags, GST_TAG_SUBTITLE_CODEC);
1236 gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
1237 gst_tag_list_remove_tag (tags, GST_TAG_BITRATE);
1238 gst_tag_list_remove_tag (tags, GST_TAG_NOMINAL_BITRATE);
1239 gst_tag_list_remove_tag (tags, GST_TAG_MAXIMUM_BITRATE);
1240 gst_tag_list_remove_tag (tags, GST_TAG_MINIMUM_BITRATE);
1241 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER);
1242 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER_VERSION);
1244 if (encoder->priv->upstream_tags)
1245 gst_tag_list_unref (encoder->priv->upstream_tags);
1246 encoder->priv->upstream_tags = tags;
1247 GST_INFO_OBJECT (encoder, "upstream tags: %" GST_PTR_FORMAT, tags);
1249 gst_event_unref (event);
1250 event = gst_video_encoder_create_merged_tags_event (encoder);
1251 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1257 case GST_EVENT_FLUSH_STOP:{
1258 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1259 gst_video_encoder_flush (encoder);
1260 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
1261 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
1262 gst_video_encoder_reset (encoder, FALSE);
1263 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1270 /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
1271 * For EOS this is required because no buffer or serialized event
1272 * will come after EOS and nothing could trigger another
1273 * _finish_frame() call.
1274 * If the subclass handles sending of EOS manually it can simply
1275 * not chain up to the parent class' event handler
1277 * For FLUSH_STOP this is required because it is expected
1278 * to be forwarded immediately and no buffers are queued anyway.
1281 if (!GST_EVENT_IS_SERIALIZED (event)
1282 || GST_EVENT_TYPE (event) == GST_EVENT_EOS
1283 || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
1284 ret = gst_video_encoder_push_event (encoder, event);
1286 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1287 encoder->priv->current_frame_events =
1288 g_list_prepend (encoder->priv->current_frame_events, event);
1289 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Sink-pad event entry point: logs the event, then delegates to the
 * sink_event vfunc (the default is gst_video_encoder_sink_event_default
 * unless a subclass overrode it).
 * NOTE(review): the trailing "return ret;" / closing brace lie in lines
 * elided from this dump (embedded-numbering gap after 1312). */
1298 gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
1301 GstVideoEncoder *enc;
1302 GstVideoEncoderClass *klass;
1303 gboolean ret = TRUE;
1305 enc = GST_VIDEO_ENCODER (parent);
1306 klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
1308 GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
1309 GST_EVENT_TYPE_NAME (event));
1311 if (klass->sink_event)
1312 ret = klass->sink_event (enc, event);
/* Default src-pad (upstream) event handler.
 * Queues force-key-unit requests for matching against future frames and
 * records QoS feedback (proportion / earliest_time) used for frame
 * dropping; other events fall through to gst_pad_event_default().
 * NOTE(review): several branch/closing lines are elided from this dump
 * (gaps in the embedded numbering); only visible lines were changed. */
1318 gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
1321 gboolean ret = FALSE;
1322 GstVideoEncoderPrivate *priv = encoder->priv;
1324 switch (GST_EVENT_TYPE (event)) {
1325 case GST_EVENT_CUSTOM_UPSTREAM:
1327 if (gst_video_event_is_force_key_unit (event)) {
1328 GstClockTime running_time;
1329 gboolean all_headers;
1332 if (gst_video_event_parse_upstream_force_key_unit (event,
1333 &running_time, &all_headers, &count)) {
1334 ForcedKeyUnitEvent *fevt;
1336 GST_OBJECT_LOCK (encoder);
1337 fevt = forced_key_unit_event_new (running_time, all_headers, count);
/* queue is kept sorted so pending requests match frames in order */
1338 g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
1339 (GCompareDataFunc) forced_key_unit_event_compare, NULL);
1340 GST_OBJECT_UNLOCK (encoder);
1342 GST_DEBUG_OBJECT (encoder,
1343 "force-key-unit event: running-time %" GST_TIME_FORMAT
1344 ", all_headers %d, count %u",
1345 GST_TIME_ARGS (running_time), all_headers, count);
1347 gst_event_unref (event);
1357 GstClockTimeDiff diff;
1358 GstClockTime timestamp;
1360 if (!g_atomic_int_get (&priv->qos_enabled))
/* FIX(review): last argument read "×tamp" — an HTML-entity
 * mis-decode of "&timestamp" ("&times;" -> '×'); restored it. */
1363 gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
1365 GST_OBJECT_LOCK (encoder);
1366 priv->proportion = proportion;
1367 if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
1368 if (G_UNLIKELY (diff > 0)) {
/* running late: push the QoS deadline out by 2*diff plus one frame */
1369 priv->earliest_time = timestamp + 2 * diff + priv->qos_frame_duration;
1371 priv->earliest_time = timestamp + diff;
1374 priv->earliest_time = GST_CLOCK_TIME_NONE;
1376 GST_OBJECT_UNLOCK (encoder);
1378 GST_DEBUG_OBJECT (encoder,
1379 "got QoS %" GST_TIME_FORMAT ", %" GST_STIME_FORMAT ", %g",
1380 GST_TIME_ARGS (timestamp), GST_STIME_ARGS (diff), proportion);
1382 ret = gst_pad_push_event (encoder->sinkpad, event);
1392 gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
/* Src-pad event entry point: logs and delegates to the src_event vfunc
 * (default: gst_video_encoder_src_event_default). Return statement is in
 * lines elided from this dump. */
1399 gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
1401 GstVideoEncoder *encoder;
1402 GstVideoEncoderClass *klass;
1403 gboolean ret = FALSE;
1405 encoder = GST_VIDEO_ENCODER (parent);
1406 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1408 GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
1410 if (klass->src_event)
1411 ret = klass->src_event (encoder, event);
/* Default src-pad query handler: answers CONVERT using the encoder's
 * accumulated byte/time counters and LATENCY by adding this encoder's
 * configured min/max latency on top of the upstream peer's answer.
 * Other queries go to gst_pad_query_default(). */
1417 gst_video_encoder_src_query_default (GstVideoEncoder * enc, GstQuery * query)
1419 GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (enc);
1420 GstVideoEncoderPrivate *priv;
1425 GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
1427 switch (GST_QUERY_TYPE (query)) {
1428 case GST_QUERY_CONVERT:
1430 GstFormat src_fmt, dest_fmt;
1431 gint64 src_val, dest_val;
1433 gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
/* priv->bytes / priv->time are protected by the object lock */
1434 GST_OBJECT_LOCK (enc);
1436 __gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
1437 src_val, &dest_fmt, &dest_val);
1438 GST_OBJECT_UNLOCK (enc);
1441 gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
1444 case GST_QUERY_LATENCY:
1447 GstClockTime min_latency, max_latency;
1449 res = gst_pad_peer_query (enc->sinkpad, query);
1451 gst_query_parse_latency (query, &live, &min_latency, &max_latency);
1452 GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
1453 GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
1454 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
1456 GST_OBJECT_LOCK (enc);
1457 min_latency += priv->min_latency;
/* NONE on either side means "unbounded" for the combined max */
1458 if (max_latency == GST_CLOCK_TIME_NONE
1459 || enc->priv->max_latency == GST_CLOCK_TIME_NONE)
1460 max_latency = GST_CLOCK_TIME_NONE;
1462 max_latency += enc->priv->max_latency;
1463 GST_OBJECT_UNLOCK (enc);
1465 gst_query_set_latency (query, live, min_latency, max_latency);
1470 res = gst_pad_query_default (pad, GST_OBJECT (enc), query);
1475 GST_DEBUG_OBJECT (enc, "query failed");
/* Src-pad query entry point: logs and delegates to the src_query vfunc
 * (default: gst_video_encoder_src_query_default). */
1480 gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
1482 GstVideoEncoder *encoder;
1483 GstVideoEncoderClass *encoder_class;
1484 gboolean ret = FALSE;
1486 encoder = GST_VIDEO_ENCODER (parent);
1487 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1489 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
1490 GST_QUERY_TYPE_NAME (query));
1492 if (encoder_class->src_query)
1493 ret = encoder_class->src_query (encoder, query);
1498 static GstVideoCodecFrame *
/* Allocates a new GstVideoCodecFrame for the given input buffer,
 * assigning monotonically increasing system/presentation frame numbers
 * (under the stream lock) and transferring ownership of any events that
 * arrived before this buffer ("current_frame_events") to the frame.
 * NOTE(review): the "return frame;" tail is elided from this dump. */
1499 gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
1500 GstClockTime pts, GstClockTime dts, GstClockTime duration)
1502 GstVideoEncoderPrivate *priv = encoder->priv;
1503 GstVideoCodecFrame *frame;
1505 frame = g_slice_new0 (GstVideoCodecFrame);
1507 frame->ref_count = 1;
1509 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1510 frame->system_frame_number = priv->system_frame_number;
1511 priv->system_frame_number++;
1513 frame->presentation_frame_number = priv->presentation_frame_number;
1514 priv->presentation_frame_number++;
1515 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* frame takes ownership of the pending pre-frame events */
1517 frame->events = priv->current_frame_events;
1518 priv->current_frame_events = NULL;
1519 frame->input_buffer = buf;
1522 frame->duration = duration;
/* stash the (clipped) PTS in the ABI field for later DTS inference */
1523 frame->abidata.ABI.ts = pts;
1529 static GstFlowReturn
/* Sink-pad chain function: clips the incoming buffer against the input
 * segment, applies the min-PTS time adjustment, wraps the buffer in a
 * GstVideoCodecFrame, resolves any queued force-key-unit requests
 * (with optional throttling via min_force_key_unit_interval), and hands
 * the frame to the subclass' handle_frame().
 * NOTE(review): this dump elides many lines (else branches, breaks,
 * closing braces) — see the gaps in the embedded numbering. */
1530 gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
1532 GstVideoEncoder *encoder;
1533 GstVideoEncoderPrivate *priv;
1534 GstVideoEncoderClass *klass;
1535 GstVideoCodecFrame *frame;
1536 GstClockTime pts, duration;
1537 GstFlowReturn ret = GST_FLOW_OK;
1538 guint64 start, stop, cstart, cstop;
1540 encoder = GST_VIDEO_ENCODER (parent);
1541 priv = encoder->priv;
1542 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1544 g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
1546 if (!encoder->priv->input_state)
1547 goto not_negotiated;
1549 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1551 pts = GST_BUFFER_PTS (buf);
1552 duration = GST_BUFFER_DURATION (buf);
1554 GST_LOG_OBJECT (encoder,
1555 "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
1556 ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
1557 gst_buffer_get_size (buf), GST_TIME_ARGS (pts),
1558 GST_TIME_ARGS (GST_BUFFER_DTS (buf)), GST_TIME_ARGS (duration));
1561 if (GST_CLOCK_TIME_IS_VALID (duration))
1562 stop = start + duration;
1564 stop = GST_CLOCK_TIME_NONE;
1566 /* Drop buffers outside of segment */
1567 if (!gst_segment_clip (&encoder->input_segment,
1568 GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
1569 GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
1570 gst_buffer_unref (buf);
/* recompute duration from the clipped start/stop */
1574 if (GST_CLOCK_TIME_IS_VALID (cstop))
1575 duration = cstop - cstart;
1577 duration = GST_CLOCK_TIME_NONE;
1579 if (priv->min_pts != GST_CLOCK_TIME_NONE
1580 && priv->time_adjustment == GST_CLOCK_TIME_NONE) {
1581 if (cstart < priv->min_pts) {
1582 priv->time_adjustment = priv->min_pts - cstart;
1586 if (priv->time_adjustment != GST_CLOCK_TIME_NONE) {
1587 cstart += priv->time_adjustment;
1590 /* incoming DTS is not really relevant and does not make sense anyway,
1591 * so pass along _NONE and maybe come up with something better later on */
1592 frame = gst_video_encoder_new_frame (encoder, buf, cstart,
1593 GST_CLOCK_TIME_NONE, duration);
1595 GST_OBJECT_LOCK (encoder);
1596 if (priv->force_key_unit.head) {
1598 GstClockTime running_time;
1599 gboolean throttled, have_fevt = FALSE, have_pending_none_fevt = FALSE;
1600 GQueue matching_fevt = G_QUEUE_INIT;
1603 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
/* throttled: a previous request or key unit is still within the
 * configured minimum key-unit interval */
1606 throttled = (priv->min_force_key_unit_interval != 0 &&
1607 priv->min_force_key_unit_interval != GST_CLOCK_TIME_NONE &&
1608 ((priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
1609 priv->last_force_key_unit_request +
1610 priv->min_force_key_unit_interval > running_time)
1611 || (priv->last_key_unit != GST_CLOCK_TIME_NONE
1612 && priv->last_key_unit + priv->min_force_key_unit_interval >
1615 for (l = priv->force_key_unit.head; l && (!throttled || !have_fevt);
1617 ForcedKeyUnitEvent *fevt = l->data;
1619 /* Skip pending keyunits */
1620 if (fevt->pending) {
1621 if (fevt->running_time == GST_CLOCK_TIME_NONE)
1622 have_pending_none_fevt = TRUE;
1626 /* Simple case, keyunit ASAP */
1627 if (fevt->running_time == GST_CLOCK_TIME_NONE) {
1630 g_queue_push_tail (&matching_fevt, fevt);
1634 /* Event for before this frame */
1635 if (fevt->running_time <= running_time) {
1638 g_queue_push_tail (&matching_fevt, fevt);
1642 /* Otherwise all following events are in the future */
1646 if (throttled && have_fevt) {
1647 GstClockTime last_time;
1649 if (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
1650 priv->last_force_key_unit_request +
1651 priv->min_force_key_unit_interval > running_time) {
1652 last_time = priv->last_force_key_unit_request;
1654 last_time = priv->last_key_unit;
1657 GST_DEBUG_OBJECT (encoder,
1658 "Not requesting a new key unit yet due to throttling (%"
1659 GST_TIME_FORMAT " + %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
1660 GST_TIME_ARGS (last_time),
1661 GST_TIME_ARGS (priv->min_force_key_unit_interval),
1662 GST_TIME_ARGS (running_time));
1663 g_queue_clear (&matching_fevt);
1666 if (matching_fevt.length > 0) {
1667 ForcedKeyUnitEvent *fevt;
1668 gboolean all_headers = FALSE;
1669 gboolean force_keyunit = FALSE;
1671 while ((fevt = g_queue_pop_head (&matching_fevt))) {
1672 fevt->pending = TRUE;
/* suppress duplicates: already-pending ASAP request, or a request at
 * or before the last request / last key unit */
1674 if ((fevt->running_time == GST_CLOCK_TIME_NONE
1675 && have_pending_none_fevt)
1676 || (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE
1677 && fevt->running_time != GST_CLOCK_TIME_NONE
1678 && fevt->running_time <= priv->last_force_key_unit_request) ||
1679 (priv->last_key_unit != GST_CLOCK_TIME_NONE
1680 && fevt->running_time != GST_CLOCK_TIME_NONE
1681 && fevt->running_time <= priv->last_key_unit)) {
1682 GST_DEBUG_OBJECT (encoder,
1683 "Not requesting another key unit at running time %"
1684 GST_TIME_FORMAT, GST_TIME_ARGS (fevt->running_time));
1686 force_keyunit = TRUE;
1687 fevt->frame_id = frame->system_frame_number;
1688 if (fevt->all_headers)
1693 if (force_keyunit) {
1694 GST_DEBUG_OBJECT (encoder,
1695 "Forcing a key unit at running time %" GST_TIME_FORMAT,
1696 GST_TIME_ARGS (running_time));
1698 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
1700 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
1701 priv->last_force_key_unit_request = running_time;
1705 GST_OBJECT_UNLOCK (encoder);
1707 g_queue_push_tail (&priv->frames, gst_video_codec_frame_ref (frame));
1709 /* new data, more finish needed */
1710 priv->drained = FALSE;
1712 GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
1713 frame->presentation_frame_number);
1716 gst_segment_to_running_time (&encoder->input_segment, GST_FORMAT_TIME,
1719 ret = klass->handle_frame (encoder, frame);
1722 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* error path: chain called before caps/set_format negotiated */
1729 GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
1730 ("encoder not initialized"));
1731 gst_buffer_unref (buf);
1732 return GST_FLOW_NOT_NEGOTIATED;
1736 static GstStateChangeReturn
/* Element state-change handler: calls the subclass open/start vfuncs on
 * the way up, chains to the parent class, then calls stop/close and
 * resets internal state on the way down. The error labels at the bottom
 * post an ELEMENT_ERROR and fail the transition. */
1737 gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
1739 GstVideoEncoder *encoder;
1740 GstVideoEncoderClass *encoder_class;
1741 GstStateChangeReturn ret;
1743 encoder = GST_VIDEO_ENCODER (element);
1744 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);
1746 switch (transition) {
1747 case GST_STATE_CHANGE_NULL_TO_READY:
1748 /* open device/library if needed */
1749 if (encoder_class->open && !encoder_class->open (encoder))
1752 case GST_STATE_CHANGE_READY_TO_PAUSED:
1753 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1754 gst_video_encoder_reset (encoder, TRUE);
1755 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1757 /* Initialize device/library if needed */
1758 if (encoder_class->start && !encoder_class->start (encoder))
1765 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1767 switch (transition) {
1768 case GST_STATE_CHANGE_PAUSED_TO_READY:{
1769 gboolean stopped = TRUE;
1771 if (encoder_class->stop)
1772 stopped = encoder_class->stop (encoder);
1774 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1775 gst_video_encoder_reset (encoder, TRUE);
1776 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1782 case GST_STATE_CHANGE_READY_TO_NULL:
1783 /* close device/library if needed */
1784 if (encoder_class->close && !encoder_class->close (encoder))
/* error labels (label names elided from this dump) */
1797 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1798 ("Failed to open encoder"));
1799 return GST_STATE_CHANGE_FAILURE;
1804 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1805 ("Failed to start encoder"));
1806 return GST_STATE_CHANGE_FAILURE;
1811 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1812 ("Failed to stop encoder"));
1813 return GST_STATE_CHANGE_FAILURE;
1818 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1819 ("Failed to close encoder"));
1820 return GST_STATE_CHANGE_FAILURE;
/* Default negotiate implementation: completes the output caps from the
 * configured output GstVideoInfo (dimensions, framerate, colorimetry,
 * interlacing, multiview, HDR metadata), pushes pending pre-caps events,
 * sets the caps on the src pad, then runs the ALLOCATION query and lets
 * the subclass decide_allocation() pick allocator/params.
 * NOTE(review): several else/closing lines are elided from this dump;
 * only the two mojibake fixes below change visible code. */
1825 gst_video_encoder_negotiate_default (GstVideoEncoder * encoder)
1827 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1828 GstAllocator *allocator;
1829 GstAllocationParams params;
1830 gboolean ret = TRUE;
1831 GstVideoCodecState *state = encoder->priv->output_state;
1832 GstVideoInfo *info = &state->info;
1833 GstQuery *query = NULL;
1834 GstVideoCodecFrame *frame;
1838 g_return_val_if_fail (state->caps != NULL, FALSE);
1840 if (encoder->priv->output_state_changed) {
1841 GstStructure *out_struct;
1843 state->caps = gst_caps_make_writable (state->caps);
1846 gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
1847 "height", G_TYPE_INT, info->height,
1848 "pixel-aspect-ratio", GST_TYPE_FRACTION,
1849 info->par_n, info->par_d, NULL);
1850 if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
1851 /* variable fps with a max-framerate */
1852 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
1853 "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
1855 /* no variable fps or no max-framerate */
1856 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
1857 info->fps_n, info->fps_d, NULL);
1859 if (state->codec_data)
1860 gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
1861 state->codec_data, NULL);
1863 gst_caps_set_simple (state->caps, "interlace-mode", G_TYPE_STRING,
1864 gst_video_interlace_mode_to_string (info->interlace_mode), NULL);
1865 if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
1866 GST_VIDEO_INFO_FIELD_ORDER (info) != GST_VIDEO_FIELD_ORDER_UNKNOWN)
1867 gst_caps_set_simple (state->caps, "field-order", G_TYPE_STRING,
1868 gst_video_field_order_to_string (GST_VIDEO_INFO_FIELD_ORDER (info)),
1871 colorimetry = gst_video_colorimetry_to_string (&info->colorimetry);
1873 gst_caps_set_simple (state->caps, "colorimetry", G_TYPE_STRING,
1875 g_free (colorimetry);
1877 if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) {
1878 gchar *chroma_site = gst_video_chroma_site_to_string (info->chroma_site);
1881 GST_WARNING ("Couldn't convert chroma-site 0x%x to string",
1884 gst_caps_set_simple (state->caps,
1885 "chroma-site", G_TYPE_STRING, chroma_site, NULL);
1886 g_free (chroma_site);
1890 if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
1891 const gchar *caps_mview_mode =
1892 gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
1895 gst_caps_set_simple (state->caps, "multiview-mode", G_TYPE_STRING,
1896 caps_mview_mode, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
1897 GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL);
1900 out_struct = gst_caps_get_structure (state->caps, 0);
1902 /* forward upstream mastering display info and content light level
1903 * if subclass didn't set */
1904 if (state->mastering_display_info &&
1905 !gst_structure_has_field (out_struct, "mastering-display-info")) {
1906 gst_video_mastering_display_info_add_to_caps
1907 (state->mastering_display_info, state->caps);
1910 if (state->content_light_level &&
1911 !gst_structure_has_field (out_struct, "content-light-level")) {
1912 gst_video_content_light_level_add_to_caps (state->content_light_level,
1916 encoder->priv->output_state_changed = FALSE;
1919 if (state->allocation_caps == NULL)
1920 state->allocation_caps = gst_caps_ref (state->caps);
1922 /* Push all pending pre-caps events of the oldest frame before
1924 frame = encoder->priv->frames.head ? encoder->priv->frames.head->data : NULL;
1925 if (frame || encoder->priv->current_frame_events) {
1929 events = &frame->events;
1931 events = &encoder->priv->current_frame_events;
1934 for (l = g_list_last (*events); l;) {
1935 GstEvent *event = GST_EVENT (l->data);
1938 if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
1939 gst_video_encoder_push_event (encoder, event);
1942 *events = g_list_delete_link (*events, tmp);
1949 prevcaps = gst_pad_get_current_caps (encoder->srcpad);
1950 if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
1951 ret = gst_pad_set_caps (encoder->srcpad, state->caps);
1955 gst_caps_unref (prevcaps);
1960 query = gst_query_new_allocation (state->allocation_caps, TRUE);
1961 if (!gst_pad_peer_query (encoder->srcpad, query)) {
1962 GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
1965 g_assert (klass->decide_allocation != NULL);
1966 ret = klass->decide_allocation (encoder, query);
1968 GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
1972 goto no_decide_allocation;
1974 /* we got configuration from our peer or the decide_allocation method,
/* FIX(review): both calls below had "&params" mangled to the mojibake
 * "¶ms" (an HTML "&para;" mis-decode); restored the address-of. */
1976 if (gst_query_get_n_allocation_params (query) > 0) {
1977 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
1980 gst_allocation_params_init (&params);
1983 if (encoder->priv->allocator)
1984 gst_object_unref (encoder->priv->allocator);
1985 encoder->priv->allocator = allocator;
1986 encoder->priv->params = params;
1990 gst_query_unref (query);
1995 no_decide_allocation:
1997 GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
/* Calls the negotiate vfunc (default: gst_video_encoder_negotiate_default)
 * without touching the reconfigure flag; caller holds the stream lock. */
2003 gst_video_encoder_negotiate_unlocked (GstVideoEncoder * encoder)
2005 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2006 gboolean ret = TRUE;
2008 if (G_LIKELY (klass->negotiate))
2009 ret = klass->negotiate (encoder);
2015 * gst_video_encoder_negotiate:
2016 * @encoder: a #GstVideoEncoder
2018 * Negotiate with downstream elements to currently configured #GstVideoCodecState.
2019 * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
2022 * Returns: %TRUE if the negotiation succeeded, else %FALSE.
2025 gst_video_encoder_negotiate (GstVideoEncoder * encoder)
2027 GstVideoEncoderClass *klass;
2028 gboolean ret = TRUE;
2030 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
2031 g_return_val_if_fail (encoder->priv->output_state, FALSE);
2033 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2035 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* clear NEED_RECONFIGURE up front; re-mark below on failure */
2036 gst_pad_check_reconfigure (encoder->srcpad);
2037 if (klass->negotiate) {
2038 ret = klass->negotiate (encoder);
/* negotiation failed: flag the src pad for another reconfigure attempt */
2040 gst_pad_mark_reconfigure (encoder->srcpad);
2042 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2048 * gst_video_encoder_allocate_output_buffer:
2049 * @encoder: a #GstVideoEncoder
2050 * @size: size of the buffer
2052 * Helper function that allocates a buffer to hold an encoded video frame
2053 * for @encoder's current #GstVideoCodecState.
2055 * Returns: (transfer full): allocated buffer
2058 gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
2061 gboolean needs_reconfigure = FALSE;
2063 g_return_val_if_fail (size > 0, NULL);
2065 GST_DEBUG ("alloc src buffer");
2067 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* (re)negotiate first if output state changed or pad needs reconfigure */
2068 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2069 if (G_UNLIKELY (encoder->priv->output_state_changed
2070 || (encoder->priv->output_state && needs_reconfigure))) {
2071 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2072 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
2073 gst_pad_mark_reconfigure (encoder->srcpad);
/* allocate with the negotiated allocator/params */
2079 gst_buffer_new_allocate (encoder->priv->allocator, size,
2080 &encoder->priv->params);
2082 GST_INFO_OBJECT (encoder, "couldn't allocate output buffer");
2086 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* fallback path: plain default-allocator buffer */
2091 buffer = gst_buffer_new_allocate (NULL, size, NULL);
2093 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2099 * gst_video_encoder_allocate_output_frame:
2100 * @encoder: a #GstVideoEncoder
2101 * @frame: a #GstVideoCodecFrame
2102 * @size: size of the buffer
2104 * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
2105 * current #GstVideoCodecState. Subclass should already have configured video
2106 * state and set src pad caps.
2108 * The buffer allocated here is owned by the frame and you should only
2109 * keep references to the frame, not the buffer.
2111 * Returns: %GST_FLOW_OK if an output buffer could be allocated
2114 gst_video_encoder_allocate_output_frame (GstVideoEncoder *
2115 encoder, GstVideoCodecFrame * frame, gsize size)
2117 gboolean needs_reconfigure = FALSE;
2119 g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);
2121 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* (re)negotiate first if output state changed or pad needs reconfigure */
2122 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2123 if (G_UNLIKELY (encoder->priv->output_state_changed
2124 || (encoder->priv->output_state && needs_reconfigure))) {
2125 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2126 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
2127 gst_pad_mark_reconfigure (encoder->srcpad);
2131 GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);
2133 frame->output_buffer =
2134 gst_buffer_new_allocate (encoder->priv->allocator, size,
2135 &encoder->priv->params);
2137 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2139 return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
/* Removes @frame from the encoder's pending-frames queue (dropping the
 * queue's reference) and releases the caller's reference — this function
 * takes ownership of @frame. */
2143 gst_video_encoder_release_frame (GstVideoEncoder * enc,
2144 GstVideoCodecFrame * frame)
2148 /* unref once from the list */
2149 link = g_queue_find (&enc->priv->frames, frame);
2151 gst_video_codec_frame_unref (frame);
2152 g_queue_delete_link (&enc->priv->frames, link);
2154 /* unref because this function takes ownership */
2155 gst_video_codec_frame_unref (frame);
/* Default transform_meta vfunc: a meta is copied to the output buffer
 * only if every tag on its API type is in the supported list below
 * (generic video, orientation, size). */
2159 gst_video_encoder_transform_meta_default (GstVideoEncoder *
2160 encoder, GstVideoCodecFrame * frame, GstMeta * meta)
2162 const GstMetaInfo *info = meta->info;
2163 const gchar *const *tags;
2164 const gchar *const supported_tags[] = {
2165 GST_META_TAG_VIDEO_STR,
2166 GST_META_TAG_VIDEO_ORIENTATION_STR,
2167 GST_META_TAG_VIDEO_SIZE_STR,
2171 tags = gst_meta_api_type_get_tags (info->api);
/* reject the meta as soon as one of its tags is unsupported */
2177 if (!g_strv_contains (supported_tags, *tags))
/* CopyMetaData: context passed through gst_buffer_foreach_meta below
 * (struct declaration line elided from this dump). */
2187 GstVideoEncoder *encoder;
2188 GstVideoCodecFrame *frame;
/* Per-meta callback: asks the subclass' transform_meta whether to copy
 * the meta from the input buffer to frame->output_buffer; memory-specific
 * metas are never offered. */
2192 foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
2194 CopyMetaData *data = user_data;
2195 GstVideoEncoder *encoder = data->encoder;
2196 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2197 GstVideoCodecFrame *frame = data->frame;
2198 const GstMetaInfo *info = (*meta)->info;
2199 gboolean do_copy = FALSE;
2201 if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
2202 /* never call the transform_meta with memory specific metadata */
2203 GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s",
2204 g_type_name (info->api));
2206 } else if (klass->transform_meta) {
2207 do_copy = klass->transform_meta (encoder, frame, *meta);
2208 GST_DEBUG_OBJECT (encoder, "transformed metadata %s: copy: %d",
2209 g_type_name (info->api), do_copy);
2212 /* we only copy metadata when the subclass implemented a transform_meta
2213 * function and when it returns %TRUE */
2214 if (do_copy && info->transform_func) {
2215 GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
2216 GST_DEBUG_OBJECT (encoder, "copy metadata %s", g_type_name (info->api));
2217 /* simply copy then */
/* FIX(review): last argument read "©_data" — an HTML-entity
 * mis-decode of "&copy_data" ("&copy;" -> '©'); restored it. */
2218 info->transform_func (frame->output_buffer, *meta, inbuf,
2219 _gst_meta_transform_copy, &copy_data);
/* Drops @frame (QoS): posts a GST_MESSAGE_QOS with the frame's stream
 * time / running time, the current QoS proportion and jitter vs. the
 * earliest_time deadline, plus processed/dropped stats. */
2225 gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
2227 GstVideoEncoderPrivate *priv = enc->priv;
2228 GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
2229 GstSegment *segment;
2230 GstMessage *qos_msg;
2233 GST_DEBUG_OBJECT (enc, "dropping frame %" GST_TIME_FORMAT,
2234 GST_TIME_ARGS (frame->pts));
2238 /* post QoS message */
2239 GST_OBJECT_LOCK (enc);
2240 proportion = priv->proportion;
2241 earliest_time = priv->earliest_time;
2242 GST_OBJECT_UNLOCK (enc);
2244 timestamp = frame->pts;
/* fall back to input segment when output segment not yet set */
2245 segment = &enc->output_segment;
2246 if (G_UNLIKELY (segment->format == GST_FORMAT_UNDEFINED))
2247 segment = &enc->input_segment;
2249 gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
2250 qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
2251 jitter = GST_CLOCK_DIFF (qostime, earliest_time);
2253 gst_message_new_qos (GST_OBJECT_CAST (enc), FALSE, qostime, stream_time,
2254 timestamp, GST_CLOCK_TIME_NONE);
2255 gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
2256 gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
2257 priv->processed, priv->dropped);
2258 gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
2261 static GstFlowReturn
/* Checks that output is pushable: renegotiates if output state changed or
 * the src pad was marked for reconfigure, and errors out if no output
 * state was ever configured. Returns GST_FLOW_OK on success (return in
 * elided lines), FLUSHING/NOT_NEGOTIATED/ERROR otherwise. */
2262 gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
2264 GstVideoEncoderPrivate *priv = encoder->priv;
2265 gboolean needs_reconfigure;
2267 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2268 if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
2269 && needs_reconfigure))) {
2270 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2271 gst_pad_mark_reconfigure (encoder->srcpad);
/* distinguish flush from a genuine negotiation failure */
2272 if (GST_PAD_IS_FLUSHING (encoder->srcpad))
2273 return GST_FLOW_FLUSHING;
2275 return GST_FLOW_NOT_NEGOTIATED;
2279 if (G_UNLIKELY (priv->output_state == NULL)) {
2280 GST_ERROR_OBJECT (encoder, "Output state was not configured");
2281 GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
2282 ("Output state was not configured"), (NULL));
2283 return GST_FLOW_ERROR;
/* Pushes the serialized events queued on frames older than (and
 * including, per the elided loop bounds) @frame, in arrival order
 * (lists are stored newest-first, hence the reverse walk), then pushes
 * any pending tags. */
2290 gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
2291 GstVideoCodecFrame * frame)
2293 GstVideoEncoderPrivate *priv = encoder->priv;
2296 /* Push all pending events that arrived before this frame */
2297 for (l = priv->frames.head; l; l = l->next) {
2298 GstVideoCodecFrame *tmp = l->data;
2303 for (k = g_list_last (tmp->events); k; k = k->prev)
2304 gst_video_encoder_push_event (encoder, k->data);
2305 g_list_free (tmp->events);
2313 gst_video_encoder_check_and_push_tags (encoder);
/* Infers a DTS for @frame when the subclass did not set one: uses the
 * lowest unsent PTS among pending frames, provided every pending frame
 * has a valid stored timestamp. */
2317 gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
2318 GstVideoCodecFrame * frame)
2320 /* DTS is expected to be monotonously increasing,
2321 * so a good guess is the lowest unsent PTS (all being OK) */
2322 GstVideoEncoderPrivate *priv = encoder->priv;
2324 GstClockTime min_ts = GST_CLOCK_TIME_NONE;
2325 GstVideoCodecFrame *oframe = NULL;
2326 gboolean seen_none = FALSE;
2328 /* some maintenance regardless */
2329 for (l = priv->frames.head; l; l = l->next) {
2330 GstVideoCodecFrame *tmp = l->data;
2332 if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
2337 if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
2338 min_ts = tmp->abidata.ABI.ts;
2342 /* save a ts if needed */
2343 if (oframe && oframe != frame) {
2344 oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
2347 /* and set if needed */
2348 if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
2349 frame->dts = min_ts;
2350 GST_DEBUG_OBJECT (encoder,
2351 "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
2352 GST_TIME_ARGS (frame->pts));
/* Pushes the subclass-provided header buffers downstream when new
 * headers are pending: makes each buffer metadata-writable, accounts its
 * size, sets/clears DELTA_UNIT according to @key_unit and DISCONT
 * according to @discont, then clears the new_headers flag. */
2357 gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
2358 gboolean * discont, gboolean key_unit)
2360 GstVideoEncoderPrivate *priv = encoder->priv;
2362 if (G_UNLIKELY (priv->new_headers)) {
2365 GST_DEBUG_OBJECT (encoder, "Sending headers");
2367 /* First make all buffers metadata-writable */
2368 for (tmp = priv->headers; tmp; tmp = tmp->next) {
2369 GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
2371 tmp->data = tmpbuf = gst_buffer_make_writable (tmpbuf);
2373 GST_OBJECT_LOCK (encoder);
/* count header bytes into the encoder's byte statistics */
2374 priv->bytes += gst_buffer_get_size (tmpbuf);
2375 GST_OBJECT_UNLOCK (encoder);
2377 if (G_UNLIKELY (key_unit)) {
2379 GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
2381 GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
2384 if (G_UNLIKELY (*discont)) {
2385 GST_LOG_OBJECT (encoder, "marking discont");
2386 GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
2389 GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
2392 gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
2394 priv->new_headers = FALSE;
/* Iterates the input buffer's metas via foreach_metadata so the subclass'
 * transform_meta can copy applicable ones onto the output buffer; no-op
 * when the subclass has no transform_meta or the input buffer is gone. */
2399 gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
2400 GstVideoCodecFrame * frame)
2402 GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2404 if (encoder_class->transform_meta) {
2405 if (G_LIKELY (frame->input_buffer)) {
2408 data.encoder = encoder;
2410 gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
2412 GST_FIXME_OBJECT (encoder,
2413 "Can't copy metadata because input frame disappeared");
/* For an outgoing key-unit @frame, collects all matching pending
 * force-key-unit requests (exact frame-id match, ASAP requests, or
 * requests at/before this frame's running time), removes them from the
 * queue, and emits a downstream force-key-unit event for each; sets
 * *send_headers when any request asked for all headers. */
2419 gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
2420 GstVideoCodecFrame * frame, gboolean * send_headers)
2422 GstVideoEncoderPrivate *priv = encoder->priv;
2423 GstClockTime stream_time, running_time;
2426 GQueue matching_fevt = G_QUEUE_INIT;
2427 ForcedKeyUnitEvent *fevt;
2430 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2433 GST_OBJECT_LOCK (encoder);
2434 for (l = priv->force_key_unit.head; l;) {
2437 /* Skip non-pending keyunits */
2438 if (!fevt->pending) {
2443 /* Exact match using the frame id */
2444 if (frame->system_frame_number == fevt->frame_id) {
2445 GList *next = l->next;
2446 g_queue_push_tail (&matching_fevt, fevt);
2447 g_queue_delete_link (&priv->force_key_unit, l);
2452 /* Simple case, keyunit ASAP */
2453 if (fevt->running_time == GST_CLOCK_TIME_NONE) {
2454 GList *next = l->next;
2455 g_queue_push_tail (&matching_fevt, fevt);
2456 g_queue_delete_link (&priv->force_key_unit, l);
2461 /* Event for before this frame */
2462 if (fevt->running_time <= running_time) {
2463 GList *next = l->next;
2464 g_queue_push_tail (&matching_fevt, fevt);
2465 g_queue_delete_link (&priv->force_key_unit, l);
2470 /* Otherwise all following events are in the future */
2474 GST_OBJECT_UNLOCK (encoder);
/* emit one downstream force-key-unit event per matched request */
2476 while ((fevt = g_queue_pop_head (&matching_fevt))) {
2478 gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
2481 ev = gst_video_event_new_downstream_force_key_unit
2482 (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);
2484 gst_video_encoder_push_event (encoder, ev);
2486 if (fevt->all_headers)
2487 *send_headers = TRUE;
2489 GST_DEBUG_OBJECT (encoder,
2490 "Forced key unit: running-time %" GST_TIME_FORMAT
2491 ", all_headers %d, count %u",
2492 GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
2493 forced_key_unit_event_free (fevt);
2498 * gst_video_encoder_finish_frame:
2499 * @encoder: a #GstVideoEncoder
2500 * @frame: (transfer full): an encoded #GstVideoCodecFrame
2502  * @frame must have a valid encoded data buffer (whose metadata fields
2503  * are then set appropriately from the frame data), or no buffer at
2504  * all if the frame should be dropped.
2505 * It is subsequently pushed downstream or provided to @pre_push.
2506 * In any case, the frame is considered finished and released.
2508 * After calling this function the output buffer of the frame is to be
2509 * considered read-only. This function will also change the metadata
2512 * Returns: a #GstFlowReturn resulting from sending data downstream
/* See the gtk-doc above: validates the frame, stamps its output buffer,
 * pushes it downstream and releases the frame in all cases. */
2515 gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
2516     GstVideoCodecFrame * frame)
2518   GstVideoEncoderPrivate *priv = encoder->priv;
2519   GstFlowReturn ret = GST_FLOW_OK;
2520   GstVideoEncoderClass *encoder_class;
2521   gboolean send_headers = FALSE;
2522   gboolean key_unit = FALSE;
2523   gboolean discont = FALSE;
2526   g_return_val_if_fail (frame, GST_FLOW_ERROR);
   /* The very first output (frame 0, no subframes emitted yet) is marked
    * as a discontinuity downstream. */
2528   discont = (frame->presentation_frame_number == 0
2529       && frame->abidata.ABI.num_subframes == 0);
2531   encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2533   GST_LOG_OBJECT (encoder,
2534       "finish frame fpn %d sync point: %d", frame->presentation_frame_number,
2535       GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
2537   GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
2538       ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
2539       GST_TIME_ARGS (frame->dts));
2541   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
   /* Bail out early if pushing is currently not allowed (see helper). */
2543   ret = gst_video_encoder_can_push_unlocked (encoder);
2544   if (ret != GST_FLOW_OK)
   /* Pending serialized events go out before this frame's first output
    * (subframes already forwarded them). */
2547   if (frame->abidata.ABI.num_subframes == 0)
2548     gst_video_encoder_push_pending_unlocked (encoder, frame);
2550   /* no buffer data means this frame is skipped/dropped */
2551   if (!frame->output_buffer) {
2552     gst_video_encoder_drop_frame (encoder, frame);
   /* Sync point with queued force-key-unit requests: fire the matching
    * downstream events (may raise send_headers). */
2558   if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
2559     gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
   /* Key-unit bookkeeping for the first subframe of a sync point. */
2561   if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
2562       && frame->abidata.ABI.num_subframes == 0) {
2563     priv->distance_from_sync = 0;
2565     /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
2566     if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
2567       frame->dts = frame->pts;
       /* Remember when the last key unit happened (running time). */
2569     priv->last_key_unit =
2570         gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
   /* Non-keyframe path: derive a DTS from queued frames if needed. */
2574     gst_video_encoder_infer_dts_unlocked (encoder, frame);
2576   frame->distance_from_sync = priv->distance_from_sync;
2577   priv->distance_from_sync++;
2579   /* We need a writable buffer for the metadata changes below */
2580   frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);
2582   GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
2583   GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
2584   GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
2586   /* At this stage we have a full frame in the subframe use case;
2587    * mark it to enable latency optimizations
2588    * in use cases like RTP. */
2590   GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_MARKER);
2592   GST_OBJECT_LOCK (encoder);
2593   /* update rate estimate */
2594   priv->bytes += gst_buffer_get_size (frame->output_buffer);
2595   if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
2596     priv->time += frame->duration;
2598     /* better none than nothing valid */
2599     priv->time = GST_CLOCK_TIME_NONE;
2601   GST_OBJECT_UNLOCK (encoder);
   /* Headers are (re)sent before this buffer when a force-key-unit
    * request asked for them. */
2603   if (G_UNLIKELY (send_headers))
2604     priv->new_headers = TRUE;
2606   gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);
   /* DELTA_UNIT flag: cleared on key units, set otherwise (the branching
    * lines are elided here). */
2609   GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2611   GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2614   if (G_UNLIKELY (discont)) {
2615     GST_LOG_OBJECT (encoder, "marking discont");
2616     GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
   /* Let the subclass inspect/adjust the frame right before pushing. */
2619   if (encoder_class->pre_push)
2620     ret = encoder_class->pre_push (encoder, frame);
2622   gst_video_encoder_transform_meta_unlocked (encoder, frame);
2624   /* Get an additional ref to the buffer, which is going to be pushed
2625    * downstream, the original ref is owned by the frame */
2626   if (ret == GST_FLOW_OK)
2627     buffer = gst_buffer_ref (frame->output_buffer);
2629   /* Release frame so the buffer is writable when we push it downstream
2630    * if possible, i.e. if the subclass does not hold additional references
2633   gst_video_encoder_release_frame (encoder, frame);
   /* Push outside the stream lock to avoid deadlocks with downstream. */
2636   if (ret == GST_FLOW_OK) {
2637     GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2638     ret = gst_pad_push (encoder->srcpad, buffer);
2639     GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
   /* Error path (label elided): the frame is still released so it is
    * never leaked. */
2645   gst_video_encoder_release_frame (encoder, frame);
2647   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2653 * gst_video_encoder_finish_subframe:
2654 * @encoder: a #GstVideoEncoder
2655 * @frame: (transfer none): a #GstVideoCodecFrame being encoded
2657 * If multiple subframes are produced for one input frame then use this method
2658 * for each subframe, except for the last one. Before calling this function,
2659 * you need to fill frame->output_buffer with the encoded buffer to push.
2661 * You must call #gst_video_encoder_finish_frame() for the last sub-frame
2662 * to tell the encoder that the frame has been fully encoded.
2664 * This function will change the metadata of @frame and frame->output_buffer
2665 * will be pushed downstream.
2667 * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
/* See the gtk-doc above: pushes one intermediate subframe buffer of
 * @frame downstream; the frame itself stays pending until
 * gst_video_encoder_finish_frame() is called for the last subframe. */
2672 gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
2673     GstVideoCodecFrame * frame)
2675   GstVideoEncoderPrivate *priv = encoder->priv;
2676   GstVideoEncoderClass *encoder_class;
2677   GstFlowReturn ret = GST_FLOW_OK;
2678   GstBuffer *subframe_buffer = NULL;
2679   gboolean discont = FALSE;
2680   gboolean send_headers = FALSE;
2681   gboolean key_unit = FALSE;
   /* Unlike finish_frame, a subframe must always carry a buffer. */
2683   g_return_val_if_fail (frame, GST_FLOW_ERROR);
2684   g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
2686   subframe_buffer = frame->output_buffer;
2688   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
   /* First output of frame 0 starts with a discontinuity. */
2689   discont = (frame->presentation_frame_number == 0
2690       && frame->abidata.ABI.num_subframes == 0);
2692   encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2694   GST_LOG_OBJECT (encoder,
2695       "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
2696       GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
2697       frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
2698       GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
   /* Bail out early if pushing is currently not allowed (see helper). */
2700   ret = gst_video_encoder_can_push_unlocked (encoder);
2701   if (ret != GST_FLOW_OK)
   /* Sync point with queued force-key-unit requests: fire the events. */
2704   if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
2705     gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
2707   /* Push pending events only for the first subframe ie segment event.
2708    * Push new incoming events on finish_frame otherwise.
2710   if (frame->abidata.ABI.num_subframes == 0)
2711     gst_video_encoder_push_pending_unlocked (encoder, frame);
   /* Key-unit bookkeeping, identical to finish_frame. */
2713   if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
2714       && frame->abidata.ABI.num_subframes == 0) {
2715     priv->distance_from_sync = 0;
2717     /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
2718     if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
2719       frame->dts = frame->pts;
2721     priv->last_key_unit =
2722         gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2726     gst_video_encoder_infer_dts_unlocked (encoder, frame);
2728   /* We need a writable buffer for the metadata changes below */
2729   subframe_buffer = gst_buffer_make_writable (subframe_buffer);
2731   GST_BUFFER_PTS (subframe_buffer) = frame->pts;
2732   GST_BUFFER_DTS (subframe_buffer) = frame->dts;
2733   GST_BUFFER_DURATION (subframe_buffer) = frame->duration;
2735   GST_OBJECT_LOCK (encoder);
2736   /* update rate estimate */
2737   priv->bytes += gst_buffer_get_size (subframe_buffer);
2738   GST_OBJECT_UNLOCK (encoder);
   /* Re-send headers first when a force-key-unit request asked for them. */
2740   if (G_UNLIKELY (send_headers))
2741     priv->new_headers = TRUE;
2743   gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);
   /* DELTA_UNIT flag: cleared on key units, set otherwise (branch lines
    * elided here). */
2746   GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2748   GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2751   if (G_UNLIKELY (discont)) {
2752     GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
2754     GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
2757   if (encoder_class->pre_push) {
2758     ret = encoder_class->pre_push (encoder, frame);
2761   gst_video_encoder_transform_meta_unlocked (encoder, frame);
   /* Push outside the stream lock; ownership of subframe_buffer moves to
    * the pad, so it is cleared to avoid the unref below. */
2763   if (ret == GST_FLOW_OK) {
2764     GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2765     ret = gst_pad_push (encoder->srcpad, subframe_buffer);
2766     subframe_buffer = NULL;
2767     GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
   /* The frame stays alive: count this subframe and detach the consumed
    * buffer so the next subframe starts clean. */
2771   frame->abidata.ABI.num_subframes++;
2772   if (subframe_buffer)
2773     gst_buffer_unref (subframe_buffer);
2774   frame->output_buffer = NULL;
2776   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2782 * gst_video_encoder_get_output_state:
2783 * @encoder: a #GstVideoEncoder
2785 * Get the current #GstVideoCodecState
2787 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
2789 GstVideoCodecState *
2790 gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
2792 GstVideoCodecState *state;
2794 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2795 state = gst_video_codec_state_ref (encoder->priv->output_state);
2796 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2802 * gst_video_encoder_set_output_state:
2803 * @encoder: a #GstVideoEncoder
2804 * @caps: (transfer full): the #GstCaps to use for the output
2805 * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState
2807 * Creates a new #GstVideoCodecState with the specified caps as the output state
2809 * Any previously set output state on @encoder will be replaced by the newly
2812 * The specified @caps should not contain any resolution, pixel-aspect-ratio,
2813 * framerate, codec-data, .... Those should be specified instead in the returned
2814 * #GstVideoCodecState.
2816 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
2817 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
2820 * If the subclass wishes to override some fields from the output state (like
2821 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
2823 * The new output state will only take effect (set on pads and buffers) starting
2824 * from the next call to #gst_video_encoder_finish_frame().
2826 * Returns: (transfer full): the newly configured output state.
2828 GstVideoCodecState *
2829 gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
2830 GstVideoCodecState * reference)
2832 GstVideoEncoderPrivate *priv = encoder->priv;
2833 GstVideoCodecState *state;
2835 g_return_val_if_fail (caps != NULL, NULL);
2837 state = _new_output_state (caps, reference);
2841 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2842 if (priv->output_state)
2843 gst_video_codec_state_unref (priv->output_state);
2844 priv->output_state = gst_video_codec_state_ref (state);
2846 if (priv->output_state != NULL && priv->output_state->info.fps_n > 0) {
2847 priv->qos_frame_duration =
2848 gst_util_uint64_scale (GST_SECOND, priv->output_state->info.fps_d,
2849 priv->output_state->info.fps_n);
2851 priv->qos_frame_duration = 0;
2854 priv->output_state_changed = TRUE;
2855 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2861 * gst_video_encoder_set_latency:
2862 * @encoder: a #GstVideoEncoder
2863 * @min_latency: minimum latency
2864 * @max_latency: maximum latency
2866 * Informs baseclass of encoding latency.
2869 gst_video_encoder_set_latency (GstVideoEncoder * encoder,
2870 GstClockTime min_latency, GstClockTime max_latency)
2872 g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
2873 g_return_if_fail (max_latency >= min_latency);
2875 GST_OBJECT_LOCK (encoder);
2876 encoder->priv->min_latency = min_latency;
2877 encoder->priv->max_latency = max_latency;
2878 GST_OBJECT_UNLOCK (encoder);
2880 gst_element_post_message (GST_ELEMENT_CAST (encoder),
2881 gst_message_new_latency (GST_OBJECT_CAST (encoder)));
2885 * gst_video_encoder_get_latency:
2886 * @encoder: a #GstVideoEncoder
2887 * @min_latency: (out) (allow-none): address of variable in which to store the
2888 * configured minimum latency, or %NULL
2889 * @max_latency: (out) (allow-none): address of variable in which to store the
2890 * configured maximum latency, or %NULL
2892 * Query the configured encoding latency. Results will be returned via
2893 * @min_latency and @max_latency.
2896 gst_video_encoder_get_latency (GstVideoEncoder * encoder,
2897 GstClockTime * min_latency, GstClockTime * max_latency)
2899 GST_OBJECT_LOCK (encoder);
2901 *min_latency = encoder->priv->min_latency;
2903 *max_latency = encoder->priv->max_latency;
2904 GST_OBJECT_UNLOCK (encoder);
2908 * gst_video_encoder_get_oldest_frame:
2909 * @encoder: a #GstVideoEncoder
2911 * Get the oldest unfinished pending #GstVideoCodecFrame
2913 * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
2915 GstVideoCodecFrame *
2916 gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
2918 GstVideoCodecFrame *frame = NULL;
2920 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2921 if (encoder->priv->frames.head)
2922 frame = gst_video_codec_frame_ref (encoder->priv->frames.head->data);
2923 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2925 return (GstVideoCodecFrame *) frame;
2929 * gst_video_encoder_get_frame:
2930 * @encoder: a #GstVideoEncoder
2931 * @frame_number: system_frame_number of a frame
2933 * Get a pending unfinished #GstVideoCodecFrame
2935 * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
2937 GstVideoCodecFrame *
2938 gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
2941 GstVideoCodecFrame *frame = NULL;
2943 GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
2945 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2946 for (g = encoder->priv->frames.head; g; g = g->next) {
2947 GstVideoCodecFrame *tmp = g->data;
2949 if (tmp->system_frame_number == frame_number) {
2950 frame = gst_video_codec_frame_ref (tmp);
2954 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2960 * gst_video_encoder_get_frames:
2961 * @encoder: a #GstVideoEncoder
2963 * Get all pending unfinished #GstVideoCodecFrame
2965 * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
2968 gst_video_encoder_get_frames (GstVideoEncoder * encoder)
2972 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2974 g_list_copy_deep (encoder->priv->frames.head,
2975 (GCopyFunc) gst_video_codec_frame_ref, NULL);
2976 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2982 * gst_video_encoder_merge_tags:
2983 * @encoder: a #GstVideoEncoder
2984 * @tags: (allow-none): a #GstTagList to merge, or NULL to unset
2985 * previously-set tags
2986 * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
2988 * Sets the video encoder tags and how they should be merged with any
2989 * upstream stream tags. This will override any tags previously-set
2990 * with gst_video_encoder_merge_tags().
2992 * Note that this is provided for convenience, and the subclass is
2993 * not required to use this and can still do tag handling on its own.
2998 gst_video_encoder_merge_tags (GstVideoEncoder * encoder,
2999 const GstTagList * tags, GstTagMergeMode mode)
3001 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3002 g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
3003 g_return_if_fail (tags == NULL || mode != GST_TAG_MERGE_UNDEFINED);
3005 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
3006 if (encoder->priv->tags != tags) {
3007 if (encoder->priv->tags) {
3008 gst_tag_list_unref (encoder->priv->tags);
3009 encoder->priv->tags = NULL;
3010 encoder->priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
3013 encoder->priv->tags = gst_tag_list_ref ((GstTagList *) tags);
3014 encoder->priv->tags_merge_mode = mode;
3017 GST_DEBUG_OBJECT (encoder, "setting encoder tags to %" GST_PTR_FORMAT,
3019 encoder->priv->tags_changed = TRUE;
3021 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
3025 * gst_video_encoder_get_allocator:
3026 * @encoder: a #GstVideoEncoder
3027 * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
3029 * @params: (out) (allow-none) (transfer full): the
3030 * #GstAllocationParams of @allocator
3032 * Lets #GstVideoEncoder sub-classes to know the memory @allocator
3033 * used by the base class and its @params.
3035 * Unref the @allocator after use it.
3038 gst_video_encoder_get_allocator (GstVideoEncoder * encoder,
3039 GstAllocator ** allocator, GstAllocationParams * params)
3041 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3044 *allocator = encoder->priv->allocator ?
3045 gst_object_ref (encoder->priv->allocator) : NULL;
3048 *params = encoder->priv->params;
3052 * gst_video_encoder_set_min_pts:
3053 * @encoder: a #GstVideoEncoder
3054 * @min_pts: minimal PTS that will be passed to handle_frame
3056 * Request minimal value for PTS passed to handle_frame.
3058 * For streams with reordered frames this can be used to ensure that there
3059 * is enough time to accommodate first DTS, which may be less than first PTS
3064 gst_video_encoder_set_min_pts (GstVideoEncoder * encoder, GstClockTime min_pts)
3066 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3067 encoder->priv->min_pts = min_pts;
3068 encoder->priv->time_adjustment = GST_CLOCK_TIME_NONE;
3072 * gst_video_encoder_get_max_encode_time:
3073 * @encoder: a #GstVideoEncoder
3074 * @frame: a #GstVideoCodecFrame
3076 * Determines maximum possible encoding time for @frame that will
3077 * allow it to encode and arrive in time (as determined by QoS events).
3078 * In particular, a negative result means encoding in time is no longer possible
3079  * and encoding should therefore happen as soon as possible, skipping whatever work it can.
3081 * If no QoS events have been received from downstream, or if
3082 * #GstVideoEncoder:qos is disabled this function returns #G_MAXINT64.
3084  * Returns: max encoding time.
3088 gst_video_encoder_get_max_encode_time (GstVideoEncoder *
3089 encoder, GstVideoCodecFrame * frame)
3091 GstClockTimeDiff deadline;
3092 GstClockTime earliest_time;
3094 if (!g_atomic_int_get (&encoder->priv->qos_enabled))
3097 GST_OBJECT_LOCK (encoder);
3098 earliest_time = encoder->priv->earliest_time;
3099 if (GST_CLOCK_TIME_IS_VALID (earliest_time)
3100 && GST_CLOCK_TIME_IS_VALID (frame->deadline))
3101 deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
3103 deadline = G_MAXINT64;
3105 GST_LOG_OBJECT (encoder, "earliest %" GST_TIME_FORMAT
3106 ", frame deadline %" GST_TIME_FORMAT ", deadline %" GST_STIME_FORMAT,
3107 GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
3108 GST_STIME_ARGS (deadline));
3110 GST_OBJECT_UNLOCK (encoder);
3116 * gst_video_encoder_set_qos_enabled:
3117 * @encoder: the encoder
3118 * @enabled: the new qos value.
3120 * Configures @encoder to handle Quality-of-Service events from downstream.
3124 gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled)
3126 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3128 g_atomic_int_set (&encoder->priv->qos_enabled, enabled);
3132 * gst_video_encoder_is_qos_enabled:
3133 * @encoder: the encoder
3135 * Checks if @encoder is currently configured to handle Quality-of-Service
3136 * events from downstream.
3138 * Returns: %TRUE if the encoder is configured to perform Quality-of-Service.
3142 gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder)
3146 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
3148 res = g_atomic_int_get (&encoder->priv->qos_enabled);
3154 * gst_video_encoder_set_min_force_key_unit_interval:
3155 * @encoder: the encoder
3156 * @interval: minimum interval
3158 * Sets the minimum interval for requesting keyframes based on force-keyunit
3159 * events. Setting this to 0 will allow to handle every event, setting this to
3160 * %GST_CLOCK_TIME_NONE causes force-keyunit events to be ignored.
3165 gst_video_encoder_set_min_force_key_unit_interval (GstVideoEncoder * encoder,
3166 GstClockTime interval)
3168 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3170 GST_OBJECT_LOCK (encoder);
3171 encoder->priv->min_force_key_unit_interval = interval;
3172 GST_OBJECT_UNLOCK (encoder);
3176 * gst_video_encoder_get_min_force_key_unit_interval:
3177 * @encoder: the encoder
3179 * Returns the minimum force-keyunit interval, see gst_video_encoder_set_min_force_key_unit_interval()
3182 * Returns: the minimum force-keyunit interval
3187 gst_video_encoder_get_min_force_key_unit_interval (GstVideoEncoder * encoder)
3189 GstClockTime interval;
3191 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), GST_CLOCK_TIME_NONE);
3193 GST_OBJECT_LOCK (encoder);
3194 interval = encoder->priv->min_force_key_unit_interval;
3195 GST_OBJECT_UNLOCK (encoder);