2 * Copyright (C) 2008 David Schleef <ds@schleef.org>
3 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
4 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
5 * Contact: Stefan Kost <stefan.kost@nokia.com>
6 * Copyright (C) 2012 Collabora Ltd.
7 * Author: Edward Hervey <edward@collabora.com>
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
26 * SECTION:gstvideoencoder
27 * @title: GstVideoEncoder
28 * @short_description: Base class for video encoders
30 * This base class is for video encoders turning raw video into
33 * GstVideoEncoder and subclass should cooperate as follows.
37 * * Initially, GstVideoEncoder calls @start when the encoder element
38 * is activated, which allows subclass to perform any global setup.
39 * * GstVideoEncoder calls @set_format to inform subclass of the format
40 * of input video data that it is about to receive. Subclass should
41 * setup for encoding and configure base class as appropriate
42 * (e.g. latency). While unlikely, it might be called more than once,
43 * if changing input parameters require reconfiguration. Baseclass
44 * will ensure that processing of current configuration is finished.
45 * * GstVideoEncoder calls @stop at end of all processing.
49 * * Base class collects input data and metadata into a frame and hands
50 * this to subclass' @handle_frame.
52 * * If codec processing results in encoded data, subclass should call
53 * @gst_video_encoder_finish_frame to have encoded data pushed
56 * * If implemented, baseclass calls subclass @pre_push just prior to
57 * pushing to allow subclasses to modify some metadata on the buffer.
58 * If it returns GST_FLOW_OK, the buffer is pushed downstream.
60 * * GstVideoEncoderClass will handle both srcpad and sinkpad events.
61 * Sink events will be passed to subclass if @event callback has been
66 * * GstVideoEncoder class calls @stop to inform the subclass that data
67 * processing will be stopped.
69 * Subclass is responsible for providing pad template caps for
70 * source and sink pads. The pads need to be named "sink" and "src". It should
71 * also be able to provide fixed src pad caps in @getcaps by the time it calls
72 * @gst_video_encoder_finish_frame.
74 * Things that subclass need to take care of:
76 * * Provide pad templates
77 * * Provide source pad caps before pushing the first buffer
78 * * Accept data in @handle_frame and provide encoded results to
79 * @gst_video_encoder_finish_frame.
82 * The #GstVideoEncoder:qos property will enable the Quality-of-Service
83 * features of the encoder which gather statistics about the real-time
84 * performance of the downstream elements. If enabled, subclasses can
85 * use gst_video_encoder_get_max_encode_time() to check if input frames
86 * are already late and drop them right away to give a chance to the
87 * pipeline to catch up.
96 * * Calculate actual latency based on input/output timestamp/frame_number
97 * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
100 #include <gst/video/video.h>
101 #include "gstvideoencoder.h"
102 #include "gstvideoutils.h"
103 #include "gstvideoutilsprivate.h"
105 #include <gst/video/gstvideometa.h>
106 #include <gst/video/gstvideopool.h>
110 GST_DEBUG_CATEGORY (videoencoder_debug);
111 #define GST_CAT_DEFAULT videoencoder_debug
115 #define DEFAULT_QOS FALSE
116 #define DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL 0
122 PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
/* Instance-private state of GstVideoEncoder.  Locking: members annotated
 * OBJECT_LOCK are protected by the element's object lock; most other state
 * is protected by the stream lock (see gst_video_encoder_reset). */
126 struct _GstVideoEncoderPrivate
128 guint64 presentation_frame_number;
129 int distance_from_sync;
131 /* FIXME : (and introduce a context ?) */
137 /* FIXME 2.0: Use a GQueue or similar, see GstVideoCodecFrame::events */
/* serialized events received before the next frame; attached to it later */
138 GList *current_frame_events;
141 gboolean new_headers; /* Whether new headers were just set */
143 GQueue force_key_unit; /* List of pending forced keyunits */
/* rate-limiting state for force-keyunit requests (see the
 * min-force-key-unit-interval property) */
144 GstClockTime min_force_key_unit_interval;
145 GstClockTime last_force_key_unit_request;
146 GstClockTime last_key_unit;
148 guint32 system_frame_number;
150 GQueue frames; /* Protected with OBJECT_LOCK */
/* negotiated input/output formats; NULL until caps are set/negotiated */
151 GstVideoCodecState *input_state;
152 GstVideoCodecState *output_state;
153 gboolean output_state_changed;
/* allocator negotiated via decide_allocation */
158 GstAllocator *allocator;
159 GstAllocationParams params;
161 /* upstream stream tags (global tags are passed through as-is) */
162 GstTagList *upstream_tags;
166 GstTagMergeMode tags_merge_mode;
168 gboolean tags_changed;
170 GstClockTime min_pts;
171 /* adjustment needed on pts, dts, segment start and stop to accommodate
173 GstClockTime time_adjustment;
/* QoS state gathered from downstream QOS events */
176 gint qos_enabled; /* ATOMIC */
177 gdouble proportion; /* OBJECT_LOCK */
178 GstClockTime earliest_time; /* OBJECT_LOCK */
179 GstClockTime qos_frame_duration; /* OBJECT_LOCK */
180 /* qos messages: frames dropped/processed */
/* One pending force-key-unit request, queued in priv->force_key_unit and
 * ordered by forced_key_unit_event_compare(). */
185 typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
186 struct _ForcedKeyUnitEvent
/* requested running time; GST_CLOCK_TIME_NONE means "as soon as possible" */
188 GstClockTime running_time;
189 gboolean pending; /* TRUE if this was requested already */
190 gboolean all_headers;
/* Frees a ForcedKeyUnitEvent allocated with g_slice_new0(). */
196 forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
198 g_slice_free (ForcedKeyUnitEvent, evt);
/* Allocates and initializes a new ForcedKeyUnitEvent for the given
 * running time.  Caller owns the returned event; free with
 * forced_key_unit_event_free(). */
201 static ForcedKeyUnitEvent *
202 forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
205 ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
207 evt->running_time = running_time;
208 evt->all_headers = all_headers;
/* GCompareDataFunc for g_queue_insert_sorted(): orders force-key-unit
 * requests by running time, with GST_CLOCK_TIME_NONE ("immediate")
 * requests sorting first, and pending requests before non-pending ones
 * at equal times. */
215 forced_key_unit_event_compare (const ForcedKeyUnitEvent * a,
216 const ForcedKeyUnitEvent * b, gpointer user_data)
218 if (a->running_time == b->running_time) {
219 /* Sort pending ones before non-pending ones */
220 if (a->pending && !b->pending)
222 if (!a->pending && b->pending)
/* NONE sorts before any valid running time */
227 if (a->running_time == GST_CLOCK_TIME_NONE)
229 if (b->running_time == GST_CLOCK_TIME_NONE)
231 if (a->running_time < b->running_time)
236 static GstElementClass *parent_class = NULL;
237 static gint private_offset = 0;
239 /* cached quark to avoid contention on the global quark table lock */
240 #define META_TAG_VIDEO meta_tag_video_quark
241 static GQuark meta_tag_video_quark;
243 static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
244 static void gst_video_encoder_init (GstVideoEncoder * enc,
245 GstVideoEncoderClass * klass);
247 static void gst_video_encoder_finalize (GObject * object);
249 static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
251 static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
253 static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
255 static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
257 static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
259 static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
260 element, GstStateChange transition);
261 static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
263 static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
265 static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
266 encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
267 GstClockTime duration);
269 static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
271 static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
273 static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
274 encoder, GstQuery * query);
275 static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
276 encoder, GstQuery * query);
277 static gboolean gst_video_encoder_negotiate_default (GstVideoEncoder * encoder);
278 static gboolean gst_video_encoder_negotiate_unlocked (GstVideoEncoder *
281 static gboolean gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
283 static gboolean gst_video_encoder_src_query_default (GstVideoEncoder * encoder,
286 static gboolean gst_video_encoder_transform_meta_default (GstVideoEncoder *
287 encoder, GstVideoCodecFrame * frame, GstMeta * meta);
289 /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
290 * method to get to the padtemplates */
/* Registers (once, thread-safely via g_once_init_*) the abstract
 * GstVideoEncoder GType, its private data, and the GstPreset interface. */
292 gst_video_encoder_get_type (void)
294 static gsize type = 0;
296 if (g_once_init_enter (&type)) {
298 static const GTypeInfo info = {
299 sizeof (GstVideoEncoderClass),
302 (GClassInitFunc) gst_video_encoder_class_init,
305 sizeof (GstVideoEncoder),
307 (GInstanceInitFunc) gst_video_encoder_init,
309 const GInterfaceInfo preset_interface_info = {
310 NULL, /* interface_init */
311 NULL, /* interface_finalize */
312 NULL /* interface_data */
315 _type = g_type_register_static (GST_TYPE_ELEMENT,
316 "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
318 g_type_add_instance_private (_type, sizeof (GstVideoEncoderPrivate));
319 g_type_add_interface_static (_type, GST_TYPE_PRESET,
320 &preset_interface_info);
321 g_once_init_leave (&type, _type);
/* Returns the instance-private struct by applying the offset computed by
 * g_type_add_instance_private() / g_type_class_adjust_private_offset(). */
326 static inline GstVideoEncoderPrivate *
327 gst_video_encoder_get_instance_private (GstVideoEncoder * self)
329 return (G_STRUCT_MEMBER_P (self, private_offset));
/* GObject set_property: dispatches "qos" and "min-force-key-unit-interval"
 * to their dedicated setters. */
333 gst_video_encoder_set_property (GObject * object, guint prop_id,
334 const GValue * value, GParamSpec * pspec)
336 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
340 gst_video_encoder_set_qos_enabled (sink, g_value_get_boolean (value));
342 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
343 gst_video_encoder_set_min_force_key_unit_interval (sink,
344 g_value_get_uint64 (value));
347 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject get_property: mirrors set_property via the public getters. */
353 gst_video_encoder_get_property (GObject * object, guint prop_id, GValue * value,
356 GstVideoEncoder *sink = GST_VIDEO_ENCODER (object);
360 g_value_set_boolean (value, gst_video_encoder_is_qos_enabled (sink));
362 case PROP_MIN_FORCE_KEY_UNIT_INTERVAL:
363 g_value_set_uint64 (value,
364 gst_video_encoder_get_min_force_key_unit_interval (sink));
367 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Class initializer: wires up GObject property handling and finalize,
 * the element state-change handler, the default virtual-method
 * implementations, and installs the "qos" and
 * "min-force-key-unit-interval" properties. */
373 gst_video_encoder_class_init (GstVideoEncoderClass * klass)
375 GObjectClass *gobject_class;
376 GstElementClass *gstelement_class;
378 gobject_class = G_OBJECT_CLASS (klass);
379 gstelement_class = GST_ELEMENT_CLASS (klass);
381 GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
382 "Base Video Encoder");
384 parent_class = g_type_class_peek_parent (klass);
/* adjust the private-struct offset registered in get_type() */
386 if (private_offset != 0)
387 g_type_class_adjust_private_offset (klass, &private_offset);
389 gobject_class->set_property = gst_video_encoder_set_property;
390 gobject_class->get_property = gst_video_encoder_get_property;
391 gobject_class->finalize = gst_video_encoder_finalize;
393 gstelement_class->change_state =
394 GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);
/* default implementations subclasses may chain up to or override */
396 klass->sink_event = gst_video_encoder_sink_event_default;
397 klass->src_event = gst_video_encoder_src_event_default;
398 klass->propose_allocation = gst_video_encoder_propose_allocation_default;
399 klass->decide_allocation = gst_video_encoder_decide_allocation_default;
400 klass->negotiate = gst_video_encoder_negotiate_default;
401 klass->sink_query = gst_video_encoder_sink_query_default;
402 klass->src_query = gst_video_encoder_src_query_default;
403 klass->transform_meta = gst_video_encoder_transform_meta_default;
405 g_object_class_install_property (gobject_class, PROP_QOS,
406 g_param_spec_boolean ("qos", "Qos",
407 "Handle Quality-of-Service events from downstream", DEFAULT_QOS,
408 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
411 * GstVideoEncoder:min-force-key-unit-interval:
413 * Minimum interval between force-keyunit requests in nanoseconds. See
414 * gst_video_encoder_set_min_force_key_unit_interval() for more details.
418 g_object_class_install_property (gobject_class,
419 PROP_MIN_FORCE_KEY_UNIT_INTERVAL,
420 g_param_spec_uint64 ("min-force-key-unit-interval",
421 "Minimum Force Keyunit Interval",
422 "Minimum interval between force-keyunit requests in nanoseconds", 0,
423 G_MAXUINT64, DEFAULT_MIN_FORCE_KEY_UNIT_INTERVAL,
424 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/* cache the quark once to avoid quark-table lock contention later */
426 meta_tag_video_quark = g_quark_from_static_string (GST_META_TAG_VIDEO_STR);
/* Drops a list of pending serialized events, but preserves sticky events
 * (except EOS and SEGMENT) by re-storing them on @pad so they are not
 * lost across a flush.  Consumes @events. */
430 _flush_events (GstPad * pad, GList * events)
434 for (tmp = events; tmp; tmp = tmp->next) {
435 if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
436 GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
437 GST_EVENT_IS_STICKY (tmp->data)) {
438 gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
440 gst_event_unref (tmp->data);
442 g_list_free (events);
/* Resets encoder state under the stream lock: clears frame counters,
 * pending force-key-unit requests, segments, codec states, tags, headers,
 * the cached allocator, queued per-frame events and QoS state.  The @hard
 * flag presumably selects a full reset vs. a flush-time reset — the
 * branching on it is not visible in this excerpt; confirm against the
 * full file.
 *
 * Fix: priv->headers is a list of GstBuffer (see
 * gst_video_encoder_set_headers(), which unrefs them with
 * gst_buffer_unref), so it must be freed with gst_buffer_unref, not
 * gst_event_unref. */
448 gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard)
450 GstVideoEncoderPrivate *priv = encoder->priv;
453 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
455 priv->presentation_frame_number = 0;
456 priv->distance_from_sync = 0;
458 g_queue_clear_full (&priv->force_key_unit,
459 (GDestroyNotify) forced_key_unit_event_free);
460 priv->last_force_key_unit_request = GST_CLOCK_TIME_NONE;
461 priv->last_key_unit = GST_CLOCK_TIME_NONE;
463 priv->drained = TRUE;
465 GST_OBJECT_LOCK (encoder);
468 GST_OBJECT_UNLOCK (encoder);
470 priv->time_adjustment = GST_CLOCK_TIME_NONE;
473 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
474 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
476 if (priv->input_state)
477 gst_video_codec_state_unref (priv->input_state);
478 priv->input_state = NULL;
479 if (priv->output_state)
480 gst_video_codec_state_unref (priv->output_state);
481 priv->output_state = NULL;
483 if (priv->upstream_tags) {
484 gst_tag_list_unref (priv->upstream_tags);
485 priv->upstream_tags = NULL;
488 gst_tag_list_unref (priv->tags);
490 priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
491 priv->tags_changed = FALSE;
/* headers are GstBuffers (set via gst_video_encoder_set_headers) */
493 g_list_foreach (priv->headers, (GFunc) gst_buffer_unref, NULL);
494 g_list_free (priv->headers);
495 priv->headers = NULL;
496 priv->new_headers = FALSE;
498 if (priv->allocator) {
499 gst_object_unref (priv->allocator);
500 priv->allocator = NULL;
503 g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
504 g_list_free (priv->current_frame_events);
505 priv->current_frame_events = NULL;
507 GST_OBJECT_LOCK (encoder);
508 priv->proportion = 0.5;
509 priv->earliest_time = GST_CLOCK_TIME_NONE;
510 priv->qos_frame_duration = 0;
511 GST_OBJECT_UNLOCK (encoder);
/* keep sticky events from queued frames alive across the reset */
518 for (l = priv->frames.head; l; l = l->next) {
519 GstVideoCodecFrame *frame = l->data;
521 frame->events = _flush_events (encoder->srcpad, frame->events);
523 priv->current_frame_events = _flush_events (encoder->srcpad,
524 encoder->priv->current_frame_events);
527 g_queue_clear_full (&priv->frames,
528 (GDestroyNotify) gst_video_codec_frame_unref);
530 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
535 /* Always call reset() in one way or another after this */
/* Invokes the subclass ::flush vfunc (if set — the guard is outside this
 * excerpt) so it can drop any internally queued data. */
537 gst_video_encoder_flush (GstVideoEncoder * encoder)
539 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
543 ret = klass->flush (encoder);
/* Instance initializer: creates sink and src pads from the subclass pad
 * templates (which MUST be named "sink" and "src"), installs the chain/
 * event/query handlers, initializes segments, locks, queues and defaults,
 * then performs an initial hard reset. */
549 gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
551 GstVideoEncoderPrivate *priv;
552 GstPadTemplate *pad_template;
555 GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");
557 priv = encoder->priv = gst_video_encoder_get_instance_private (encoder);
560 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
561 g_return_if_fail (pad_template != NULL);
563 encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");
565 gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
566 gst_pad_set_event_function (pad,
567 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
568 gst_pad_set_query_function (pad,
569 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
570 gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
573 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
574 g_return_if_fail (pad_template != NULL);
576 encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
578 gst_pad_set_query_function (pad,
579 GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
580 gst_pad_set_event_function (pad,
581 GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
582 gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);
584 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
585 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
587 g_rec_mutex_init (&encoder->stream_lock);
589 priv->headers = NULL;
590 priv->new_headers = FALSE;
592 g_queue_init (&priv->frames);
593 g_queue_init (&priv->force_key_unit);
595 priv->min_latency = 0;
596 priv->max_latency = 0;
597 priv->min_pts = GST_CLOCK_TIME_NONE;
598 priv->time_adjustment = GST_CLOCK_TIME_NONE;
600 gst_video_encoder_reset (encoder, TRUE);
604 * gst_video_encoder_set_headers:
605 * @encoder: a #GstVideoEncoder
606 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
608 * Set the codec headers to be sent downstream whenever requested.
/* Takes ownership of @headers, replacing (and unreffing) any previous
 * header list, and flags them for (re)sending via priv->new_headers. */
611 gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
613 GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);
615 GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
616 if (video_encoder->priv->headers) {
617 g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
619 g_list_free (video_encoder->priv->headers);
621 video_encoder->priv->headers = headers;
622 video_encoder->priv->new_headers = TRUE;
624 GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
/* Creates a new output GstVideoCodecState with GST_VIDEO_FORMAT_ENCODED
 * info, copying display-related fields (size, PAR, FPS, colorimetry,
 * interlacing, multiview, HDR metadata) from @reference when provided.
 * Returns NULL if the encoded-format info cannot be initialized. */
627 static GstVideoCodecState *
628 _new_output_state (GstCaps * caps, GstVideoCodecState * reference)
630 GstVideoCodecState *state;
632 state = g_slice_new0 (GstVideoCodecState);
633 state->ref_count = 1;
634 gst_video_info_init (&state->info);
636 if (!gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0)) {
637 g_slice_free (GstVideoCodecState, state);
644 GstVideoInfo *tgt, *ref;
647 ref = &reference->info;
649 /* Copy over extra fields from reference state */
650 tgt->interlace_mode = ref->interlace_mode;
651 tgt->flags = ref->flags;
652 tgt->width = ref->width;
653 tgt->height = ref->height;
654 tgt->chroma_site = ref->chroma_site;
655 tgt->colorimetry = ref->colorimetry;
656 tgt->par_n = ref->par_n;
657 tgt->par_d = ref->par_d;
658 tgt->fps_n = ref->fps_n;
659 tgt->fps_d = ref->fps_d;
661 GST_VIDEO_INFO_FIELD_ORDER (tgt) = GST_VIDEO_INFO_FIELD_ORDER (ref);
663 GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref);
664 GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref);
/* deep-copy optional HDR metadata so the new state owns it */
666 if (reference->mastering_display_info) {
667 state->mastering_display_info = g_slice_dup (GstVideoMasteringDisplayInfo,
668 reference->mastering_display_info);
670 if (reference->content_light_level) {
671 state->content_light_level = g_slice_dup (GstVideoContentLightLevel,
672 reference->content_light_level);
/* Creates a new input GstVideoCodecState from @caps, parsing the video
 * info and any "mastering-display-info" / "content-light-level" caps
 * fields.  Returns NULL (via the error path freeing the state) if the
 * caps cannot be parsed into a GstVideoInfo. */
679 static GstVideoCodecState *
680 _new_input_state (GstCaps * caps)
682 GstVideoCodecState *state;
683 GstStructure *c_struct;
686 state = g_slice_new0 (GstVideoCodecState);
687 state->ref_count = 1;
688 gst_video_info_init (&state->info);
689 if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
691 state->caps = gst_caps_ref (caps);
693 c_struct = gst_caps_get_structure (caps, 0);
695 if ((s = gst_structure_get_string (c_struct, "mastering-display-info"))) {
696 state->mastering_display_info = g_slice_new (GstVideoMasteringDisplayInfo);
697 gst_video_mastering_display_info_from_string (state->mastering_display_info,
700 if ((s = gst_structure_get_string (c_struct, "content-light-level"))) {
701 state->content_light_level = g_slice_new (GstVideoContentLightLevel);
702 gst_video_content_light_level_from_string (state->content_light_level, s);
/* error path: parsing failed, release the partially built state */
709 g_slice_free (GstVideoCodecState, state);
/* Handles new sink caps: short-circuits if caps (or the parsed video
 * info) are unchanged, otherwise builds a new input state, invokes the
 * (deprecated) ::reset and then ::set_format vfuncs, and installs the
 * state as priv->input_state on success. */
715 gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
717 GstVideoEncoderClass *encoder_class;
718 GstVideoCodecState *state;
721 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
723 GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);
725 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* fast path: identical caps need no reconfiguration */
727 if (encoder->priv->input_state) {
728 GST_DEBUG_OBJECT (encoder,
729 "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
730 encoder->priv->input_state->caps, caps);
731 if (gst_caps_is_equal (encoder->priv->input_state->caps, caps))
732 goto caps_not_changed;
735 state = _new_input_state (caps);
736 if (G_UNLIKELY (!state))
/* different caps strings may still parse to equal video info */
739 if (encoder->priv->input_state
740 && gst_video_info_is_equal (&state->info,
741 &encoder->priv->input_state->info)) {
742 gst_video_codec_state_unref (state);
743 goto caps_not_changed;
746 if (encoder_class->reset) {
747 GST_FIXME_OBJECT (encoder, "GstVideoEncoder::reset() is deprecated");
748 encoder_class->reset (encoder, TRUE);
751 /* and subclass should be ready to configure format at any time around */
752 if (encoder_class->set_format != NULL)
753 ret = encoder_class->set_format (encoder, state);
756 if (encoder->priv->input_state)
757 gst_video_codec_state_unref (encoder->priv->input_state);
758 encoder->priv->input_state = state;
/* failure path: subclass rejected the format, drop the new state */
760 gst_video_codec_state_unref (state);
763 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
766 GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);
772 GST_DEBUG_OBJECT (encoder, "Caps did not change - ignore");
773 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
780 GST_WARNING_OBJECT (encoder, "Failed to parse caps")
781 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
787 * gst_video_encoder_proxy_getcaps:
788 * @enc: a #GstVideoEncoder
789 * @caps: (allow-none): initial caps
790 * @filter: (allow-none): filter caps
792 * Returns caps that express @caps (or sink template caps if @caps == NULL)
793 * restricted to resolution/format/... combinations supported by downstream
794 * elements (e.g. muxers).
796 * Returns: (transfer full): a #GstCaps owned by caller
/* Thin wrapper delegating to the shared video-element proxy-getcaps
 * helper with this encoder's sink and src pads. */
799 gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
802 return __gst_video_element_proxy_getcaps (GST_ELEMENT_CAST (encoder),
803 GST_VIDEO_ENCODER_SINK_PAD (encoder),
804 GST_VIDEO_ENCODER_SRC_PAD (encoder), caps, filter);
/* Computes sink caps for a CAPS query: uses the subclass ::getcaps vfunc
 * when provided, otherwise falls back to proxying downstream caps. */
808 gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
810 GstVideoEncoderClass *klass;
813 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
816 caps = klass->getcaps (encoder, filter);
818 caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
820 GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
/* Default ::decide_allocation: picks the first allocator proposed in the
 * ALLOCATION query result (updating it in place) or adds default
 * allocation params when none were proposed. */
826 gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
829 GstAllocator *allocator = NULL;
830 GstAllocationParams params;
831 gboolean update_allocator;
833 /* we got configuration from our peer or the decide_allocation method,
835 if (gst_query_get_n_allocation_params (query) > 0) {
836 /* try the allocator */
837 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
838 update_allocator = TRUE;
841 gst_allocation_params_init (&params);
842 update_allocator = FALSE;
845 if (update_allocator)
846 gst_query_set_nth_allocation_param (query, 0, allocator, &params);
848 gst_query_add_allocation_param (query, allocator, &params);
850 gst_object_unref (allocator);
/* Default ::propose_allocation: when upstream proposed no pool, offers a
 * GstVideoBufferPool sized for the negotiated caps (with 15-byte-aligned
 * default params) and advertises GstVideoMeta support. */
856 gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
864 gst_query_parse_allocation (query, &caps, NULL);
869 if (!gst_video_info_from_caps (&info, caps))
872 size = GST_VIDEO_INFO_SIZE (&info);
874 if (gst_query_get_n_allocation_pools (query) == 0) {
875 GstStructure *structure;
876 GstAllocator *allocator = NULL;
/* align = 15 -> 16-byte alignment for raw video planes */
877 GstAllocationParams params = { 0, 15, 0, 0 };
879 if (gst_query_get_n_allocation_params (query) > 0)
880 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
882 gst_query_add_allocation_param (query, allocator, &params);
884 pool = gst_video_buffer_pool_new ();
886 structure = gst_buffer_pool_get_config (pool);
887 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
888 gst_buffer_pool_config_set_allocator (structure, allocator, &params);
891 gst_object_unref (allocator);
893 if (!gst_buffer_pool_set_config (pool, structure))
896 gst_query_add_allocation_pool (query, pool, size, 0, 0);
897 gst_object_unref (pool);
898 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
/* error path: pool rejected our config */
906 GST_ERROR_OBJECT (encoder, "failed to set config");
907 gst_object_unref (pool);
/* Default ::sink_query: answers CAPS via sink_getcaps, CONVERT via the
 * raw-video conversion helper (needs a configured input state),
 * ALLOCATION via the ::propose_allocation vfunc, and forwards everything
 * else to the default pad query handler. */
913 gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
916 GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
917 gboolean res = FALSE;
919 switch (GST_QUERY_TYPE (query)) {
922 GstCaps *filter, *caps;
924 gst_query_parse_caps (query, &filter);
925 caps = gst_video_encoder_sink_getcaps (encoder, filter);
926 gst_query_set_caps_result (query, caps);
927 gst_caps_unref (caps);
931 case GST_QUERY_CONVERT:
933 GstFormat src_fmt, dest_fmt;
934 gint64 src_val, dest_val;
936 GST_DEBUG_OBJECT (encoder, "convert query");
938 gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
939 GST_OBJECT_LOCK (encoder);
940 if (encoder->priv->input_state != NULL)
941 res = __gst_video_rawvideo_convert (encoder->priv->input_state,
942 src_fmt, src_val, &dest_fmt, &dest_val);
945 GST_OBJECT_UNLOCK (encoder);
948 gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
951 case GST_QUERY_ALLOCATION:
953 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
955 if (klass->propose_allocation)
956 res = klass->propose_allocation (encoder, query);
960 res = gst_pad_query_default (pad, GST_OBJECT (encoder), query);
966 GST_DEBUG_OBJECT (encoder, "query failed");
/* Sink pad query function: dispatches every query to the class
 * ::sink_query vfunc (default: gst_video_encoder_sink_query_default). */
971 gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
974 GstVideoEncoder *encoder;
975 GstVideoEncoderClass *encoder_class;
976 gboolean ret = FALSE;
978 encoder = GST_VIDEO_ENCODER (parent);
979 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
981 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
982 GST_QUERY_TYPE_NAME (query));
984 if (encoder_class->sink_query)
985 ret = encoder_class->sink_query (encoder, query);
/* GObject finalize: releases the stream lock mutex and any cached
 * allocator, then chains up to the parent class. */
991 gst_video_encoder_finalize (GObject * object)
993 GstVideoEncoder *encoder;
995 GST_DEBUG_OBJECT (object, "finalize");
997 encoder = GST_VIDEO_ENCODER (object);
998 g_rec_mutex_clear (&encoder->stream_lock);
1000 if (encoder->priv->allocator) {
1001 gst_object_unref (encoder->priv->allocator);
1002 encoder->priv->allocator = NULL;
1005 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Pushes @event on the src pad.  SEGMENT events get special handling:
 * non-TIME segments are rejected, priv->time_adjustment is applied to
 * start/position/stop, the result is stored as the output segment, and a
 * fresh SEGMENT event is created from it before pushing. */
1009 gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
1011 switch (GST_EVENT_TYPE (event)) {
1012 case GST_EVENT_SEGMENT:
1016 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1018 gst_event_copy_segment (event, &segment);
1020 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
1022 if (segment.format != GST_FORMAT_TIME) {
1023 GST_DEBUG_OBJECT (encoder, "received non TIME segment");
1024 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* shift the outgoing segment by the same offset applied to timestamps */
1028 if (encoder->priv->time_adjustment != GST_CLOCK_TIME_NONE) {
1029 segment.start += encoder->priv->time_adjustment;
1030 if (GST_CLOCK_TIME_IS_VALID (segment.position)) {
1031 segment.position += encoder->priv->time_adjustment;
1033 if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
1034 segment.stop += encoder->priv->time_adjustment;
1038 encoder->output_segment = segment;
1039 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1041 gst_event_unref (event);
1042 event = gst_event_new_segment (&encoder->output_segment);
1050 return gst_pad_push_event (encoder->srcpad, event);
/* Builds a TAG event from upstream tags merged with encoder tags using
 * the configured merge mode.  Returns NULL (via the paths after the
 * merged_tags checks) when the merge result is NULL or empty. */
1054 gst_video_encoder_create_merged_tags_event (GstVideoEncoder * enc)
1056 GstTagList *merged_tags;
1058 GST_LOG_OBJECT (enc, "upstream : %" GST_PTR_FORMAT, enc->priv->upstream_tags);
1059 GST_LOG_OBJECT (enc, "encoder : %" GST_PTR_FORMAT, enc->priv->tags);
1060 GST_LOG_OBJECT (enc, "mode : %d", enc->priv->tags_merge_mode);
1063 gst_tag_list_merge (enc->priv->upstream_tags, enc->priv->tags,
1064 enc->priv->tags_merge_mode);
1066 GST_DEBUG_OBJECT (enc, "merged : %" GST_PTR_FORMAT, merged_tags);
1068 if (merged_tags == NULL)
1071 if (gst_tag_list_is_empty (merged_tags)) {
1072 gst_tag_list_unref (merged_tags);
1076 return gst_event_new_tag (merged_tags);
/* If tags changed since the last push, emits a freshly merged TAG event
 * downstream and clears the tags_changed flag. */
1080 gst_video_encoder_check_and_push_tags (GstVideoEncoder * encoder)
1082 if (encoder->priv->tags_changed) {
1083 GstEvent *tags_event;
1085 tags_event = gst_video_encoder_create_merged_tags_event (encoder);
1087 if (tags_event != NULL)
1088 gst_video_encoder_push_event (encoder, tags_event);
1090 encoder->priv->tags_changed = FALSE;
1095 gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
1098 GstVideoEncoderClass *encoder_class;
1099 gboolean ret = FALSE;
1101 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1103 switch (GST_EVENT_TYPE (event)) {
1104 case GST_EVENT_CAPS:
1108 gst_event_parse_caps (event, &caps);
1109 ret = gst_video_encoder_setcaps (encoder, caps);
1111 gst_event_unref (event);
1117 GstFlowReturn flow_ret;
1119 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1121 if (encoder_class->finish) {
1122 flow_ret = encoder_class->finish (encoder);
1124 flow_ret = GST_FLOW_OK;
1127 if (encoder->priv->current_frame_events) {
1130 for (l = g_list_last (encoder->priv->current_frame_events); l;
1131 l = g_list_previous (l)) {
1132 GstEvent *event = GST_EVENT (l->data);
1134 gst_video_encoder_push_event (encoder, event);
1137 g_list_free (encoder->priv->current_frame_events);
1138 encoder->priv->current_frame_events = NULL;
1140 gst_video_encoder_check_and_push_tags (encoder);
1142 ret = (flow_ret == GST_FLOW_OK);
1143 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1146 case GST_EVENT_SEGMENT:
1150 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1152 gst_event_copy_segment (event, &segment);
1154 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
1156 if (segment.format != GST_FORMAT_TIME) {
1157 GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
1158 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1162 encoder->input_segment = segment;
1164 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1167 case GST_EVENT_CUSTOM_DOWNSTREAM:
1169 if (gst_video_event_is_force_key_unit (event)) {
1170 GstClockTime running_time;
1171 gboolean all_headers;
1174 if (gst_video_event_parse_downstream_force_key_unit (event,
1175 NULL, NULL, &running_time, &all_headers, &count)) {
1176 ForcedKeyUnitEvent *fevt;
1178 GST_OBJECT_LOCK (encoder);
1179 fevt = forced_key_unit_event_new (running_time, all_headers, count);
1180 g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
1181 (GCompareDataFunc) forced_key_unit_event_compare, NULL);
1182 GST_OBJECT_UNLOCK (encoder);
1184 GST_DEBUG_OBJECT (encoder,
1185 "force-key-unit event: running-time %" GST_TIME_FORMAT
1186 ", all_headers %d, count %u",
1187 GST_TIME_ARGS (running_time), all_headers, count);
1189 gst_event_unref (event);
1195 case GST_EVENT_STREAM_START:
1197 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1198 /* Flush upstream tags after a STREAM_START */
1199 GST_DEBUG_OBJECT (encoder, "STREAM_START, clearing upstream tags");
1200 if (encoder->priv->upstream_tags) {
1201 gst_tag_list_unref (encoder->priv->upstream_tags);
1202 encoder->priv->upstream_tags = NULL;
1203 encoder->priv->tags_changed = TRUE;
1205 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1212 gst_event_parse_tag (event, &tags);
1214 if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
1215 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1216 if (encoder->priv->upstream_tags != tags) {
1217 tags = gst_tag_list_copy (tags);
1219 /* FIXME: make generic based on GST_TAG_FLAG_ENCODED */
1220 gst_tag_list_remove_tag (tags, GST_TAG_CODEC);
1221 gst_tag_list_remove_tag (tags, GST_TAG_AUDIO_CODEC);
1222 gst_tag_list_remove_tag (tags, GST_TAG_VIDEO_CODEC);
1223 gst_tag_list_remove_tag (tags, GST_TAG_SUBTITLE_CODEC);
1224 gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
1225 gst_tag_list_remove_tag (tags, GST_TAG_BITRATE);
1226 gst_tag_list_remove_tag (tags, GST_TAG_NOMINAL_BITRATE);
1227 gst_tag_list_remove_tag (tags, GST_TAG_MAXIMUM_BITRATE);
1228 gst_tag_list_remove_tag (tags, GST_TAG_MINIMUM_BITRATE);
1229 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER);
1230 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER_VERSION);
1232 if (encoder->priv->upstream_tags)
1233 gst_tag_list_unref (encoder->priv->upstream_tags);
1234 encoder->priv->upstream_tags = tags;
1235 GST_INFO_OBJECT (encoder, "upstream tags: %" GST_PTR_FORMAT, tags);
1237 gst_event_unref (event);
1238 event = gst_video_encoder_create_merged_tags_event (encoder);
1239 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1245 case GST_EVENT_FLUSH_STOP:{
1246 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1247 gst_video_encoder_flush (encoder);
1248 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
1249 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
1250 gst_video_encoder_reset (encoder, FALSE);
1251 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1258 /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
1259 * For EOS this is required because no buffer or serialized event
1260 * will come after EOS and nothing could trigger another
1261 * _finish_frame() call. *
1262 * If the subclass handles sending of EOS manually it can simply
1263 * not chain up to the parent class' event handler
1265 * For FLUSH_STOP this is required because it is expected
1266 * to be forwarded immediately and no buffers are queued anyway.
1269 if (!GST_EVENT_IS_SERIALIZED (event)
1270 || GST_EVENT_TYPE (event) == GST_EVENT_EOS
1271 || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
1272 ret = gst_video_encoder_push_event (encoder, event);
1274 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1275 encoder->priv->current_frame_events =
1276 g_list_prepend (encoder->priv->current_frame_events, event);
1277 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Sink-pad event chain function: logs the incoming event and dispatches
 * it to the subclass-overridable sink_event vfunc.  The result of the
 * vfunc (or TRUE if no vfunc is set) is the pad-event return value. */
1286 gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
1289 GstVideoEncoder *enc;
1290 GstVideoEncoderClass *klass;
1291 gboolean ret = TRUE;
1293 enc = GST_VIDEO_ENCODER (parent);
1294 klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
1296 GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
1297 GST_EVENT_TYPE_NAME (event));
/* vfunc takes ownership of the event */
1299 if (klass->sink_event)
1300 ret = klass->sink_event (enc, event);
/* Default handler for events arriving on the source (output) pad.
 * Handles upstream force-key-unit requests (queued sorted by running
 * time, consumed in _chain) and QoS updates (stored in priv under the
 * object lock and forwarded to the sink pad); everything else falls
 * through to gst_pad_event_default(). */
1306 gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
1309 gboolean ret = FALSE;
1310 GstVideoEncoderPrivate *priv = encoder->priv;
1312 switch (GST_EVENT_TYPE (event)) {
1313 case GST_EVENT_CUSTOM_UPSTREAM:
1315 if (gst_video_event_is_force_key_unit (event)) {
1316 GstClockTime running_time;
1317 gboolean all_headers;
1320 if (gst_video_event_parse_upstream_force_key_unit (event,
1321 &running_time, &all_headers, &count)) {
1322 ForcedKeyUnitEvent *fevt;
1324 GST_OBJECT_LOCK (encoder);
1325 fevt = forced_key_unit_event_new (running_time, all_headers, count);
/* keep the queue ordered by running time so requests fire in order */
1326 g_queue_insert_sorted (&encoder->priv->force_key_unit, fevt,
1327 (GCompareDataFunc) forced_key_unit_event_compare, NULL);
1328 GST_OBJECT_UNLOCK (encoder);
1330 GST_DEBUG_OBJECT (encoder,
1331 "force-key-unit event: running-time %" GST_TIME_FORMAT
1332 ", all_headers %d, count %u",
1333 GST_TIME_ARGS (running_time), all_headers, count);
1335 gst_event_unref (event);
1345 GstClockTimeDiff diff;
1346 GstClockTime timestamp;
1348 if (!g_atomic_int_get (&priv->qos_enabled))
/* FIX: "&timestamp" had been mangled to the HTML entity "×tamp";
 * restore the address-of operator */
1351 gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
1353 GST_OBJECT_LOCK (encoder);
1354 priv->proportion = proportion;
1355 if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
1356 if (G_UNLIKELY (diff > 0)) {
/* running late: project past the jitter plus one frame duration */
1357 priv->earliest_time = timestamp + 2 * diff + priv->qos_frame_duration;
1359 priv->earliest_time = timestamp + diff;
1362 priv->earliest_time = GST_CLOCK_TIME_NONE;
1364 GST_OBJECT_UNLOCK (encoder);
1366 GST_DEBUG_OBJECT (encoder,
1367 "got QoS %" GST_TIME_FORMAT ", %" GST_STIME_FORMAT ", %g",
1368 GST_TIME_ARGS (timestamp), GST_STIME_ARGS (diff), proportion);
/* QoS is also forwarded upstream via the sink pad */
1370 ret = gst_pad_push_event (encoder->sinkpad, event);
1380 gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
/* Source-pad event chain function: logs the event and dispatches it to
 * the subclass-overridable src_event vfunc. */
1387 gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
1389 GstVideoEncoder *encoder;
1390 GstVideoEncoderClass *klass;
1391 gboolean ret = FALSE;
1393 encoder = GST_VIDEO_ENCODER (parent);
1394 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1396 GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
/* vfunc takes ownership of the event */
1398 if (klass->src_event)
1399 ret = klass->src_event (encoder, event);
/* Default handler for queries on the source pad.
 * CONVERT is answered from the encoder's byte/time accounting
 * (priv->bytes / priv->time, read under the object lock); LATENCY is
 * answered by querying the peer and adding this encoder's own
 * min/max latency; everything else goes to gst_pad_query_default(). */
1405 gst_video_encoder_src_query_default (GstVideoEncoder * enc, GstQuery * query)
1407 GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (enc);
1408 GstVideoEncoderPrivate *priv;
1413 GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
1415 switch (GST_QUERY_TYPE (query)) {
1416 case GST_QUERY_CONVERT:
1418 GstFormat src_fmt, dest_fmt;
1419 gint64 src_val, dest_val;
1421 gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
1422 GST_OBJECT_LOCK (enc);
1424 __gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
1425 src_val, &dest_fmt, &dest_val);
1426 GST_OBJECT_UNLOCK (enc);
1429 gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
1432 case GST_QUERY_LATENCY:
1435 GstClockTime min_latency, max_latency;
1437 res = gst_pad_peer_query (enc->sinkpad, query);
1439 gst_query_parse_latency (query, &live, &min_latency, &max_latency);
1440 GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
1441 GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
1442 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
1444 GST_OBJECT_LOCK (enc);
1445 min_latency += priv->min_latency;
/* NONE on either side means unbounded max latency */
1446 if (max_latency == GST_CLOCK_TIME_NONE
1447 || enc->priv->max_latency == GST_CLOCK_TIME_NONE)
1448 max_latency = GST_CLOCK_TIME_NONE;
1450 max_latency += enc->priv->max_latency;
1451 GST_OBJECT_UNLOCK (enc);
1453 gst_query_set_latency (query, live, min_latency, max_latency);
1458 res = gst_pad_query_default (pad, GST_OBJECT (enc), query);
1463 GST_DEBUG_OBJECT (enc, "query failed");
/* Source-pad query chain function: logs the query and dispatches it to
 * the subclass-overridable src_query vfunc. */
1468 gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
1470 GstVideoEncoder *encoder;
1471 GstVideoEncoderClass *encoder_class;
1472 gboolean ret = FALSE;
1474 encoder = GST_VIDEO_ENCODER (parent);
1475 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1477 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
1478 GST_QUERY_TYPE_NAME (query));
1480 if (encoder_class->src_query)
1481 ret = encoder_class->src_query (encoder, query);
/* Allocate and initialize a new GstVideoCodecFrame for input buffer
 * @buf.  Assigns monotonically increasing system/presentation frame
 * numbers (under the stream lock), takes over the events collected
 * since the previous frame, and stores the input buffer, timestamps
 * and duration.  Returns a frame with refcount 1 (caller owns it). */
1486 static GstVideoCodecFrame *
1487 gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
1488 GstClockTime pts, GstClockTime dts, GstClockTime duration)
1490 GstVideoEncoderPrivate *priv = encoder->priv;
1491 GstVideoCodecFrame *frame;
1493 frame = g_slice_new0 (GstVideoCodecFrame);
1495 frame->ref_count = 1;
1497 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1498 frame->system_frame_number = priv->system_frame_number;
1499 priv->system_frame_number++;
1501 frame->presentation_frame_number = priv->presentation_frame_number;
1502 priv->presentation_frame_number++;
1503 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* pending serialized events travel with this frame and are pushed
 * before its output buffer */
1505 frame->events = priv->current_frame_events;
1506 priv->current_frame_events = NULL;
1507 frame->input_buffer = buf;
1510 frame->duration = duration;
/* stash the original PTS for later DTS inference */
1511 frame->abidata.ABI.ts = pts;
/* Sink-pad chain function: the main input path of the base class.
 * Clips the buffer to the input segment, applies the min-pts time
 * adjustment, wraps the buffer in a GstVideoCodecFrame, evaluates
 * queued force-key-unit requests (with optional throttling via
 * min_force_key_unit_interval), queues the frame and hands it to the
 * subclass' handle_frame vfunc.  Takes ownership of @buf. */
1517 static GstFlowReturn
1518 gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
1520 GstVideoEncoder *encoder;
1521 GstVideoEncoderPrivate *priv;
1522 GstVideoEncoderClass *klass;
1523 GstVideoCodecFrame *frame;
1524 GstClockTime pts, duration;
1525 GstFlowReturn ret = GST_FLOW_OK;
1526 guint64 start, stop, cstart, cstop;
1528 encoder = GST_VIDEO_ENCODER (parent);
1529 priv = encoder->priv;
1530 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1532 g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
/* no caps/set_format yet -> cannot process data */
1534 if (!encoder->priv->input_state)
1535 goto not_negotiated;
1537 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1539 pts = GST_BUFFER_PTS (buf);
1540 duration = GST_BUFFER_DURATION (buf);
1542 GST_LOG_OBJECT (encoder,
1543 "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
1544 ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
1545 gst_buffer_get_size (buf), GST_TIME_ARGS (pts),
1546 GST_TIME_ARGS (GST_BUFFER_DTS (buf)), GST_TIME_ARGS (duration));
1549 if (GST_CLOCK_TIME_IS_VALID (duration))
1550 stop = start + duration;
1552 stop = GST_CLOCK_TIME_NONE;
1554 /* Drop buffers outside of segment */
1555 if (!gst_segment_clip (&encoder->input_segment,
1556 GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
1557 GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame");
1558 gst_buffer_unref (buf);
/* duration is recomputed from the clipped start/stop */
1562 if (GST_CLOCK_TIME_IS_VALID (cstop))
1563 duration = cstop - cstart;
1565 duration = GST_CLOCK_TIME_NONE;
/* one-time offset so no output timestamp falls below min_pts */
1567 if (priv->min_pts != GST_CLOCK_TIME_NONE
1568 && priv->time_adjustment == GST_CLOCK_TIME_NONE) {
1569 if (cstart < priv->min_pts) {
1570 priv->time_adjustment = priv->min_pts - cstart;
1574 if (priv->time_adjustment != GST_CLOCK_TIME_NONE) {
1575 cstart += priv->time_adjustment;
1578 /* incoming DTS is not really relevant and does not make sense anyway,
1579 * so pass along _NONE and maybe come up with something better later on */
1580 frame = gst_video_encoder_new_frame (encoder, buf, cstart,
1581 GST_CLOCK_TIME_NONE, duration);
1583 GST_OBJECT_LOCK (encoder);
/* evaluate queued force-key-unit requests against this frame */
1584 if (priv->force_key_unit.head) {
1586 GstClockTime running_time;
1587 gboolean throttled, have_fevt = FALSE, have_pending_none_fevt = FALSE;
1588 GQueue matching_fevt = G_QUEUE_INIT;
1591 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
/* throttled: a key unit was requested/produced too recently per
 * min_force_key_unit_interval */
1594 throttled = (priv->min_force_key_unit_interval != 0 &&
1595 priv->min_force_key_unit_interval != GST_CLOCK_TIME_NONE &&
1596 ((priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
1597 priv->last_force_key_unit_request +
1598 priv->min_force_key_unit_interval > running_time)
1599 || (priv->last_key_unit != GST_CLOCK_TIME_NONE
1600 && priv->last_key_unit + priv->min_force_key_unit_interval >
1603 for (l = priv->force_key_unit.head; l && (!throttled || !have_fevt);
1605 ForcedKeyUnitEvent *fevt = l->data;
1607 /* Skip pending keyunits */
1608 if (fevt->pending) {
1609 if (fevt->running_time == GST_CLOCK_TIME_NONE)
1610 have_pending_none_fevt = TRUE;
1614 /* Simple case, keyunit ASAP */
1615 if (fevt->running_time == GST_CLOCK_TIME_NONE) {
1618 g_queue_push_tail (&matching_fevt, fevt);
1622 /* Event for before this frame */
1623 if (fevt->running_time <= running_time) {
1626 g_queue_push_tail (&matching_fevt, fevt);
1630 /* Otherwise all following events are in the future */
1634 if (throttled && have_fevt) {
1635 GstClockTime last_time;
1637 if (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE &&
1638 priv->last_force_key_unit_request +
1639 priv->min_force_key_unit_interval > running_time) {
1640 last_time = priv->last_force_key_unit_request;
1642 last_time = priv->last_key_unit;
1645 GST_DEBUG_OBJECT (encoder,
1646 "Not requesting a new key unit yet due to throttling (%"
1647 GST_TIME_FORMAT " + %" GST_TIME_FORMAT " > %" GST_TIME_FORMAT,
1648 GST_TIME_ARGS (last_time),
1649 GST_TIME_ARGS (priv->min_force_key_unit_interval),
1650 GST_TIME_ARGS (running_time));
1651 g_queue_clear (&matching_fevt);
1654 if (matching_fevt.length > 0) {
1655 ForcedKeyUnitEvent *fevt;
1656 gboolean all_headers = FALSE;
1657 gboolean force_keyunit = FALSE;
/* mark matches pending; deduplicate requests already answered by an
 * earlier request or an already-produced key unit */
1659 while ((fevt = g_queue_pop_head (&matching_fevt))) {
1660 fevt->pending = TRUE;
1662 if ((fevt->running_time == GST_CLOCK_TIME_NONE
1663 && have_pending_none_fevt)
1664 || (priv->last_force_key_unit_request != GST_CLOCK_TIME_NONE
1665 && fevt->running_time != GST_CLOCK_TIME_NONE
1666 && fevt->running_time <= priv->last_force_key_unit_request) ||
1667 (priv->last_key_unit != GST_CLOCK_TIME_NONE
1668 && fevt->running_time != GST_CLOCK_TIME_NONE
1669 && fevt->running_time <= priv->last_key_unit)) {
1670 GST_DEBUG_OBJECT (encoder,
1671 "Not requesting another key unit at running time %"
1672 GST_TIME_FORMAT, GST_TIME_ARGS (fevt->running_time));
1674 force_keyunit = TRUE;
1675 fevt->frame_id = frame->system_frame_number;
1676 if (fevt->all_headers)
1681 if (force_keyunit) {
1682 GST_DEBUG_OBJECT (encoder,
1683 "Forcing a key unit at running time %" GST_TIME_FORMAT,
1684 GST_TIME_ARGS (running_time));
1686 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
1688 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
1689 priv->last_force_key_unit_request = running_time;
1693 GST_OBJECT_UNLOCK (encoder);
/* base class keeps its own ref while the subclass processes the frame */
1695 g_queue_push_tail (&priv->frames, gst_video_codec_frame_ref (frame));
1697 /* new data, more finish needed */
1698 priv->drained = FALSE;
1700 GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
1701 frame->presentation_frame_number);
1704 gst_segment_to_running_time (&encoder->input_segment, GST_FORMAT_TIME,
1707 ret = klass->handle_frame (encoder, frame);
1710 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* ERRORS */
1717 GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
1718 ("encoder not initialized"));
1719 gst_buffer_unref (buf);
1720 return GST_FLOW_NOT_NEGOTIATED;
/* GstElement state-change handler: calls the subclass open/start vfuncs
 * on the way up and stop/close on the way down, resetting the base
 * class state (hard reset) around READY<->PAUSED transitions.  Any
 * vfunc returning FALSE posts an element error and fails the state
 * change. */
1724 static GstStateChangeReturn
1725 gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
1727 GstVideoEncoder *encoder;
1728 GstVideoEncoderClass *encoder_class;
1729 GstStateChangeReturn ret;
1731 encoder = GST_VIDEO_ENCODER (element);
1732 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);
1734 switch (transition) {
1735 case GST_STATE_CHANGE_NULL_TO_READY:
1736 /* open device/library if needed */
1737 if (encoder_class->open && !encoder_class->open (encoder))
1740 case GST_STATE_CHANGE_READY_TO_PAUSED:
1741 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1742 gst_video_encoder_reset (encoder, TRUE);
1743 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1745 /* Initialize device/library if needed */
1746 if (encoder_class->start && !encoder_class->start (encoder))
/* chain up to the parent for the actual state change */
1753 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1755 switch (transition) {
1756 case GST_STATE_CHANGE_PAUSED_TO_READY:{
1757 gboolean stopped = TRUE;
1759 if (encoder_class->stop)
1760 stopped = encoder_class->stop (encoder);
1762 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1763 gst_video_encoder_reset (encoder, TRUE);
1764 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1770 case GST_STATE_CHANGE_READY_TO_NULL:
1771 /* close device/library if needed */
1772 if (encoder_class->close && !encoder_class->close (encoder))
/* ERRORS */
1785 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1786 ("Failed to open encoder"));
1787 return GST_STATE_CHANGE_FAILURE;
1792 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1793 ("Failed to start encoder"));
1794 return GST_STATE_CHANGE_FAILURE;
1799 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1800 ("Failed to stop encoder"));
1801 return GST_STATE_CHANGE_FAILURE;
1806 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1807 ("Failed to close encoder"));
1808 return GST_STATE_CHANGE_FAILURE;
/* Default negotiate implementation: completes the output caps from the
 * configured GstVideoInfo (size, PAR, framerate, interlacing,
 * colorimetry, chroma-site, multiview, HDR metadata), pushes pending
 * pre-CAPS events, sets the caps on the source pad and performs the
 * ALLOCATION query, storing the resulting allocator/params in priv. */
1813 gst_video_encoder_negotiate_default (GstVideoEncoder * encoder)
1815 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1816 GstAllocator *allocator;
1817 GstAllocationParams params;
1818 gboolean ret = TRUE;
1819 GstVideoCodecState *state = encoder->priv->output_state;
1820 GstVideoInfo *info = &state->info;
1821 GstQuery *query = NULL;
1822 GstVideoCodecFrame *frame;
1826 g_return_val_if_fail (state->caps != NULL, FALSE);
/* only (re)fill the caps when the subclass changed the output state */
1828 if (encoder->priv->output_state_changed) {
1829 GstStructure *out_struct;
1831 state->caps = gst_caps_make_writable (state->caps);
1834 gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
1835 "height", G_TYPE_INT, info->height,
1836 "pixel-aspect-ratio", GST_TYPE_FRACTION,
1837 info->par_n, info->par_d, NULL);
1838 if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
1839 /* variable fps with a max-framerate */
1840 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
1841 "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
1843 /* no variable fps or no max-framerate */
1844 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
1845 info->fps_n, info->fps_d, NULL);
1847 if (state->codec_data)
1848 gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
1849 state->codec_data, NULL);
1851 gst_caps_set_simple (state->caps, "interlace-mode", G_TYPE_STRING,
1852 gst_video_interlace_mode_to_string (info->interlace_mode), NULL);
1853 if (info->interlace_mode == GST_VIDEO_INTERLACE_MODE_INTERLEAVED &&
1854 GST_VIDEO_INFO_FIELD_ORDER (info) != GST_VIDEO_FIELD_ORDER_UNKNOWN)
1855 gst_caps_set_simple (state->caps, "field-order", G_TYPE_STRING,
1856 gst_video_field_order_to_string (GST_VIDEO_INFO_FIELD_ORDER (info)),
1859 colorimetry = gst_video_colorimetry_to_string (&info->colorimetry);
1861 gst_caps_set_simple (state->caps, "colorimetry", G_TYPE_STRING,
1863 g_free (colorimetry);
1865 if (info->chroma_site != GST_VIDEO_CHROMA_SITE_UNKNOWN) {
1866 gchar *chroma_site = gst_video_chroma_site_to_string (info->chroma_site);
1869 GST_WARNING ("Couldn't convert chroma-site 0x%x to string",
1872 gst_caps_set_simple (state->caps,
1873 "chroma-site", G_TYPE_STRING, chroma_site, NULL);
1874 g_free (chroma_site);
1878 if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
1879 const gchar *caps_mview_mode =
1880 gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
1883 gst_caps_set_simple (state->caps, "multiview-mode", G_TYPE_STRING,
1884 caps_mview_mode, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
1885 GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL);
1888 out_struct = gst_caps_get_structure (state->caps, 0);
1890 /* forward upstream mastering display info and content light level
1891 * if subclass didn't set */
1892 if (state->mastering_display_info &&
1893 !gst_structure_has_field (out_struct, "mastering-display-info")) {
1894 gst_video_mastering_display_info_add_to_caps
1895 (state->mastering_display_info, state->caps);
1898 if (state->content_light_level &&
1899 !gst_structure_has_field (out_struct, "content-light-level")) {
1900 gst_video_content_light_level_add_to_caps (state->content_light_level,
1904 encoder->priv->output_state_changed = FALSE;
1907 if (state->allocation_caps == NULL)
1908 state->allocation_caps = gst_caps_ref (state->caps);
1910 /* Push all pending pre-caps events of the oldest frame before
1912 frame = encoder->priv->frames.head ? encoder->priv->frames.head->data : NULL;
1913 if (frame || encoder->priv->current_frame_events) {
1917 events = &frame->events;
1919 events = &encoder->priv->current_frame_events;
/* events are stored newest-first; iterate from the tail to push in
 * original arrival order */
1922 for (l = g_list_last (*events); l;) {
1923 GstEvent *event = GST_EVENT (l->data);
1926 if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
1927 gst_video_encoder_push_event (encoder, event);
1930 *events = g_list_delete_link (*events, tmp);
/* only set caps when they actually changed */
1937 prevcaps = gst_pad_get_current_caps (encoder->srcpad);
1938 if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
1939 ret = gst_pad_set_caps (encoder->srcpad, state->caps);
1943 gst_caps_unref (prevcaps);
1948 query = gst_query_new_allocation (state->allocation_caps, TRUE);
1949 if (!gst_pad_peer_query (encoder->srcpad, query)) {
1950 GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
1953 g_assert (klass->decide_allocation != NULL);
1954 ret = klass->decide_allocation (encoder, query);
1956 GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
1960 goto no_decide_allocation;
1962 /* we got configuration from our peer or the decide_allocation method,
/* FIX: "&params" had been mangled to the HTML entity "¶ms" in the
 * two calls below; restore the address-of operator */
1964 if (gst_query_get_n_allocation_params (query) > 0) {
1965 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
1968 gst_allocation_params_init (&params);
1971 if (encoder->priv->allocator)
1972 gst_object_unref (encoder->priv->allocator);
1973 encoder->priv->allocator = allocator;
1974 encoder->priv->params = params;
1978 gst_query_unref (query);
/* ERRORS */
1983 no_decide_allocation:
1985 GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
/* Run the negotiate vfunc without touching the reconfigure flag;
 * assumes the caller already holds the stream lock. */
1991 gst_video_encoder_negotiate_unlocked (GstVideoEncoder * encoder)
1993 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1994 gboolean ret = TRUE;
1996 if (G_LIKELY (klass->negotiate))
1997 ret = klass->negotiate (encoder);
2003 * gst_video_encoder_negotiate:
2004 * @encoder: a #GstVideoEncoder
2006 * Negotiate with downstream elements to currently configured #GstVideoCodecState.
2007 * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
2010 * Returns: %TRUE if the negotiation succeeded, else %FALSE.
2013 gst_video_encoder_negotiate (GstVideoEncoder * encoder)
2015 GstVideoEncoderClass *klass;
2016 gboolean ret = TRUE;
/* public entry point: requires a configured output state */
2018 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
2019 g_return_val_if_fail (encoder->priv->output_state, FALSE);
2021 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2023 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* clear NEED_RECONFIGURE; re-mark it below if negotiation fails */
2024 gst_pad_check_reconfigure (encoder->srcpad);
2025 if (klass->negotiate) {
2026 ret = klass->negotiate (encoder);
2028 gst_pad_mark_reconfigure (encoder->srcpad);
2030 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2036 * gst_video_encoder_allocate_output_buffer:
2037 * @encoder: a #GstVideoEncoder
2038 * @size: size of the buffer
2040 * Helper function that allocates a buffer to hold an encoded video frame
2041 * for @encoder's current #GstVideoCodecState.
2043 * Returns: (transfer full): allocated buffer
2046 gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
2049 gboolean needs_reconfigure = FALSE;
2051 g_return_val_if_fail (size > 0, NULL);
2053 GST_DEBUG ("alloc src buffer");
2055 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* renegotiate first if the output state changed or downstream asked
 * for reconfiguration; on failure fall back to a default allocation */
2056 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2057 if (G_UNLIKELY (encoder->priv->output_state_changed
2058 || (encoder->priv->output_state && needs_reconfigure))) {
2059 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2060 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
2061 gst_pad_mark_reconfigure (encoder->srcpad);
/* allocate with the negotiated allocator/params */
2067 gst_buffer_new_allocate (encoder->priv->allocator, size,
2068 &encoder->priv->params);
2070 GST_INFO_OBJECT (encoder, "couldn't allocate output buffer");
2074 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* fallback: default (system memory) allocation */
2079 buffer = gst_buffer_new_allocate (NULL, size, NULL);
2081 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2087 * gst_video_encoder_allocate_output_frame:
2088 * @encoder: a #GstVideoEncoder
2089 * @frame: a #GstVideoCodecFrame
2090 * @size: size of the buffer
2092 * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
2093 * current #GstVideoCodecState. Subclass should already have configured video
2094 * state and set src pad caps.
2096 * The buffer allocated here is owned by the frame and you should only
2097 * keep references to the frame, not the buffer.
2099 * Returns: %GST_FLOW_OK if an output buffer could be allocated
2102 gst_video_encoder_allocate_output_frame (GstVideoEncoder *
2103 encoder, GstVideoCodecFrame * frame, gsize size)
2105 gboolean needs_reconfigure = FALSE;
2107 g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);
2109 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* renegotiate if needed before allocating, same as
 * _allocate_output_buffer() */
2110 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2111 if (G_UNLIKELY (encoder->priv->output_state_changed
2112 || (encoder->priv->output_state && needs_reconfigure))) {
2113 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2114 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
2115 gst_pad_mark_reconfigure (encoder->srcpad);
2119 GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);
2121 frame->output_buffer =
2122 gst_buffer_new_allocate (encoder->priv->allocator, size,
2123 &encoder->priv->params);
2125 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2127 return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
/* Remove @frame from the pending-frames queue (dropping the queue's
 * ref) and release the caller's ref.  Takes ownership of @frame. */
2131 gst_video_encoder_release_frame (GstVideoEncoder * enc,
2132 GstVideoCodecFrame * frame)
2136 /* unref once from the list */
2137 link = g_queue_find (&enc->priv->frames, frame);
2139 gst_video_codec_frame_unref (frame);
2140 g_queue_delete_link (&enc->priv->frames, link);
2142 /* unref because this function takes ownership */
2143 gst_video_codec_frame_unref (frame);
/* Default transform_meta implementation: a meta is copied to the
 * output buffer only if every tag on its API type is one of the
 * generic video tags listed in supported_tags. */
2147 gst_video_encoder_transform_meta_default (GstVideoEncoder *
2148 encoder, GstVideoCodecFrame * frame, GstMeta * meta)
2150 const GstMetaInfo *info = meta->info;
2151 const gchar *const *tags;
2152 const gchar *const supported_tags[] = {
2153 GST_META_TAG_VIDEO_STR,
2154 GST_META_TAG_VIDEO_ORIENTATION_STR,
2155 GST_META_TAG_VIDEO_SIZE_STR,
2159 tags = gst_meta_api_type_get_tags (info->api);
/* any unsupported tag vetoes the copy */
2165 if (!g_strv_contains (supported_tags, *tags))
/* Context passed to foreach_metadata() via gst_buffer_foreach_meta(). */
2175 GstVideoEncoder *encoder;
2176 GstVideoCodecFrame *frame;
/* gst_buffer_foreach_meta() callback: copy a meta from the input
 * buffer to the frame's output buffer when the subclass'
 * transform_meta vfunc approves it.  Memory-specific metas are never
 * offered to the vfunc. */
2180 foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
2182 CopyMetaData *data = user_data;
2183 GstVideoEncoder *encoder = data->encoder;
2184 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2185 GstVideoCodecFrame *frame = data->frame;
2186 const GstMetaInfo *info = (*meta)->info;
2187 gboolean do_copy = FALSE;
2189 if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
2190 /* never call the transform_meta with memory specific metadata */
2191 GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s",
2192 g_type_name (info->api));
2194 } else if (klass->transform_meta) {
2195 do_copy = klass->transform_meta (encoder, frame, *meta);
2196 GST_DEBUG_OBJECT (encoder, "transformed metadata %s: copy: %d",
2197 g_type_name (info->api), do_copy);
2200 /* we only copy metadata when the subclass implemented a transform_meta
2201 * function and when it returns %TRUE */
2202 if (do_copy && info->transform_func) {
2203 GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
2204 GST_DEBUG_OBJECT (encoder, "copy metadata %s", g_type_name (info->api));
2205 /* simply copy then */
/* FIX: "&copy_data" had been mangled to the HTML entity "©_data";
 * restore the address-of operator */
2206 info->transform_func (frame->output_buffer, *meta, inbuf,
2207 _gst_meta_transform_copy, &copy_data);
/* Account for a dropped frame and post a QoS message on the bus with
 * the current jitter/proportion statistics.  Uses the output segment
 * (falling back to the input segment when undefined) to compute
 * stream/running times for the message. */
2213 gst_video_encoder_drop_frame (GstVideoEncoder * enc, GstVideoCodecFrame * frame)
2215 GstVideoEncoderPrivate *priv = enc->priv;
2216 GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
2217 GstSegment *segment;
2218 GstMessage *qos_msg;
2221 GST_DEBUG_OBJECT (enc, "dropping frame %" GST_TIME_FORMAT,
2222 GST_TIME_ARGS (frame->pts));
2226 /* post QoS message */
/* snapshot the QoS state written by the src-pad QOS event handler */
2227 GST_OBJECT_LOCK (enc);
2228 proportion = priv->proportion;
2229 earliest_time = priv->earliest_time;
2230 GST_OBJECT_UNLOCK (enc);
2232 timestamp = frame->pts;
2233 segment = &enc->output_segment;
2234 if (G_UNLIKELY (segment->format == GST_FORMAT_UNDEFINED))
2235 segment = &enc->input_segment;
2237 gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
2238 qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
2239 jitter = GST_CLOCK_DIFF (qostime, earliest_time);
2241 gst_message_new_qos (GST_OBJECT_CAST (enc), FALSE, qostime, stream_time,
2242 timestamp, GST_CLOCK_TIME_NONE);
2243 gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
2244 gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
2245 priv->processed, priv->dropped);
2246 gst_element_post_message (GST_ELEMENT_CAST (enc), qos_msg);
/* Check whether output can be pushed downstream: renegotiate if the
 * output state changed or reconfiguration was requested, and require a
 * configured output state.  Returns GST_FLOW_OK on success, otherwise
 * FLUSHING / NOT_NEGOTIATED / ERROR.  Caller holds the stream lock. */
2249 static GstFlowReturn
2250 gst_video_encoder_can_push_unlocked (GstVideoEncoder * encoder)
2252 GstVideoEncoderPrivate *priv = encoder->priv;
2253 gboolean needs_reconfigure;
2255 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
2256 if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
2257 && needs_reconfigure))) {
2258 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
2259 gst_pad_mark_reconfigure (encoder->srcpad);
/* a flushing pad explains the failure; report that instead */
2260 if (GST_PAD_IS_FLUSHING (encoder->srcpad))
2261 return GST_FLOW_FLUSHING;
2263 return GST_FLOW_NOT_NEGOTIATED;
2267 if (G_UNLIKELY (priv->output_state == NULL)) {
2268 GST_ERROR_OBJECT (encoder, "Output state was not configured");
2269 GST_ELEMENT_ERROR (encoder, LIBRARY, FAILED,
2270 ("Output state was not configured"), (NULL));
2271 return GST_FLOW_ERROR;
/* Push the serialized events attached to queued frames (those that
 * arrived before @frame) downstream, then flush any pending tags.
 * Caller holds the stream lock. */
2278 gst_video_encoder_push_pending_unlocked (GstVideoEncoder * encoder,
2279 GstVideoCodecFrame * frame)
2281 GstVideoEncoderPrivate *priv = encoder->priv;
2284 /* Push all pending events that arrived before this frame */
2285 for (l = priv->frames.head; l; l = l->next) {
2286 GstVideoCodecFrame *tmp = l->data;
/* events are stored newest-first; walk backwards to preserve order */
2291 for (k = g_list_last (tmp->events); k; k = k->prev)
2292 gst_video_encoder_push_event (encoder, k->data);
2293 g_list_free (tmp->events);
2301 gst_video_encoder_check_and_push_tags (encoder);
/* Infer a DTS for @frame when the subclass did not set one: use the
 * lowest not-yet-sent PTS among queued frames, provided every queued
 * frame has a valid PTS.  Caller holds the stream lock. */
2305 gst_video_encoder_infer_dts_unlocked (GstVideoEncoder * encoder,
2306 GstVideoCodecFrame * frame)
2308 /* DTS is expected to be monotonously increasing,
2309 * so a good guess is the lowest unsent PTS (all being OK) */
2310 GstVideoEncoderPrivate *priv = encoder->priv;
2312 GstClockTime min_ts = GST_CLOCK_TIME_NONE;
2313 GstVideoCodecFrame *oframe = NULL;
2314 gboolean seen_none = FALSE;
2316 /* some maintenance regardless */
2317 for (l = priv->frames.head; l; l = l->next) {
2318 GstVideoCodecFrame *tmp = l->data;
2320 if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
2325 if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
2326 min_ts = tmp->abidata.ABI.ts;
2330 /* save a ts if needed */
2331 if (oframe && oframe != frame) {
2332 oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
2335 /* and set if needed */
/* only infer when no queued frame had an invalid PTS */
2336 if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
2337 frame->dts = min_ts;
2338 GST_DEBUG_OBJECT (encoder,
2339 "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
2340 GST_TIME_ARGS (frame->pts));
/* Push the subclass-provided header buffers downstream when
 * priv->new_headers is set.  Each header buffer is made writable, its
 * size is added to the byte accounting, DELTA_UNIT is cleared for key
 * units (set otherwise), and the first pushed buffer carries DISCONT
 * when *discont was set.  Caller holds the stream lock. */
2345 gst_video_encoder_send_header_unlocked (GstVideoEncoder * encoder,
2346 gboolean * discont, gboolean key_unit)
2348 GstVideoEncoderPrivate *priv = encoder->priv;
2350 if (G_UNLIKELY (priv->new_headers)) {
2353 GST_DEBUG_OBJECT (encoder, "Sending headers");
2355 /* First make all buffers metadata-writable */
2356 for (tmp = priv->headers; tmp; tmp = tmp->next) {
2357 GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
2359 tmp->data = tmpbuf = gst_buffer_make_writable (tmpbuf);
/* headers count toward the CONVERT-query byte accounting */
2361 GST_OBJECT_LOCK (encoder);
2362 priv->bytes += gst_buffer_get_size (tmpbuf);
2363 GST_OBJECT_UNLOCK (encoder);
2365 if (G_UNLIKELY (key_unit)) {
2367 GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
2369 GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DELTA_UNIT);
2372 if (G_UNLIKELY (*discont)) {
2373 GST_LOG_OBJECT (encoder, "marking discont");
2374 GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
2377 GST_BUFFER_FLAG_UNSET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
/* keep our ref on the header; push a new one downstream */
2380 gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
2382 priv->new_headers = FALSE;
/* Copy metas from the frame's input buffer to its output buffer via
 * foreach_metadata(), but only when the subclass implements
 * transform_meta.  Caller holds the stream lock. */
2387 gst_video_encoder_transform_meta_unlocked (GstVideoEncoder * encoder,
2388 GstVideoCodecFrame * frame)
2390 GstVideoEncoderClass *encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2392 if (encoder_class->transform_meta) {
2393 if (G_LIKELY (frame->input_buffer)) {
2396 data.encoder = encoder;
2398 gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
2400 GST_FIXME_OBJECT (encoder,
2401 "Can't copy metadata because input frame disappeared");
/* When @frame answers one or more pending force-key-unit requests
 * (matched by frame id, ASAP request, or running time), pop those
 * requests from priv->force_key_unit, push a downstream
 * force-key-unit event for each, and set *send_headers when any
 * request asked for all headers.  Caller holds the stream lock. */
2407 gst_video_encoder_send_key_unit_unlocked (GstVideoEncoder * encoder,
2408 GstVideoCodecFrame * frame, gboolean * send_headers)
2410 GstVideoEncoderPrivate *priv = encoder->priv;
2411 GstClockTime stream_time, running_time;
2414 GQueue matching_fevt = G_QUEUE_INIT;
2415 ForcedKeyUnitEvent *fevt;
2418 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2421 GST_OBJECT_LOCK (encoder);
2422 for (l = priv->force_key_unit.head; l;) {
2425 /* Skip non-pending keyunits */
2426 if (!fevt->pending) {
2431 /* Exact match using the frame id */
2432 if (frame->system_frame_number == fevt->frame_id) {
2433 GList *next = l->next;
2434 g_queue_push_tail (&matching_fevt, fevt);
2435 g_queue_delete_link (&priv->force_key_unit, l);
2440 /* Simple case, keyunit ASAP */
2441 if (fevt->running_time == GST_CLOCK_TIME_NONE) {
2442 GList *next = l->next;
2443 g_queue_push_tail (&matching_fevt, fevt);
2444 g_queue_delete_link (&priv->force_key_unit, l);
2449 /* Event for before this frame */
2450 if (fevt->running_time <= running_time) {
2451 GList *next = l->next;
2452 g_queue_push_tail (&matching_fevt, fevt);
2453 g_queue_delete_link (&priv->force_key_unit, l);
2458 /* Otherwise all following events are in the future */
2462 GST_OBJECT_UNLOCK (encoder);
/* emit one downstream force-key-unit event per matched request */
2464 while ((fevt = g_queue_pop_head (&matching_fevt))) {
2466 gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
2469 ev = gst_video_event_new_downstream_force_key_unit
2470 (frame->pts, stream_time, running_time, fevt->all_headers, fevt->count);
2472 gst_video_encoder_push_event (encoder, ev);
2474 if (fevt->all_headers)
2475 *send_headers = TRUE;
2477 GST_DEBUG_OBJECT (encoder,
2478 "Forced key unit: running-time %" GST_TIME_FORMAT
2479 ", all_headers %d, count %u",
2480 GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
2481 forced_key_unit_event_free (fevt);
2486 * gst_video_encoder_finish_frame:
2487 * @encoder: a #GstVideoEncoder
2488 * @frame: (transfer full): an encoded #GstVideoCodecFrame
2490 * @frame must have a valid encoded data buffer, whose metadata fields
2491 * are then appropriately set according to frame data or no buffer at
2492 * all if the frame should be dropped.
2493 * It is subsequently pushed downstream or provided to @pre_push.
2494 * In any case, the frame is considered finished and released.
2496 * After calling this function the output buffer of the frame is to be
2497 * considered read-only. This function will also change the metadata
2500 * Returns: a #GstFlowReturn resulting from sending data downstream
2503 gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
2504 GstVideoCodecFrame * frame)
2506 GstVideoEncoderPrivate *priv = encoder->priv;
2507 GstFlowReturn ret = GST_FLOW_OK;
2508 GstVideoEncoderClass *encoder_class;
2509 gboolean send_headers = FALSE;
2510 gboolean key_unit = FALSE;
2511 gboolean discont = FALSE;
2514 g_return_val_if_fail (frame, GST_FLOW_ERROR);
2516 discont = (frame->presentation_frame_number == 0
2517 && frame->abidata.ABI.num_subframes == 0);
2519 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2521 GST_LOG_OBJECT (encoder,
2522 "finish frame fpn %d sync point: %d", frame->presentation_frame_number,
2523 GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
2525 GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
2526 ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
2527 GST_TIME_ARGS (frame->dts));
2529 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2531 ret = gst_video_encoder_can_push_unlocked (encoder);
2532 if (ret != GST_FLOW_OK)
2535 if (frame->abidata.ABI.num_subframes == 0)
2536 gst_video_encoder_push_pending_unlocked (encoder, frame);
2538 /* no buffer data means this frame is skipped/dropped */
2539 if (!frame->output_buffer) {
2540 gst_video_encoder_drop_frame (encoder, frame);
2546 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
2547 gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
2549 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
2550 && frame->abidata.ABI.num_subframes == 0) {
2551 priv->distance_from_sync = 0;
2553 /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
2554 if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
2555 frame->dts = frame->pts;
2557 priv->last_key_unit =
2558 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2562 gst_video_encoder_infer_dts_unlocked (encoder, frame);
2564 frame->distance_from_sync = priv->distance_from_sync;
2565 priv->distance_from_sync++;
2567 /* We need a writable buffer for the metadata changes below */
2568 frame->output_buffer = gst_buffer_make_writable (frame->output_buffer);
2570 GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
2571 GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
2572 GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
2574 /* At this stage we have a full frame in subframe use case ,
2575 * let's mark it to enabled some latency optimization
2576 * in some uses cases like RTP. */
2578 GST_BUFFER_FLAG_SET (frame->output_buffer, GST_VIDEO_BUFFER_FLAG_MARKER);
2580 GST_OBJECT_LOCK (encoder);
2581 /* update rate estimate */
2582 priv->bytes += gst_buffer_get_size (frame->output_buffer);
2583 if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
2584 priv->time += frame->duration;
2586 /* better none than nothing valid */
2587 priv->time = GST_CLOCK_TIME_NONE;
2589 GST_OBJECT_UNLOCK (encoder);
2591 if (G_UNLIKELY (send_headers))
2592 priv->new_headers = TRUE;
2594 gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);
2597 GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2599 GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2602 if (G_UNLIKELY (discont)) {
2603 GST_LOG_OBJECT (encoder, "marking discont");
2604 GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
2607 if (encoder_class->pre_push)
2608 ret = encoder_class->pre_push (encoder, frame);
2610 gst_video_encoder_transform_meta_unlocked (encoder, frame);
2612 /* Get an additional ref to the buffer, which is going to be pushed
2613 * downstream, the original ref is owned by the frame */
2614 if (ret == GST_FLOW_OK)
2615 buffer = gst_buffer_ref (frame->output_buffer);
2617 /* Release frame so the buffer is writable when we push it downstream
2618 * if possible, i.e. if the subclass does not hold additional references
2621 gst_video_encoder_release_frame (encoder, frame);
2624 if (ret == GST_FLOW_OK) {
2625 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2626 ret = gst_pad_push (encoder->srcpad, buffer);
2627 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2633 gst_video_encoder_release_frame (encoder, frame);
2635 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2641 * gst_video_encoder_finish_subframe:
2642 * @encoder: a #GstVideoEncoder
2643 * @frame: (transfer none): a #GstVideoCodecFrame being encoded
2645 * If multiple subframes are produced for one input frame then use this method
2646 * for each subframe, except for the last one. Before calling this function,
2647 * you need to fill frame->output_buffer with the encoded buffer to push.
2649 * You must call #gst_video_encoder_finish_frame() for the last sub-frame
2650 * to tell the encoder that the frame has been fully encoded.
2652 * This function will change the metadata of @frame and frame->output_buffer
2653 * will be pushed downstream.
2655 * Returns: a #GstFlowReturn resulting from pushing the buffer downstream.
2660 gst_video_encoder_finish_subframe (GstVideoEncoder * encoder,
2661 GstVideoCodecFrame * frame)
2663 GstVideoEncoderPrivate *priv = encoder->priv;
2664 GstVideoEncoderClass *encoder_class;
2665 GstFlowReturn ret = GST_FLOW_OK;
2666 GstBuffer *subframe_buffer = NULL;
2667 gboolean discont = FALSE;
2668 gboolean send_headers = FALSE;
2669 gboolean key_unit = FALSE;
2671 g_return_val_if_fail (frame, GST_FLOW_ERROR);
2672 g_return_val_if_fail (frame->output_buffer, GST_FLOW_ERROR);
2674 subframe_buffer = frame->output_buffer;
2676 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2677 discont = (frame->presentation_frame_number == 0
2678 && frame->abidata.ABI.num_subframes == 0);
2680 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
2682 GST_LOG_OBJECT (encoder,
2683 "finish subframe %u of frame fpn %u PTS %" GST_TIME_FORMAT ", DTS %"
2684 GST_TIME_FORMAT " sync point: %d", frame->abidata.ABI.num_subframes,
2685 frame->presentation_frame_number, GST_TIME_ARGS (frame->pts),
2686 GST_TIME_ARGS (frame->dts), GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame));
2688 ret = gst_video_encoder_can_push_unlocked (encoder);
2689 if (ret != GST_FLOW_OK)
2692 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit.head)
2693 gst_video_encoder_send_key_unit_unlocked (encoder, frame, &send_headers);
2695 /* Push pending events only for the first subframe ie segment event.
2696 * Push new incoming events on finish_frame otherwise.
2698 if (frame->abidata.ABI.num_subframes == 0)
2699 gst_video_encoder_push_pending_unlocked (encoder, frame);
2701 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)
2702 && frame->abidata.ABI.num_subframes == 0) {
2703 priv->distance_from_sync = 0;
2705 /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
2706 if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
2707 frame->dts = frame->pts;
2709 priv->last_key_unit =
2710 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2714 gst_video_encoder_infer_dts_unlocked (encoder, frame);
2716 /* We need a writable buffer for the metadata changes below */
2717 subframe_buffer = gst_buffer_make_writable (subframe_buffer);
2719 GST_BUFFER_PTS (subframe_buffer) = frame->pts;
2720 GST_BUFFER_DTS (subframe_buffer) = frame->dts;
2721 GST_BUFFER_DURATION (subframe_buffer) = frame->duration;
2723 GST_OBJECT_LOCK (encoder);
2724 /* update rate estimate */
2725 priv->bytes += gst_buffer_get_size (subframe_buffer);
2726 GST_OBJECT_UNLOCK (encoder);
2728 if (G_UNLIKELY (send_headers))
2729 priv->new_headers = TRUE;
2731 gst_video_encoder_send_header_unlocked (encoder, &discont, key_unit);
2734 GST_BUFFER_FLAG_UNSET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2736 GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2739 if (G_UNLIKELY (discont)) {
2740 GST_LOG_OBJECT (encoder, "marking discont buffer: %" GST_PTR_FORMAT,
2742 GST_BUFFER_FLAG_SET (subframe_buffer, GST_BUFFER_FLAG_DISCONT);
2745 if (encoder_class->pre_push) {
2746 ret = encoder_class->pre_push (encoder, frame);
2749 gst_video_encoder_transform_meta_unlocked (encoder, frame);
2751 if (ret == GST_FLOW_OK) {
2752 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2753 ret = gst_pad_push (encoder->srcpad, subframe_buffer);
2754 subframe_buffer = NULL;
2755 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2759 frame->abidata.ABI.num_subframes++;
2760 if (subframe_buffer)
2761 gst_buffer_unref (subframe_buffer);
2762 frame->output_buffer = NULL;
2764 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2770 * gst_video_encoder_get_output_state:
2771 * @encoder: a #GstVideoEncoder
2773 * Get the current #GstVideoCodecState
2775 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
2777 GstVideoCodecState *
2778 gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
2780 GstVideoCodecState *state;
2782 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2783 state = gst_video_codec_state_ref (encoder->priv->output_state);
2784 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2790 * gst_video_encoder_set_output_state:
2791 * @encoder: a #GstVideoEncoder
2792 * @caps: (transfer full): the #GstCaps to use for the output
2793  * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState
2795 * Creates a new #GstVideoCodecState with the specified caps as the output state
2797 * Any previously set output state on @encoder will be replaced by the newly
2800 * The specified @caps should not contain any resolution, pixel-aspect-ratio,
2801 * framerate, codec-data, .... Those should be specified instead in the returned
2802 * #GstVideoCodecState.
2804 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
2805 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
2808 * If the subclass wishes to override some fields from the output state (like
2809 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
2811 * The new output state will only take effect (set on pads and buffers) starting
2812 * from the next call to #gst_video_encoder_finish_frame().
2814 * Returns: (transfer full): the newly configured output state.
2816 GstVideoCodecState *
2817 gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
2818 GstVideoCodecState * reference)
2820 GstVideoEncoderPrivate *priv = encoder->priv;
2821 GstVideoCodecState *state;
2823 g_return_val_if_fail (caps != NULL, NULL);
2825 state = _new_output_state (caps, reference);
2829 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2830 if (priv->output_state)
2831 gst_video_codec_state_unref (priv->output_state);
2832 priv->output_state = gst_video_codec_state_ref (state);
2834 if (priv->output_state != NULL && priv->output_state->info.fps_n > 0) {
2835 priv->qos_frame_duration =
2836 gst_util_uint64_scale (GST_SECOND, priv->output_state->info.fps_d,
2837 priv->output_state->info.fps_n);
2839 priv->qos_frame_duration = 0;
2842 priv->output_state_changed = TRUE;
2843 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2849 * gst_video_encoder_set_latency:
2850 * @encoder: a #GstVideoEncoder
2851 * @min_latency: minimum latency
2852 * @max_latency: maximum latency
2854 * Informs baseclass of encoding latency.
2857 gst_video_encoder_set_latency (GstVideoEncoder * encoder,
2858 GstClockTime min_latency, GstClockTime max_latency)
2860 g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
2861 g_return_if_fail (max_latency >= min_latency);
2863 GST_OBJECT_LOCK (encoder);
2864 encoder->priv->min_latency = min_latency;
2865 encoder->priv->max_latency = max_latency;
2866 GST_OBJECT_UNLOCK (encoder);
2868 gst_element_post_message (GST_ELEMENT_CAST (encoder),
2869 gst_message_new_latency (GST_OBJECT_CAST (encoder)));
2873 * gst_video_encoder_get_latency:
2874 * @encoder: a #GstVideoEncoder
2875 * @min_latency: (out) (allow-none): address of variable in which to store the
2876 * configured minimum latency, or %NULL
2877 * @max_latency: (out) (allow-none): address of variable in which to store the
2878 * configured maximum latency, or %NULL
2880 * Query the configured encoding latency. Results will be returned via
2881 * @min_latency and @max_latency.
2884 gst_video_encoder_get_latency (GstVideoEncoder * encoder,
2885 GstClockTime * min_latency, GstClockTime * max_latency)
2887 GST_OBJECT_LOCK (encoder);
2889 *min_latency = encoder->priv->min_latency;
2891 *max_latency = encoder->priv->max_latency;
2892 GST_OBJECT_UNLOCK (encoder);
2896 * gst_video_encoder_get_oldest_frame:
2897 * @encoder: a #GstVideoEncoder
2899 * Get the oldest unfinished pending #GstVideoCodecFrame
2901 * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
2903 GstVideoCodecFrame *
2904 gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
2906 GstVideoCodecFrame *frame = NULL;
2908 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2909 if (encoder->priv->frames.head)
2910 frame = gst_video_codec_frame_ref (encoder->priv->frames.head->data);
2911 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2913 return (GstVideoCodecFrame *) frame;
2917 * gst_video_encoder_get_frame:
2918 * @encoder: a #GstVideoEncoder
2919 * @frame_number: system_frame_number of a frame
2921 * Get a pending unfinished #GstVideoCodecFrame
2923 * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
2925 GstVideoCodecFrame *
2926 gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
2929 GstVideoCodecFrame *frame = NULL;
2931 GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
2933 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2934 for (g = encoder->priv->frames.head; g; g = g->next) {
2935 GstVideoCodecFrame *tmp = g->data;
2937 if (tmp->system_frame_number == frame_number) {
2938 frame = gst_video_codec_frame_ref (tmp);
2942 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2948 * gst_video_encoder_get_frames:
2949 * @encoder: a #GstVideoEncoder
2951 * Get all pending unfinished #GstVideoCodecFrame
2953 * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
2956 gst_video_encoder_get_frames (GstVideoEncoder * encoder)
2960 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2962 g_list_copy_deep (encoder->priv->frames.head,
2963 (GCopyFunc) gst_video_codec_frame_ref, NULL);
2964 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2970 * gst_video_encoder_merge_tags:
2971 * @encoder: a #GstVideoEncoder
2972 * @tags: (allow-none): a #GstTagList to merge, or NULL to unset
2973 * previously-set tags
2974 * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
2976 * Sets the video encoder tags and how they should be merged with any
2977 * upstream stream tags. This will override any tags previously-set
2978 * with gst_video_encoder_merge_tags().
2980 * Note that this is provided for convenience, and the subclass is
2981 * not required to use this and can still do tag handling on its own.
2986 gst_video_encoder_merge_tags (GstVideoEncoder * encoder,
2987 const GstTagList * tags, GstTagMergeMode mode)
2989 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
2990 g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
2991 g_return_if_fail (tags == NULL || mode != GST_TAG_MERGE_UNDEFINED);
2993 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2994 if (encoder->priv->tags != tags) {
2995 if (encoder->priv->tags) {
2996 gst_tag_list_unref (encoder->priv->tags);
2997 encoder->priv->tags = NULL;
2998 encoder->priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
3001 encoder->priv->tags = gst_tag_list_ref ((GstTagList *) tags);
3002 encoder->priv->tags_merge_mode = mode;
3005 GST_DEBUG_OBJECT (encoder, "setting encoder tags to %" GST_PTR_FORMAT,
3007 encoder->priv->tags_changed = TRUE;
3009 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
3013 * gst_video_encoder_get_allocator:
3014 * @encoder: a #GstVideoEncoder
3015 * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
3017 * @params: (out) (allow-none) (transfer full): the
3018 * #GstAllocationParams of @allocator
3020 * Lets #GstVideoEncoder sub-classes to know the memory @allocator
3021 * used by the base class and its @params.
3023 * Unref the @allocator after use it.
3026 gst_video_encoder_get_allocator (GstVideoEncoder * encoder,
3027 GstAllocator ** allocator, GstAllocationParams * params)
3029 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3032 *allocator = encoder->priv->allocator ?
3033 gst_object_ref (encoder->priv->allocator) : NULL;
3036 *params = encoder->priv->params;
3040 * gst_video_encoder_set_min_pts:
3041 * @encoder: a #GstVideoEncoder
3042 * @min_pts: minimal PTS that will be passed to handle_frame
3044 * Request minimal value for PTS passed to handle_frame.
3046 * For streams with reordered frames this can be used to ensure that there
3047 * is enough time to accommodate first DTS, which may be less than first PTS
3052 gst_video_encoder_set_min_pts (GstVideoEncoder * encoder, GstClockTime min_pts)
3054 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3055 encoder->priv->min_pts = min_pts;
3056 encoder->priv->time_adjustment = GST_CLOCK_TIME_NONE;
3060 * gst_video_encoder_get_max_encode_time:
3061 * @encoder: a #GstVideoEncoder
3062 * @frame: a #GstVideoCodecFrame
3064 * Determines maximum possible encoding time for @frame that will
3065 * allow it to encode and arrive in time (as determined by QoS events).
3066 * In particular, a negative result means encoding in time is no longer possible
3067  * and should therefore occur as soon as possible (possibly skipping the frame).
3069 * If no QoS events have been received from downstream, or if
3070 * #GstVideoEncoder:qos is disabled this function returns #G_MAXINT64.
3072  * Returns: max encoding time.
3076 gst_video_encoder_get_max_encode_time (GstVideoEncoder *
3077 encoder, GstVideoCodecFrame * frame)
3079 GstClockTimeDiff deadline;
3080 GstClockTime earliest_time;
3082 if (!g_atomic_int_get (&encoder->priv->qos_enabled))
3085 GST_OBJECT_LOCK (encoder);
3086 earliest_time = encoder->priv->earliest_time;
3087 if (GST_CLOCK_TIME_IS_VALID (earliest_time)
3088 && GST_CLOCK_TIME_IS_VALID (frame->deadline))
3089 deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
3091 deadline = G_MAXINT64;
3093 GST_LOG_OBJECT (encoder, "earliest %" GST_TIME_FORMAT
3094 ", frame deadline %" GST_TIME_FORMAT ", deadline %" GST_STIME_FORMAT,
3095 GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
3096 GST_STIME_ARGS (deadline));
3098 GST_OBJECT_UNLOCK (encoder);
3104 * gst_video_encoder_set_qos_enabled:
3105 * @encoder: the encoder
3106 * @enabled: the new qos value.
3108 * Configures @encoder to handle Quality-of-Service events from downstream.
3112 gst_video_encoder_set_qos_enabled (GstVideoEncoder * encoder, gboolean enabled)
3114 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3116 g_atomic_int_set (&encoder->priv->qos_enabled, enabled);
3120 * gst_video_encoder_is_qos_enabled:
3121 * @encoder: the encoder
3123 * Checks if @encoder is currently configured to handle Quality-of-Service
3124 * events from downstream.
3126 * Returns: %TRUE if the encoder is configured to perform Quality-of-Service.
3130 gst_video_encoder_is_qos_enabled (GstVideoEncoder * encoder)
3134 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
3136 res = g_atomic_int_get (&encoder->priv->qos_enabled);
3142 * gst_video_encoder_set_min_force_key_unit_interval:
3143 * @encoder: the encoder
3144 * @interval: minimum interval
3146 * Sets the minimum interval for requesting keyframes based on force-keyunit
3147 * events. Setting this to 0 will allow to handle every event, setting this to
3148 * %GST_CLOCK_TIME_NONE causes force-keyunit events to be ignored.
3153 gst_video_encoder_set_min_force_key_unit_interval (GstVideoEncoder * encoder,
3154 GstClockTime interval)
3156 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
3158 GST_OBJECT_LOCK (encoder);
3159 encoder->priv->min_force_key_unit_interval = interval;
3160 GST_OBJECT_UNLOCK (encoder);
3164 * gst_video_encoder_get_min_force_key_unit_interval:
3165 * @encoder: the encoder
3167 * Returns the minimum force-keyunit interval, see gst_video_encoder_set_min_force_key_unit_interval()
3170 * Returns: the minimum force-keyunit interval
3175 gst_video_encoder_get_min_force_key_unit_interval (GstVideoEncoder * encoder)
3177 GstClockTime interval;
3179 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), GST_CLOCK_TIME_NONE);
3181 GST_OBJECT_LOCK (encoder);
3182 interval = encoder->priv->min_force_key_unit_interval;
3183 GST_OBJECT_UNLOCK (encoder);