2 * Copyright (C) 2008 David Schleef <ds@schleef.org>
3 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
4 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
5 * Contact: Stefan Kost <stefan.kost@nokia.com>
6 * Copyright (C) 2012 Collabora Ltd.
7 * Author : Edward Hervey <edward@collabora.com>
9 * This library is free software; you can redistribute it and/or
10 * modify it under the terms of the GNU Library General Public
11 * License as published by the Free Software Foundation; either
12 * version 2 of the License, or (at your option) any later version.
14 * This library is distributed in the hope that it will be useful,
15 * but WITHOUT ANY WARRANTY; without even the implied warranty of
16 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
17 * Library General Public License for more details.
19 * You should have received a copy of the GNU Library General Public
20 * License along with this library; if not, write to the
21 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
22 * Boston, MA 02110-1301, USA.
26 * SECTION:gstvideoencoder
27 * @short_description: Base class for video encoders
30 * This base class is for video encoders turning raw video into
33 * GstVideoEncoder and subclass should cooperate as follows.
36 * <itemizedlist><title>Configuration</title>
38 * Initially, GstVideoEncoder calls @start when the encoder element
39 * is activated, which allows subclass to perform any global setup.
42 * GstVideoEncoder calls @set_format to inform subclass of the format
43 * of input video data that it is about to receive. Subclass should
44 * setup for encoding and configure base class as appropriate
45 * (e.g. latency). While unlikely, it might be called more than once,
46 * if changing input parameters require reconfiguration. Baseclass
47 * will ensure that processing of current configuration is finished.
50 * GstVideoEncoder calls @stop at end of all processing.
56 * <title>Data processing</title>
58 * Base class collects input data and metadata into a frame and hands
59 * this to subclass' @handle_frame.
62 * If codec processing results in encoded data, subclass should call
63 * @gst_video_encoder_finish_frame to have encoded data pushed
67 * If implemented, baseclass calls subclass @pre_push just prior to
68 * pushing to allow subclasses to modify some metadata on the buffer.
69 * If it returns GST_FLOW_OK, the buffer is pushed downstream.
72 * GstVideoEncoderClass will handle both srcpad and sinkpad events.
73 * Sink events will be passed to subclass if @event callback has been
79 * <itemizedlist><title>Shutdown phase</title>
81 * GstVideoEncoder class calls @stop to inform the subclass that data
82 * parsing will be stopped.
88 * Subclass is responsible for providing pad template caps for
89 * source and sink pads. The pads need to be named "sink" and "src". It should
90 * also be able to provide fixed src pad caps in @getcaps by the time it calls
91 * @gst_video_encoder_finish_frame.
93 * Things that subclass need to take care of:
95 * <listitem><para>Provide pad templates</para></listitem>
97 * Provide source pad caps before pushing the first buffer
100 * Accept data in @handle_frame and provide encoded results to
101 * @gst_video_encoder_finish_frame.
113 * * Calculate actual latency based on input/output timestamp/frame_number
114 * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
117 #include <gst/video/video.h>
118 #include "gstvideoencoder.h"
119 #include "gstvideoutils.h"
120 #include "gstvideoutilsprivate.h"
122 #include <gst/video/gstvideometa.h>
123 #include <gst/video/gstvideopool.h>
/* Debug category under which all GST_DEBUG/GST_LOG in this file is emitted;
 * initialized in gst_video_encoder_class_init() below. */
127 GST_DEBUG_CATEGORY (videoencoder_debug);
128 #define GST_CAT_DEFAULT videoencoder_debug
/* Accessor for the per-instance private data registered with
 * g_type_class_add_private() in class_init. */
130 #define GST_VIDEO_ENCODER_GET_PRIVATE(obj) \
131 (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_ENCODER, \
132 GstVideoEncoderPrivate))
/* Instance-private state of GstVideoEncoder.
 * NOTE(review): this excerpt is incomplete — members referenced elsewhere in
 * this file (drained, tags, headers, min_latency, max_latency, bytes, time)
 * are declared in lines not visible here. */
134 struct _GstVideoEncoderPrivate
/* number of frames handed to the subclass so far (reset in _reset()) */
136 guint64 presentation_frame_number;
/* presumably frames since the last sync point; reset to 0 in _reset() */
137 int distance_from_sync;
139 /* FIXME : (and introduce a context ?) */
/* serialized events received while no frame was pending; pushed in
 * reverse order on EOS and flushed (sticky events re-stored) in _reset() */
145 GList *current_frame_events;
148 gboolean new_headers; /* Whether new headers were just set */
150 GList *force_key_unit; /* List of pending forced keyunits */
/* monotonically increasing id assigned to each new frame */
152 guint32 system_frame_number;
154 GList *frames; /* Protected with OBJECT_LOCK */
/* negotiated input/output codec states; owned here, unreffed in _reset() */
155 GstVideoCodecState *input_state;
156 GstVideoCodecState *output_state;
157 gboolean output_state_changed;
/* allocator/params obtained from the allocation query (decide_allocation) */
162 GstAllocator *allocator;
163 GstAllocationParams params;
165 /* upstream stream tags (global tags are passed through as-is) */
166 GstTagList *upstream_tags;
/* merge mode used when combining subclass tags with upstream tags */
170 GstTagMergeMode tags_merge_mode;
/* TRUE when a freshly merged tag event must be pushed downstream */
172 gboolean tags_changed;
174 GstClockTime min_pts;
175 /* adjustment needed on pts, dts, segment start and stop to accommodate
177 GstClockTime time_adjustment;
/* Bookkeeping for one pending force-key-unit request, queued from the
 * CUSTOM_DOWNSTREAM/CUSTOM_UPSTREAM event handlers below and stored in
 * priv->force_key_unit (protected by the object lock). */
180 typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
181 struct _ForcedKeyUnitEvent
/* running time at which the key unit was requested */
183 GstClockTime running_time;
184 gboolean pending; /* TRUE if this was requested already */
/* whether all stream headers should be resent with the key unit */
185 gboolean all_headers;
/* Free a ForcedKeyUnitEvent allocated by forced_key_unit_event_new(). */
191 forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
193 g_slice_free (ForcedKeyUnitEvent, evt);
/* Allocate a zero-initialized ForcedKeyUnitEvent recording the requested
 * running time and whether all headers must be resent.  Caller owns the
 * result and releases it with forced_key_unit_event_free(). */
196 static ForcedKeyUnitEvent *
197 forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
200 ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
202 evt->running_time = running_time;
203 evt->all_headers = all_headers;
/* Parent class pointer, resolved in class_init via g_type_class_peek_parent(). */
209 static GstElementClass *parent_class = NULL;
/* Forward declarations for the GObject plumbing, the pad functions installed
 * in _init(), and the default implementations of the class vfuncs installed
 * in class_init() below. */
210 static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
211 static void gst_video_encoder_init (GstVideoEncoder * enc,
212 GstVideoEncoderClass * klass);
214 static void gst_video_encoder_finalize (GObject * object);
216 gst_video_encoder_release_frame (GstVideoEncoder * enc,
217 GstVideoCodecFrame * frame);
218 static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
220 static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
222 static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
224 static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
226 static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
228 static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
229 element, GstStateChange transition);
230 static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
232 static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
234 static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
235 encoder, GstBuffer * buf, GstClockTime pts, GstClockTime dts,
236 GstClockTime duration);
238 static gboolean gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
240 static gboolean gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
242 static gboolean gst_video_encoder_decide_allocation_default (GstVideoEncoder *
243 encoder, GstQuery * query);
244 static gboolean gst_video_encoder_propose_allocation_default (GstVideoEncoder *
245 encoder, GstQuery * query);
246 static gboolean gst_video_encoder_negotiate_default (GstVideoEncoder * encoder);
247 static gboolean gst_video_encoder_negotiate_unlocked (GstVideoEncoder *
250 static gboolean gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
252 static gboolean gst_video_encoder_src_query_default (GstVideoEncoder * encoder,
255 static gboolean gst_video_encoder_transform_meta_default (GstVideoEncoder *
256 encoder, GstVideoCodecFrame * frame, GstMeta * meta);
258 /* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
259 * method to get to the padtemplates */
/* Thread-safe, once-only registration of the abstract GstVideoEncoder GType,
 * additionally attaching the GstPreset interface. */
261 gst_video_encoder_get_type (void)
263 static volatile gsize type = 0;
265 if (g_once_init_enter (&type)) {
267 static const GTypeInfo info = {
268 sizeof (GstVideoEncoderClass),
271 (GClassInitFunc) gst_video_encoder_class_init,
274 sizeof (GstVideoEncoder),
276 (GInstanceInitFunc) gst_video_encoder_init,
/* GstPreset is implemented with no custom interface hooks */
278 const GInterfaceInfo preset_interface_info = {
279 NULL, /* interface_init */
280 NULL, /* interface_finalize */
281 NULL /* interface_data */
284 _type = g_type_register_static (GST_TYPE_ELEMENT,
285 "GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
286 g_type_add_interface_static (_type, GST_TYPE_PRESET,
287 &preset_interface_info);
288 g_once_init_leave (&type, _type);
/* Class initializer: registers the debug category, adds the private struct,
 * hooks up GObject finalize and GstElement state changes, and installs the
 * default implementations for all overridable vfuncs. */
294 gst_video_encoder_class_init (GstVideoEncoderClass * klass)
296 GObjectClass *gobject_class;
297 GstElementClass *gstelement_class;
299 gobject_class = G_OBJECT_CLASS (klass);
300 gstelement_class = GST_ELEMENT_CLASS (klass);
302 GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
303 "Base Video Encoder");
305 parent_class = g_type_class_peek_parent (klass);
307 g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate));
309 gobject_class->finalize = gst_video_encoder_finalize;
311 gstelement_class->change_state =
312 GST_DEBUG_FUNCPTR (gst_video_encoder_change_state);
/* defaults a subclass can override or chain up to */
314 klass->sink_event = gst_video_encoder_sink_event_default;
315 klass->src_event = gst_video_encoder_src_event_default;
316 klass->propose_allocation = gst_video_encoder_propose_allocation_default;
317 klass->decide_allocation = gst_video_encoder_decide_allocation_default;
318 klass->negotiate = gst_video_encoder_negotiate_default;
319 klass->sink_query = gst_video_encoder_sink_query_default;
320 klass->src_query = gst_video_encoder_src_query_default;
321 klass->transform_meta = gst_video_encoder_transform_meta_default;
/* Drop a list of pending events: sticky events (other than EOS and SEGMENT)
 * are re-stored on @pad so they are not lost, every event is unreffed and
 * the list itself is freed.  Callers assign the result back to the list
 * variable (presumably NULL — the return statement is not visible here). */
325 _flush_events (GstPad * pad, GList * events)
329 for (tmp = events; tmp; tmp = tmp->next) {
330 if (GST_EVENT_TYPE (tmp->data) != GST_EVENT_EOS &&
331 GST_EVENT_TYPE (tmp->data) != GST_EVENT_SEGMENT &&
332 GST_EVENT_IS_STICKY (tmp->data)) {
333 gst_pad_store_sticky_event (pad, GST_EVENT_CAST (tmp->data));
335 gst_event_unref (tmp->data);
337 g_list_free (events);
/* Reset encoder state under the stream lock.  Drops pending forced-key-unit
 * requests, unparsed frames (re-storing their sticky events on the srcpad)
 * and pending per-frame events; clears segments, input/output codec states,
 * tags, headers and the negotiated allocator.  @hard distinguishes a full
 * reset (TRUE, e.g. from _init()) from a flush-time soft reset (FALSE, from
 * FLUSH_STOP) — the conditional gating on @hard is not visible in this
 * excerpt. */
343 gst_video_encoder_reset (GstVideoEncoder * encoder, gboolean hard)
345 GstVideoEncoderPrivate *priv = encoder->priv;
348 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
350 priv->presentation_frame_number = 0;
351 priv->distance_from_sync = 0;
/* forget any queued force-key-unit requests */
353 g_list_foreach (priv->force_key_unit, (GFunc) forced_key_unit_event_free,
355 g_list_free (priv->force_key_unit);
356 priv->force_key_unit = NULL;
358 priv->drained = TRUE;
363 priv->time_adjustment = GST_CLOCK_TIME_NONE;
365 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
367 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
369 if (priv->input_state)
370 gst_video_codec_state_unref (priv->input_state);
371 priv->input_state = NULL;
372 if (priv->output_state)
373 gst_video_codec_state_unref (priv->output_state);
374 priv->output_state = NULL;
376 if (priv->upstream_tags) {
377 gst_tag_list_unref (priv->upstream_tags);
378 priv->upstream_tags = NULL;
381 gst_tag_list_unref (priv->tags);
383 priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
384 priv->tags_changed = FALSE;
386 g_list_foreach (priv->headers, (GFunc) gst_event_unref, NULL);
387 g_list_free (priv->headers);
388 priv->headers = NULL;
389 priv->new_headers = FALSE;
391 if (priv->allocator) {
392 gst_object_unref (priv->allocator);
393 priv->allocator = NULL;
396 g_list_foreach (priv->current_frame_events, (GFunc) gst_event_unref, NULL);
397 g_list_free (priv->current_frame_events);
398 priv->current_frame_events = NULL;
/* keep sticky events from pending frames so they survive the reset */
403 for (l = priv->frames; l; l = l->next) {
404 GstVideoCodecFrame *frame = l->data;
406 frame->events = _flush_events (encoder->srcpad, frame->events);
408 priv->current_frame_events = _flush_events (encoder->srcpad,
409 encoder->priv->current_frame_events);
412 g_list_foreach (priv->frames, (GFunc) gst_video_codec_frame_unref, NULL);
413 g_list_free (priv->frames);
416 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
421 /* Always call reset() in one way or another after this */
/* Ask the subclass to drop any pending encoded data via its ::flush vfunc,
 * holding the stream lock for the duration of the call. */
423 gst_video_encoder_flush (GstVideoEncoder * encoder)
425 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
428 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
430 ret = klass->flush (encoder);
432 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Instance initializer: creates sink and src pads from the subclass'
 * "sink"/"src" pad templates (which the subclass MUST provide), installs
 * the chain/event/query pad functions, initializes segments, the stream
 * lock and default private fields, then performs a hard reset. */
437 gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
439 GstVideoEncoderPrivate *priv;
440 GstPadTemplate *pad_template;
443 GST_DEBUG_OBJECT (encoder, "gst_video_encoder_init");
445 priv = encoder->priv = GST_VIDEO_ENCODER_GET_PRIVATE (encoder);
/* sink pad: receives raw video, so it carries chain/event/query handlers */
448 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
449 g_return_if_fail (pad_template != NULL);
451 encoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");
453 gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
454 gst_pad_set_event_function (pad,
455 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
456 gst_pad_set_query_function (pad,
457 GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
458 gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
/* src pad: pushes encoded data downstream */
461 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
462 g_return_if_fail (pad_template != NULL);
464 encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
466 gst_pad_set_query_function (pad,
467 GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
468 gst_pad_set_event_function (pad,
469 GST_DEBUG_FUNCPTR (gst_video_encoder_src_event));
470 gst_element_add_pad (GST_ELEMENT (encoder), encoder->srcpad);
472 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
473 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
475 g_rec_mutex_init (&encoder->stream_lock);
477 priv->headers = NULL;
478 priv->new_headers = FALSE;
480 priv->min_latency = 0;
481 priv->max_latency = 0;
482 priv->min_pts = GST_CLOCK_TIME_NONE;
483 priv->time_adjustment = GST_CLOCK_TIME_NONE;
485 gst_video_encoder_reset (encoder, TRUE);
/* Convert @src_value between BYTES and TIME using the accumulated totals
 * @bytes and @time as the conversion ratio (a simple average bitrate).
 * Identity conversions and zero values pass through unchanged; fails when
 * no data has been accounted yet or the format pair is unsupported. */
489 gst_video_encoded_video_convert (gint64 bytes, gint64 time,
490 GstFormat src_format, gint64 src_value, GstFormat * dest_format,
493 gboolean res = FALSE;
495 g_return_val_if_fail (dest_format != NULL, FALSE);
496 g_return_val_if_fail (dest_value != NULL, FALSE);
/* trivial cases: same format or zero value need no scaling */
498 if (G_UNLIKELY (src_format == *dest_format || src_value == 0 ||
501 *dest_value = src_value;
505 if (bytes <= 0 || time <= 0) {
506 GST_DEBUG ("not enough metadata yet to convert");
510 switch (src_format) {
511 case GST_FORMAT_BYTES:
512 switch (*dest_format) {
513 case GST_FORMAT_TIME:
514 *dest_value = gst_util_uint64_scale (src_value, time, bytes);
521 case GST_FORMAT_TIME:
522 switch (*dest_format) {
523 case GST_FORMAT_BYTES:
524 *dest_value = gst_util_uint64_scale (src_value, bytes, time);
532 GST_DEBUG ("unhandled conversion from %d to %d", src_format,
542 * gst_video_encoder_set_headers:
543 * @encoder: a #GstVideoEncoder
544 * @headers: (transfer full) (element-type GstBuffer): a list of #GstBuffer containing the codec header
546 * Set the codec headers to be sent downstream whenever requested.
549 gst_video_encoder_set_headers (GstVideoEncoder * video_encoder, GList * headers)
551 GST_VIDEO_ENCODER_STREAM_LOCK (video_encoder);
553 GST_DEBUG_OBJECT (video_encoder, "new headers %p", headers);
/* drop any previously installed header buffers before taking ownership */
554 if (video_encoder->priv->headers) {
555 g_list_foreach (video_encoder->priv->headers, (GFunc) gst_buffer_unref,
557 g_list_free (video_encoder->priv->headers);
559 video_encoder->priv->headers = headers;
/* flag so the headers are (re)sent before the next output buffer */
560 video_encoder->priv->new_headers = TRUE;
562 GST_VIDEO_ENCODER_STREAM_UNLOCK (video_encoder);
/* Build a fresh output codec state whose video info is set to the ENCODED
 * format, copying display-related fields (interlacing, size, chroma site,
 * colorimetry, PAR, framerate, multiview) over from @reference — the input
 * state — when one is provided. */
565 static GstVideoCodecState *
566 _new_output_state (GstCaps * caps, GstVideoCodecState * reference)
568 GstVideoCodecState *state;
570 state = g_slice_new0 (GstVideoCodecState);
571 state->ref_count = 1;
572 gst_video_info_init (&state->info);
573 gst_video_info_set_format (&state->info, GST_VIDEO_FORMAT_ENCODED, 0, 0);
578 GstVideoInfo *tgt, *ref;
581 ref = &reference->info;
583 /* Copy over extra fields from reference state */
584 tgt->interlace_mode = ref->interlace_mode;
585 tgt->flags = ref->flags;
586 tgt->width = ref->width;
587 tgt->height = ref->height;
588 tgt->chroma_site = ref->chroma_site;
589 tgt->colorimetry = ref->colorimetry;
590 tgt->par_n = ref->par_n;
591 tgt->par_d = ref->par_d;
592 tgt->fps_n = ref->fps_n;
593 tgt->fps_d = ref->fps_d;
595 GST_VIDEO_INFO_MULTIVIEW_MODE (tgt) = GST_VIDEO_INFO_MULTIVIEW_MODE (ref);
596 GST_VIDEO_INFO_MULTIVIEW_FLAGS (tgt) = GST_VIDEO_INFO_MULTIVIEW_FLAGS (ref);
/* Build an input codec state by parsing sink @caps into a GstVideoInfo;
 * on parse failure the partially built state is freed and NULL is
 * (presumably) returned — the error-path return is not visible here. */
602 static GstVideoCodecState *
603 _new_input_state (GstCaps * caps)
605 GstVideoCodecState *state;
607 state = g_slice_new0 (GstVideoCodecState);
608 state->ref_count = 1;
609 gst_video_info_init (&state->info);
610 if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
612 state->caps = gst_caps_ref (caps);
618 g_slice_free (GstVideoCodecState, state);
/* Handle new sink caps: skip if neither the caps nor the parsed video info
 * actually changed, otherwise build a new input state, let the subclass
 * reconfigure via ::set_format, and install the state on success. */
624 gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
626 GstVideoEncoderClass *encoder_class;
627 GstVideoCodecState *state;
630 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
632 /* subclass should do something here ... */
633 g_return_val_if_fail (encoder_class->set_format != NULL, FALSE);
635 GST_DEBUG_OBJECT (encoder, "setcaps %" GST_PTR_FORMAT, caps);
637 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* fast path: identical caps need no renegotiation */
639 if (encoder->priv->input_state) {
640 GST_DEBUG_OBJECT (encoder,
641 "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
642 encoder->priv->input_state->caps, caps);
643 if (gst_caps_is_equal (encoder->priv->input_state->caps, caps))
644 goto caps_not_changed;
647 state = _new_input_state (caps);
648 if (G_UNLIKELY (!state))
/* different caps strings may still describe equal video info */
651 if (encoder->priv->input_state
652 && gst_video_info_is_equal (&state->info,
653 &encoder->priv->input_state->info)) {
654 gst_video_codec_state_unref (state);
655 goto caps_not_changed;
658 if (encoder_class->reset) {
659 GST_FIXME_OBJECT (encoder, "GstVideoEncoder::reset() is deprecated");
660 encoder_class->reset (encoder, TRUE);
663 /* and subclass should be ready to configure format at any time around */
664 ret = encoder_class->set_format (encoder, state);
666 if (encoder->priv->input_state)
667 gst_video_codec_state_unref (encoder->priv->input_state);
668 encoder->priv->input_state = state;
670 gst_video_codec_state_unref (state);
673 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* error/early-exit labels */
676 GST_WARNING_OBJECT (encoder, "rejected caps %" GST_PTR_FORMAT, caps);
682 GST_DEBUG_OBJECT (encoder, "Caps did not change - ignore");
683 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
690 GST_WARNING_OBJECT (encoder, "Failed to parse caps");
691 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
697 * gst_video_encoder_proxy_getcaps:
698 * @enc: a #GstVideoEncoder
699 * @caps: (allow-none): initial caps
700 * @filter: (allow-none): filter caps
702 * Returns caps that express @caps (or sink template caps if @caps == NULL)
703 * restricted to resolution/format/... combinations supported by downstream
704 * elements (e.g. muxers).
706 * Returns: (transfer full): a #GstCaps owned by caller
/* Thin wrapper over the proxy-getcaps helper shared by the video base
 * classes; it does the actual downstream intersection. */
709 gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
712 return __gst_video_element_proxy_getcaps (GST_ELEMENT_CAST (encoder),
713 GST_VIDEO_ENCODER_SINK_PAD (encoder),
714 GST_VIDEO_ENCODER_SRC_PAD (encoder), caps, filter);
/* Compute the caps the sink pad can accept: delegate to the subclass
 * ::getcaps vfunc when set, otherwise proxy downstream restrictions via
 * gst_video_encoder_proxy_getcaps(). */
718 gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
720 GstVideoEncoderClass *klass;
723 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
726 caps = klass->getcaps (encoder, filter);
728 caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
730 GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
/* Default ::decide_allocation: take the first allocator the downstream
 * ALLOCATION query offered (updating that entry), or fall back to default
 * allocation params and add a new entry. */
736 gst_video_encoder_decide_allocation_default (GstVideoEncoder * encoder,
739 GstAllocator *allocator = NULL;
740 GstAllocationParams params;
741 gboolean update_allocator;
743 /* we got configuration from our peer or the decide_allocation method,
745 if (gst_query_get_n_allocation_params (query) > 0) {
746 /* try the allocator */
747 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
748 update_allocator = TRUE;
751 gst_allocation_params_init (&params);
752 update_allocator = FALSE;
755 if (update_allocator)
756 gst_query_set_nth_allocation_param (query, 0, allocator, &params);
758 gst_query_add_allocation_param (query, allocator, &params);
760 gst_object_unref (allocator);
/* Default ::propose_allocation: if the upstream query carries no pool yet,
 * offer a GstVideoBufferPool sized for the negotiated caps (16-byte aligned
 * params) and advertise support for GstVideoMeta. */
766 gst_video_encoder_propose_allocation_default (GstVideoEncoder * encoder,
774 gst_query_parse_allocation (query, &caps, NULL);
779 if (!gst_video_info_from_caps (&info, caps))
782 size = GST_VIDEO_INFO_SIZE (&info);
784 if (gst_query_get_n_allocation_pools (query) == 0) {
785 GstStructure *structure;
786 GstAllocator *allocator = NULL;
/* align = 15 -> 16-byte aligned buffers */
787 GstAllocationParams params = { 0, 15, 0, 0 };
789 if (gst_query_get_n_allocation_params (query) > 0)
790 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
792 gst_query_add_allocation_param (query, allocator, &params);
794 pool = gst_video_buffer_pool_new ();
796 structure = gst_buffer_pool_get_config (pool);
797 gst_buffer_pool_config_set_params (structure, caps, size, 0, 0);
798 gst_buffer_pool_config_set_allocator (structure, allocator, &params);
801 gst_object_unref (allocator);
803 if (!gst_buffer_pool_set_config (pool, structure))
806 gst_query_add_allocation_pool (query, pool, size, 0, 0);
807 gst_object_unref (pool);
808 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
/* error label: pool configuration was rejected */
816 GST_ERROR_OBJECT (encoder, "failed to set config");
817 gst_object_unref (pool);
/* Default ::sink_query: answer CAPS queries from _sink_getcaps(), route
 * ALLOCATION queries to the subclass ::propose_allocation vfunc, and let
 * everything else take the default pad query path. */
823 gst_video_encoder_sink_query_default (GstVideoEncoder * encoder,
826 GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
827 gboolean res = FALSE;
829 switch (GST_QUERY_TYPE (query)) {
832 GstCaps *filter, *caps;
834 gst_query_parse_caps (query, &filter);
835 caps = gst_video_encoder_sink_getcaps (encoder, filter);
836 gst_query_set_caps_result (query, caps);
837 gst_caps_unref (caps);
841 case GST_QUERY_ALLOCATION:
843 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
845 if (klass->propose_allocation)
846 res = klass->propose_allocation (encoder, query);
850 res = gst_pad_query_default (pad, GST_OBJECT (encoder), query);
/* Sink pad query function: logs the query and dispatches it to the
 * subclass-overridable ::sink_query vfunc. */
857 gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
860 GstVideoEncoder *encoder;
861 GstVideoEncoderClass *encoder_class;
862 gboolean ret = FALSE;
864 encoder = GST_VIDEO_ENCODER (parent);
865 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
867 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
868 GST_QUERY_TYPE_NAME (query));
870 if (encoder_class->sink_query)
871 ret = encoder_class->sink_query (encoder, query);
/* GObject finalize: release the stream lock and any remaining negotiated
 * allocator, then chain up to the parent class. */
877 gst_video_encoder_finalize (GObject * object)
879 GstVideoEncoder *encoder;
881 GST_DEBUG_OBJECT (object, "finalize");
883 encoder = GST_VIDEO_ENCODER (object);
884 g_rec_mutex_clear (&encoder->stream_lock);
886 if (encoder->priv->allocator) {
887 gst_object_unref (encoder->priv->allocator);
888 encoder->priv->allocator = NULL;
891 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Push @event on the source pad.  TIME segments are intercepted first:
 * the configured time_adjustment is applied to start/stop, the result is
 * stored as the output segment, and a fresh segment event is pushed in
 * place of the original. */
895 gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
897 switch (GST_EVENT_TYPE (event)) {
898 case GST_EVENT_SEGMENT:
902 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
904 gst_event_copy_segment (event, &segment);
906 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
/* non-TIME segments are forwarded untouched */
908 if (segment.format != GST_FORMAT_TIME) {
909 GST_DEBUG_OBJECT (encoder, "received non TIME segment");
910 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
914 if (encoder->priv->time_adjustment != GST_CLOCK_TIME_NONE) {
915 segment.start += encoder->priv->time_adjustment;
916 if (GST_CLOCK_TIME_IS_VALID (segment.stop)) {
917 segment.stop += encoder->priv->time_adjustment;
921 encoder->output_segment = segment;
922 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* replace the original event with one built from the adjusted segment */
924 gst_event_unref (event);
925 event = gst_event_new_segment (&encoder->output_segment);
933 return gst_pad_push_event (encoder->srcpad, event);
/* Merge upstream stream tags with encoder-set tags using the configured
 * merge mode and wrap the result in a TAG event.  Returns NULL when the
 * merge produced nothing or only an empty tag list. */
937 gst_video_encoder_create_merged_tags_event (GstVideoEncoder * enc)
939 GstTagList *merged_tags;
941 GST_LOG_OBJECT (enc, "upstream : %" GST_PTR_FORMAT, enc->priv->upstream_tags);
942 GST_LOG_OBJECT (enc, "encoder : %" GST_PTR_FORMAT, enc->priv->tags);
943 GST_LOG_OBJECT (enc, "mode : %d", enc->priv->tags_merge_mode);
946 gst_tag_list_merge (enc->priv->upstream_tags, enc->priv->tags,
947 enc->priv->tags_merge_mode);
949 GST_DEBUG_OBJECT (enc, "merged : %" GST_PTR_FORMAT, merged_tags);
951 if (merged_tags == NULL)
954 if (gst_tag_list_is_empty (merged_tags)) {
955 gst_tag_list_unref (merged_tags);
959 return gst_event_new_tag (merged_tags);
/* If the tag state changed since the last push, build the merged tag event
 * and send it downstream, then clear the changed flag. */
963 gst_video_encoder_check_and_push_tags (GstVideoEncoder * encoder)
965 if (encoder->priv->tags_changed) {
966 GstEvent *tags_event;
968 tags_event = gst_video_encoder_create_merged_tags_event (encoder);
970 if (tags_event != NULL)
971 gst_video_encoder_push_event (encoder, tags_event);
973 encoder->priv->tags_changed = FALSE;
/* Default ::sink_event implementation.  Handles CAPS, EOS (draining the
 * subclass and flushing queued events/tags), SEGMENT, force-key-unit
 * custom events, STREAM_START, TAG and FLUSH_STOP; remaining serialized
 * events are queued on current_frame_events, everything else is pushed
 * through immediately (see the comment near the end). */
978 gst_video_encoder_sink_event_default (GstVideoEncoder * encoder,
981 GstVideoEncoderClass *encoder_class;
982 gboolean ret = FALSE;
984 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
986 switch (GST_EVENT_TYPE (event)) {
/* CAPS: negotiate a new input format */
991 gst_event_parse_caps (event, &caps);
992 ret = gst_video_encoder_setcaps (encoder, caps);
994 gst_event_unref (event);
/* EOS: drain the subclass, then flush queued events and tags downstream */
1000 GstFlowReturn flow_ret;
1002 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1004 if (encoder_class->finish) {
1005 flow_ret = encoder_class->finish (encoder);
1007 flow_ret = GST_FLOW_OK;
/* events were prepended, so walk the list backwards to restore order */
1010 if (encoder->priv->current_frame_events) {
1013 for (l = g_list_last (encoder->priv->current_frame_events); l;
1014 l = g_list_previous (l)) {
1015 GstEvent *event = GST_EVENT (l->data);
1017 gst_video_encoder_push_event (encoder, event);
1020 g_list_free (encoder->priv->current_frame_events);
1021 encoder->priv->current_frame_events = NULL;
1023 gst_video_encoder_check_and_push_tags (encoder);
1025 ret = (flow_ret == GST_FLOW_OK);
1026 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* SEGMENT: only TIME segments are accepted as the new input segment */
1029 case GST_EVENT_SEGMENT:
1033 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1035 gst_event_copy_segment (event, &segment);
1037 GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
1039 if (segment.format != GST_FORMAT_TIME) {
1040 GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
1041 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1045 encoder->input_segment = segment;
1047 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* downstream force-key-unit: queue a request under the object lock */
1050 case GST_EVENT_CUSTOM_DOWNSTREAM:
1052 if (gst_video_event_is_force_key_unit (event)) {
1053 GstClockTime running_time;
1054 gboolean all_headers;
1057 if (gst_video_event_parse_downstream_force_key_unit (event,
1058 NULL, NULL, &running_time, &all_headers, &count)) {
1059 ForcedKeyUnitEvent *fevt;
1061 GST_OBJECT_LOCK (encoder);
1062 fevt = forced_key_unit_event_new (running_time, all_headers, count);
1063 encoder->priv->force_key_unit =
1064 g_list_append (encoder->priv->force_key_unit, fevt);
1065 GST_OBJECT_UNLOCK (encoder);
1067 GST_DEBUG_OBJECT (encoder,
1068 "force-key-unit event: running-time %" GST_TIME_FORMAT
1069 ", all_headers %d, count %u",
1070 GST_TIME_ARGS (running_time), all_headers, count);
1072 gst_event_unref (event);
1078 case GST_EVENT_STREAM_START:
1080 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1081 /* Flush upstream tags after a STREAM_START */
1082 GST_DEBUG_OBJECT (encoder, "STREAM_START, clearing upstream tags");
1083 if (encoder->priv->upstream_tags) {
1084 gst_tag_list_unref (encoder->priv->upstream_tags);
1085 encoder->priv->upstream_tags = NULL;
1086 encoder->priv->tags_changed = TRUE;
1088 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* TAG: keep a filtered copy of stream-scope tags (codec/bitrate/encoder
 * tags are stripped since the encoder produces its own), then replace the
 * event with a freshly merged tag event */
1095 gst_event_parse_tag (event, &tags);
1097 if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
1098 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1099 if (encoder->priv->upstream_tags != tags) {
1100 tags = gst_tag_list_copy (tags);
1102 /* FIXME: make generic based on GST_TAG_FLAG_ENCODED */
1103 gst_tag_list_remove_tag (tags, GST_TAG_CODEC);
1104 gst_tag_list_remove_tag (tags, GST_TAG_AUDIO_CODEC);
1105 gst_tag_list_remove_tag (tags, GST_TAG_VIDEO_CODEC);
1106 gst_tag_list_remove_tag (tags, GST_TAG_SUBTITLE_CODEC);
1107 gst_tag_list_remove_tag (tags, GST_TAG_CONTAINER_FORMAT);
1108 gst_tag_list_remove_tag (tags, GST_TAG_BITRATE);
1109 gst_tag_list_remove_tag (tags, GST_TAG_NOMINAL_BITRATE);
1110 gst_tag_list_remove_tag (tags, GST_TAG_MAXIMUM_BITRATE);
1111 gst_tag_list_remove_tag (tags, GST_TAG_MINIMUM_BITRATE);
1112 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER);
1113 gst_tag_list_remove_tag (tags, GST_TAG_ENCODER_VERSION);
1115 if (encoder->priv->upstream_tags)
1116 gst_tag_list_unref (encoder->priv->upstream_tags);
1117 encoder->priv->upstream_tags = tags;
1118 GST_INFO_OBJECT (encoder, "upstream tags: %" GST_PTR_FORMAT, tags);
1120 gst_event_unref (event);
1121 event = gst_video_encoder_create_merged_tags_event (encoder);
1122 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* FLUSH_STOP: drop subclass state, reinit segments, soft reset */
1128 case GST_EVENT_FLUSH_STOP:{
1129 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1130 gst_video_encoder_flush (encoder);
1131 gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
1132 gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
1133 gst_video_encoder_reset (encoder, FALSE);
1134 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1141 /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
1142 * For EOS this is required because no buffer or serialized event
1143 * will come after EOS and nothing could trigger another
1144 * _finish_frame() call. *
1145 * If the subclass handles sending of EOS manually it can simply
1146 * not chain up to the parent class' event handler
1148 * For FLUSH_STOP this is required because it is expected
1149 * to be forwarded immediately and no buffers are queued anyway.
1152 if (!GST_EVENT_IS_SERIALIZED (event)
1153 || GST_EVENT_TYPE (event) == GST_EVENT_EOS
1154 || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
1155 ret = gst_video_encoder_push_event (encoder, event);
1157 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1158 encoder->priv->current_frame_events =
1159 g_list_prepend (encoder->priv->current_frame_events, event);
1160 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Sink pad event function: logs the event and dispatches it to the
 * subclass-overridable ::sink_event vfunc. */
1169 gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
1172 GstVideoEncoder *enc;
1173 GstVideoEncoderClass *klass;
1174 gboolean ret = TRUE;
1176 enc = GST_VIDEO_ENCODER (parent);
1177 klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
1179 GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
1180 GST_EVENT_TYPE_NAME (event));
1182 if (klass->sink_event)
1183 ret = klass->sink_event (enc, event);
/* Default ::src_event: intercept upstream force-key-unit requests and queue
 * them (under the object lock) for the next encoding opportunity; all other
 * events take the default pad event path. */
1189 gst_video_encoder_src_event_default (GstVideoEncoder * encoder,
1192 gboolean ret = FALSE;
1194 switch (GST_EVENT_TYPE (event)) {
1195 case GST_EVENT_CUSTOM_UPSTREAM:
1197 if (gst_video_event_is_force_key_unit (event)) {
1198 GstClockTime running_time;
1199 gboolean all_headers;
1202 if (gst_video_event_parse_upstream_force_key_unit (event,
1203 &running_time, &all_headers, &count)) {
1204 ForcedKeyUnitEvent *fevt;
1206 GST_OBJECT_LOCK (encoder);
1207 fevt = forced_key_unit_event_new (running_time, all_headers, count);
1208 encoder->priv->force_key_unit =
1209 g_list_append (encoder->priv->force_key_unit, fevt);
1210 GST_OBJECT_UNLOCK (encoder);
1212 GST_DEBUG_OBJECT (encoder,
1213 "force-key-unit event: running-time %" GST_TIME_FORMAT
1214 ", all_headers %d, count %u",
1215 GST_TIME_ARGS (running_time), all_headers, count);
1217 gst_event_unref (event);
1229 gst_pad_event_default (encoder->srcpad, GST_OBJECT_CAST (encoder),
/* Source pad event function: logs the event and dispatches it to the
 * subclass-overridable ::src_event vfunc. */
1236 gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
1238 GstVideoEncoder *encoder;
1239 GstVideoEncoderClass *klass;
1240 gboolean ret = FALSE;
1242 encoder = GST_VIDEO_ENCODER (parent);
1243 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1245 GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
1247 if (klass->src_event)
1248 ret = klass->src_event (encoder, event);
/* Default ::src_query: answer CONVERT via the accumulated byte/time totals
 * and LATENCY by adding this encoder's min/max latency on top of the peer's
 * values (under the object lock); everything else takes the default path. */
1254 gst_video_encoder_src_query_default (GstVideoEncoder * enc, GstQuery * query)
1256 GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (enc);
1257 GstVideoEncoderPrivate *priv;
1262 GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
1264 switch (GST_QUERY_TYPE (query)) {
1265 case GST_QUERY_CONVERT:
1267 GstFormat src_fmt, dest_fmt;
1268 gint64 src_val, dest_val;
1270 gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
1272 gst_video_encoded_video_convert (priv->bytes, priv->time, src_fmt,
1273 src_val, &dest_fmt, &dest_val);
1276 gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
1279 case GST_QUERY_LATENCY:
1282 GstClockTime min_latency, max_latency;
1284 res = gst_pad_peer_query (enc->sinkpad, query);
1286 gst_query_parse_latency (query, &live, &min_latency, &max_latency);
1287 GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
1288 GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
1289 GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
1291 GST_OBJECT_LOCK (enc);
1292 min_latency += priv->min_latency;
/* NONE on either side keeps max latency unbounded */
1293 if (max_latency == GST_CLOCK_TIME_NONE
1294 || enc->priv->max_latency == GST_CLOCK_TIME_NONE)
1295 max_latency = GST_CLOCK_TIME_NONE;
1297 max_latency += enc->priv->max_latency;
1298 GST_OBJECT_UNLOCK (enc);
1300 gst_query_set_latency (query, live, min_latency, max_latency);
1305 res = gst_pad_query_default (pad, GST_OBJECT (enc), query);
/* error label */
1310 GST_DEBUG_OBJECT (enc, "query failed");
/* Src pad query callback: logs the query and dispatches it to the
 * subclass' ->src_query vfunc when one is implemented. */
1315 gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
1317 GstVideoEncoder *encoder;
1318 GstVideoEncoderClass *encoder_class;
1319 gboolean ret = FALSE;
1321 encoder = GST_VIDEO_ENCODER (parent);
1322 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1324 GST_DEBUG_OBJECT (encoder, "received query %d, %s", GST_QUERY_TYPE (query),
1325 GST_QUERY_TYPE_NAME (query));
1327 if (encoder_class->src_query)
1328 ret = encoder_class->src_query (encoder, query);
/* Allocate and initialize a new GstVideoCodecFrame for an input buffer.
 * The frame takes ownership of @buf and of any events collected in
 * priv->current_frame_events (events that arrived before this buffer).
 * Sequential system/presentation frame numbers are assigned under the
 * stream lock. The original input PTS is stashed in abidata.ABI.ts so a
 * DTS can be derived later in finish_frame(). */
1333 static GstVideoCodecFrame *
1334 gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
1335 GstClockTime pts, GstClockTime dts, GstClockTime duration)
1337 GstVideoEncoderPrivate *priv = encoder->priv;
1338 GstVideoCodecFrame *frame;
1340 frame = g_slice_new0 (GstVideoCodecFrame);
1342 frame->ref_count = 1;
1344 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1345 frame->system_frame_number = priv->system_frame_number;
1346 priv->system_frame_number++;
1348 frame->presentation_frame_number = priv->presentation_frame_number;
1349 priv->presentation_frame_number++;
1350 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Pending serialized events now belong to this frame and will be pushed
 * just before its output buffer. */
1352 frame->events = priv->current_frame_events;
1353 priv->current_frame_events = NULL;
1354 frame->input_buffer = buf;
1357 frame->duration = duration;
1358 frame->abidata.ABI.ts = pts;
/* Sink pad chain function: entry point for every raw input buffer.
 *
 * Flow:
 *  1. require a subclass ->handle_frame and a negotiated input state;
 *  2. clip the buffer to the input segment (dropping it entirely when
 *     outside the segment);
 *  3. apply the min-PTS time adjustment (see set_min_pts) so the subclass
 *     never sees timestamps below the requested minimum;
 *  4. wrap the buffer in a new GstVideoCodecFrame (DTS deliberately NONE);
 *  5. match any queued force-key-unit request against this frame and set
 *     the keyframe flags accordingly (under the object lock);
 *  6. append the frame to priv->frames and hand it to ->handle_frame.
 * Frames for which the subclass returns DROPPED or an error are released
 * here. Runs with the stream lock held around all frame bookkeeping. */
1364 static GstFlowReturn
1365 gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
1367 GstVideoEncoder *encoder;
1368 GstVideoEncoderPrivate *priv;
1369 GstVideoEncoderClass *klass;
1370 GstVideoCodecFrame *frame;
1371 GstClockTime pts, duration;
1372 GstFlowReturn ret = GST_FLOW_OK;
1373 guint64 start, stop, cstart, cstop;
1375 encoder = GST_VIDEO_ENCODER (parent);
1376 priv = encoder->priv;
1377 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
/* An encoder without handle_frame is unusable; treat it as a programming
 * error rather than a runtime condition. */
1379 g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
1381 if (!encoder->priv->input_state)
1382 goto not_negotiated;
1384 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1386 pts = GST_BUFFER_PTS (buf);
1387 duration = GST_BUFFER_DURATION (buf);
1389 GST_LOG_OBJECT (encoder,
1390 "received buffer of size %" G_GSIZE_FORMAT " with PTS %" GST_TIME_FORMAT
1391 ", DTS %" GST_TIME_FORMAT ", duration %" GST_TIME_FORMAT,
1392 gst_buffer_get_size (buf), GST_TIME_ARGS (pts),
1393 GST_TIME_ARGS (GST_BUFFER_DTS (buf)), GST_TIME_ARGS (duration));
/* Compute the [start, stop) interval of the buffer for segment clipping
 * (start is derived from the buffer PTS above). */
1396 if (GST_CLOCK_TIME_IS_VALID (duration))
1397 stop = start + duration;
1399 stop = GST_CLOCK_TIME_NONE;
1401 /* Drop buffers outside of segment */
1402 if (!gst_segment_clip (&encoder->input_segment,
1403 GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
1404 GST_DEBUG_OBJECT (encoder, "clipping to segment dropped frame")_
1405 gst_buffer_unref (buf);
/* Recompute the duration from the clipped interval. */
1409 if (GST_CLOCK_TIME_IS_VALID (cstop))
1410 duration = cstop - cstart;
1412 duration = GST_CLOCK_TIME_NONE;
/* Lazily compute the constant offset needed to honor the configured
 * minimum PTS, based on the first timestamp seen below it. */
1414 if (priv->min_pts != GST_CLOCK_TIME_NONE
1415 && priv->time_adjustment == GST_CLOCK_TIME_NONE) {
1416 if (cstart < priv->min_pts) {
1417 priv->time_adjustment = priv->min_pts - cstart;
1421 if (priv->time_adjustment != GST_CLOCK_TIME_NONE) {
1422 cstart += priv->time_adjustment;
1425 /* incoming DTS is not really relevant and does not make sense anyway,
1426 * so pass along _NONE and maybe come up with something better later on */
1427 frame = gst_video_encoder_new_frame (encoder, buf, cstart,
1428 GST_CLOCK_TIME_NONE, duration);
/* Check whether a queued force-key-unit request applies to this frame;
 * the list is shared with the src-pad event handler, hence the lock. */
1430 GST_OBJECT_LOCK (encoder);
1431 if (priv->force_key_unit) {
1432 ForcedKeyUnitEvent *fevt = NULL;
1433 GstClockTime running_time;
1437 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
1440 for (l = priv->force_key_unit; l; l = l->next) {
1441 ForcedKeyUnitEvent *tmp = l->data;
1443 /* Skip pending keyunits */
1447 /* Simple case, keyunit ASAP */
1448 if (tmp->running_time == GST_CLOCK_TIME_NONE) {
1453 /* Event for before this frame */
1454 if (tmp->running_time <= running_time) {
/* Remember which frame this request was attached to so finish_frame()
 * can match it exactly by system_frame_number. */
1461 fevt->frame_id = frame->system_frame_number;
1462 GST_DEBUG_OBJECT (encoder,
1463 "Forcing a key unit at running time %" GST_TIME_FORMAT,
1464 GST_TIME_ARGS (running_time));
1465 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME (frame);
1466 if (fevt->all_headers)
1467 GST_VIDEO_CODEC_FRAME_SET_FORCE_KEYFRAME_HEADERS (frame);
1468 fevt->pending = TRUE;
1471 GST_OBJECT_UNLOCK (encoder);
/* The frames list holds its own reference until release/finish. */
1473 gst_video_codec_frame_ref (frame);
1474 priv->frames = g_list_append (priv->frames, frame);
1476 /* new data, more finish needed */
1477 priv->drained = FALSE;
1479 GST_LOG_OBJECT (encoder, "passing frame pfn %d to subclass",
1480 frame->presentation_frame_number);
1482 ret = klass->handle_frame (encoder, frame);
1484 if (ret == GST_VIDEO_ENCODER_FLOW_DROPPED || ret == GST_FLOW_ERROR) {
1485 GST_INFO_OBJECT (encoder, "Dropping frame %p", frame);
1486 gst_video_encoder_release_frame (encoder, frame);
1490 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* ERRORS: reached via goto not_negotiated above. */
1497 GST_ELEMENT_ERROR (encoder, CORE, NEGOTIATION, (NULL),
1498 ("encoder not initialized"));
1499 gst_buffer_unref (buf);
1500 return GST_FLOW_NOT_NEGOTIATED;
/* GstElement::change_state implementation.
 * Upward: NULL->READY opens the device/library (->open), READY->PAUSED
 * resets internal state and starts the subclass (->start).
 * Then chains up to the parent class.
 * Downward: PAUSED->READY stops the subclass (->stop) and resets state,
 * READY->NULL closes the device/library (->close).
 * Failure of any subclass vfunc jumps to the matching error label below,
 * which posts an element error and returns STATE_CHANGE_FAILURE. */
1504 static GstStateChangeReturn
1505 gst_video_encoder_change_state (GstElement * element, GstStateChange transition)
1507 GstVideoEncoder *encoder;
1508 GstVideoEncoderClass *encoder_class;
1509 GstStateChangeReturn ret;
1511 encoder = GST_VIDEO_ENCODER (element);
1512 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (element);
1514 switch (transition) {
1515 case GST_STATE_CHANGE_NULL_TO_READY:
1516 /* open device/library if needed */
1517 if (encoder_class->open && !encoder_class->open (encoder))
1520 case GST_STATE_CHANGE_READY_TO_PAUSED:
/* Full reset (TRUE = hard reset) before starting a new session. */
1521 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1522 gst_video_encoder_reset (encoder, TRUE);
1523 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1525 /* Initialize device/library if needed */
1526 if (encoder_class->start && !encoder_class->start (encoder))
/* Let the parent class perform the actual state change. */
1533 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1535 switch (transition) {
1536 case GST_STATE_CHANGE_PAUSED_TO_READY:{
1537 gboolean stopped = TRUE;
1539 if (encoder_class->stop)
1540 stopped = encoder_class->stop (encoder);
1542 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1543 gst_video_encoder_reset (encoder, TRUE);
1544 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1550 case GST_STATE_CHANGE_READY_TO_NULL:
1551 /* close device/library if needed */
1552 if (encoder_class->close && !encoder_class->close (encoder))
/* ERROR labels: one per failing vfunc, reached via goto above. */
1565 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1566 ("Failed to open encoder"));
1567 return GST_STATE_CHANGE_FAILURE;
1572 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1573 ("Failed to start encoder"));
1574 return GST_STATE_CHANGE_FAILURE;
1579 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1580 ("Failed to stop encoder"));
1581 return GST_STATE_CHANGE_FAILURE;
1586 GST_ELEMENT_ERROR (encoder, LIBRARY, INIT, (NULL),
1587 ("Failed to close encoder"));
1588 return GST_STATE_CHANGE_FAILURE;
/* Default ->negotiate implementation.
 * When the output state changed, refreshes the output caps from the
 * GstVideoInfo (dimensions, PAR, framerate/max-framerate, codec_data,
 * multiview fields). Pending serialized events that must precede CAPS
 * are pushed first. The (possibly updated) caps are set on the src pad
 * only if they differ from the current ones. Finally an ALLOCATION query
 * is run downstream and handed to ->decide_allocation, and the resulting
 * allocator/params are stored for output buffer allocation. */
1593 gst_video_encoder_negotiate_default (GstVideoEncoder * encoder)
1595 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1596 GstAllocator *allocator;
1597 GstAllocationParams params;
1598 gboolean ret = TRUE;
1599 GstVideoCodecState *state = encoder->priv->output_state;
1600 GstVideoInfo *info = &state->info;
1601 GstQuery *query = NULL;
1602 GstVideoCodecFrame *frame;
1605 g_return_val_if_fail (state->caps != NULL, FALSE);
1607 if (encoder->priv->output_state_changed) {
1608 state->caps = gst_caps_make_writable (state->caps);
/* Fill caps fields from the negotiated video info. */
1611 gst_caps_set_simple (state->caps, "width", G_TYPE_INT, info->width,
1612 "height", G_TYPE_INT, info->height,
1613 "pixel-aspect-ratio", GST_TYPE_FRACTION,
1614 info->par_n, info->par_d, NULL);
1615 if (info->flags & GST_VIDEO_FLAG_VARIABLE_FPS && info->fps_n != 0) {
1616 /* variable fps with a max-framerate */
1617 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION, 0, 1,
1618 "max-framerate", GST_TYPE_FRACTION, info->fps_n, info->fps_d, NULL);
1620 /* no variable fps or no max-framerate */
1621 gst_caps_set_simple (state->caps, "framerate", GST_TYPE_FRACTION,
1622 info->fps_n, info->fps_d, NULL);
1624 if (state->codec_data)
1625 gst_caps_set_simple (state->caps, "codec_data", GST_TYPE_BUFFER,
1626 state->codec_data, NULL);
1628 if (GST_VIDEO_INFO_MULTIVIEW_MODE (info) != GST_VIDEO_MULTIVIEW_MODE_NONE) {
1629 const gchar *caps_mview_mode =
1630 gst_video_multiview_mode_to_caps_string (GST_VIDEO_INFO_MULTIVIEW_MODE
1633 gst_caps_set_simple (state->caps, "multiview-mode", G_TYPE_STRING,
1634 caps_mview_mode, "multiview-flags", GST_TYPE_VIDEO_MULTIVIEW_FLAGSET,
1635 GST_VIDEO_INFO_MULTIVIEW_FLAGS (info), GST_FLAG_SET_MASK_EXACT, NULL);
1637 encoder->priv->output_state_changed = FALSE;
1640 /* Push all pending pre-caps events of the oldest frame before
1642 frame = encoder->priv->frames ? encoder->priv->frames->data : NULL;
1643 if (frame || encoder->priv->current_frame_events) {
1647 events = &frame->events;
1649 events = &encoder->priv->current_frame_events;
/* Events are stored newest-first; walk from the tail to push them in
 * their original order, removing every pre-CAPS event that is sent. */
1652 for (l = g_list_last (*events); l;) {
1653 GstEvent *event = GST_EVENT (l->data);
1656 if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
1657 gst_video_encoder_push_event (encoder, event);
1660 *events = g_list_delete_link (*events, tmp);
/* Only re-set caps when they actually changed. */
1667 prevcaps = gst_pad_get_current_caps (encoder->srcpad);
1668 if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
1669 ret = gst_pad_set_caps (encoder->srcpad, state->caps);
1673 gst_caps_unref (prevcaps);
/* Negotiate buffer allocation with downstream. */
1678 query = gst_query_new_allocation (state->caps, TRUE);
1679 if (!gst_pad_peer_query (encoder->srcpad, query)) {
1680 GST_DEBUG_OBJECT (encoder, "didn't get downstream ALLOCATION hints");
1683 g_assert (klass->decide_allocation != NULL);
1684 ret = klass->decide_allocation (encoder, query);
1686 GST_DEBUG_OBJECT (encoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
1690 goto no_decide_allocation;
1692 /* we got configuration from our peer or the decide_allocation method,
1694 if (gst_query_get_n_allocation_params (query) > 0) {
1695 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
1698 gst_allocation_params_init (&params);
/* Store the chosen allocator and params for allocate_output_buffer(). */
1701 if (encoder->priv->allocator)
1702 gst_object_unref (encoder->priv->allocator);
1703 encoder->priv->allocator = allocator;
1704 encoder->priv->params = params;
1708 gst_query_unref (query);
1713 no_decide_allocation:
1715 GST_WARNING_OBJECT (encoder, "Subclass failed to decide allocation");
/* Run the subclass' ->negotiate vfunc without taking the stream lock;
 * the caller is expected to hold it already. Returns TRUE when no
 * negotiate vfunc is installed. */
1721 gst_video_encoder_negotiate_unlocked (GstVideoEncoder * encoder)
1723 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1724 gboolean ret = TRUE;
1726 if (G_LIKELY (klass->negotiate))
1727 ret = klass->negotiate (encoder);
1733 * gst_video_encoder_negotiate:
1734 * @encoder: a #GstVideoEncoder
1736 * Negotiate with downstream elements to currently configured #GstVideoCodecState.
1737 * Unmark GST_PAD_FLAG_NEED_RECONFIGURE in any case. But mark it again if
1740 * Returns: #TRUE if the negotiation succeeded, else #FALSE.
1743 gst_video_encoder_negotiate (GstVideoEncoder * encoder)
1745 GstVideoEncoderClass *klass;
1746 gboolean ret = TRUE;
/* Public API: an output state must have been set before negotiating. */
1748 g_return_val_if_fail (GST_IS_VIDEO_ENCODER (encoder), FALSE);
1749 g_return_val_if_fail (encoder->priv->output_state, FALSE);
1751 klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
/* Clear the NEED_RECONFIGURE flag, run the subclass negotiation, and
 * re-arm the flag on failure so negotiation is retried later. */
1753 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1754 gst_pad_check_reconfigure (encoder->srcpad);
1755 if (klass->negotiate) {
1756 ret = klass->negotiate (encoder);
1758 gst_pad_mark_reconfigure (encoder->srcpad);
1760 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1766 * gst_video_encoder_allocate_output_buffer:
1767 * @encoder: a #GstVideoEncoder
1768 * @size: size of the buffer
1770 * Helper function that allocates a buffer to hold an encoded video frame
1771 * for @encoder's current #GstVideoCodecState.
1773 * Returns: (transfer full): allocated buffer
1776 gst_video_encoder_allocate_output_buffer (GstVideoEncoder * encoder, gsize size)
1779 gboolean needs_reconfigure = FALSE;
1781 g_return_val_if_fail (size > 0, NULL);
1783 GST_DEBUG ("alloc src buffer");
/* (Re)negotiate first if the output state changed or downstream asked
 * for reconfiguration, so the negotiated allocator/params are used. */
1785 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1786 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
1787 if (G_UNLIKELY (encoder->priv->output_state_changed
1788 || (encoder->priv->output_state && needs_reconfigure))) {
1789 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
1790 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
1791 gst_pad_mark_reconfigure (encoder->srcpad);
/* Normal path: use the allocator/params decided during negotiation. */
1797 gst_buffer_new_allocate (encoder->priv->allocator, size,
1798 &encoder->priv->params);
1800 GST_INFO_OBJECT (encoder, "couldn't allocate output buffer");
1804 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* Fallback: default allocator, no special params. */
1809 buffer = gst_buffer_new_allocate (NULL, size, NULL);
1811 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1817 * gst_video_encoder_allocate_output_frame:
1818 * @encoder: a #GstVideoEncoder
1819 * @frame: a #GstVideoCodecFrame
1820 * @size: size of the buffer
1822 * Helper function that allocates a buffer to hold an encoded video frame for @encoder's
1823 * current #GstVideoCodecState. Subclass should already have configured video
1824 * state and set src pad caps.
1826 * The buffer allocated here is owned by the frame and you should only
1827 * keep references to the frame, not the buffer.
1829 * Returns: %GST_FLOW_OK if an output buffer could be allocated
1832 gst_video_encoder_allocate_output_frame (GstVideoEncoder *
1833 encoder, GstVideoCodecFrame * frame, gsize size)
1835 gboolean needs_reconfigure = FALSE;
/* A frame must not already carry an output buffer. */
1837 g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);
/* Same renegotiation dance as allocate_output_buffer(): make sure the
 * negotiated allocator/params are current before allocating. */
1839 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
1840 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
1841 if (G_UNLIKELY (encoder->priv->output_state_changed
1842 || (encoder->priv->output_state && needs_reconfigure))) {
1843 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
1844 GST_DEBUG_OBJECT (encoder, "Failed to negotiate, fallback allocation");
1845 gst_pad_mark_reconfigure (encoder->srcpad);
1849 GST_LOG_OBJECT (encoder, "alloc buffer size %" G_GSIZE_FORMAT, size);
/* The allocated buffer is owned by the frame (see gtk-doc above). */
1851 frame->output_buffer =
1852 gst_buffer_new_allocate (encoder->priv->allocator, size,
1853 &encoder->priv->params);
1855 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
1857 return frame->output_buffer ? GST_FLOW_OK : GST_FLOW_ERROR;
/* Drop a frame from the pending-frames list and release the caller's
 * reference. Two unrefs may happen: one for the list's reference (when
 * the frame is still linked) and one because this function takes
 * ownership of the passed-in reference. */
1861 gst_video_encoder_release_frame (GstVideoEncoder * enc,
1862 GstVideoCodecFrame * frame)
1866 /* unref once from the list */
1867 link = g_list_find (enc->priv->frames, frame);
1869 gst_video_codec_frame_unref (frame);
1870 enc->priv->frames = g_list_delete_link (enc->priv->frames, link);
1872 /* unref because this function takes ownership */
1873 gst_video_codec_frame_unref (frame);
/* Default ->transform_meta implementation: approve copying a meta from
 * the input buffer to the output buffer only when the meta has no API
 * tags at all, or exactly one tag and that tag is the generic "video"
 * tag (i.e. metas that are safe to carry across encoding). */
1877 gst_video_encoder_transform_meta_default (GstVideoEncoder *
1878 encoder, GstVideoCodecFrame * frame, GstMeta * meta)
1880 const GstMetaInfo *info = meta->info;
1881 const gchar *const *tags;
1883 tags = gst_meta_api_type_get_tags (info->api);
1885 if (!tags || (g_strv_length ((gchar **) tags) == 1
1886 && gst_meta_api_type_has_tag (info->api,
1887 g_quark_from_string (GST_META_TAG_VIDEO_STR))))
1895 GstVideoEncoder *encoder;
1896 GstVideoCodecFrame *frame;
/* gst_buffer_foreach_meta() callback used by finish_frame() to copy
 * metadata from the input buffer to the encoded output buffer.
 * Memory-tagged metas are never copied (they describe the raw input
 * memory layout); for all others the subclass' ->transform_meta vfunc
 * decides, and approved metas are duplicated via the meta's own
 * transform_func with a plain-copy transform. */
1900 foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
1902 CopyMetaData *data = user_data;
1903 GstVideoEncoder *encoder = data->encoder;
1904 GstVideoEncoderClass *klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1905 GstVideoCodecFrame *frame = data->frame;
1906 const GstMetaInfo *info = (*meta)->info;
1907 gboolean do_copy = FALSE;
1909 if (gst_meta_api_type_has_tag (info->api, _gst_meta_tag_memory)) {
1910 /* never call the transform_meta with memory specific metadata */
1911 GST_DEBUG_OBJECT (encoder, "not copying memory specific metadata %s",
1912 g_type_name (info->api));
1914 } else if (klass->transform_meta) {
1915 do_copy = klass->transform_meta (encoder, frame, *meta);
1916 GST_DEBUG_OBJECT (encoder, "transformed metadata %s: copy: %d",
1917 g_type_name (info->api), do_copy);
1920 /* we only copy metadata when the subclass implemented a transform_meta
1921 * function and when it returns %TRUE */
1923 GstMetaTransformCopy copy_data = { FALSE, 0, -1 };
1924 GST_DEBUG_OBJECT (encoder, "copy metadata %s", g_type_name (info->api));
1925 /* simply copy then */
1926 info->transform_func (frame->output_buffer, *meta, inbuf,
1927 _gst_meta_transform_copy, &copy_data);
1933 * gst_video_encoder_finish_frame:
1934 * @encoder: a #GstVideoEncoder
1935 * @frame: (transfer full): an encoded #GstVideoCodecFrame
1937 * @frame must have a valid encoded data buffer, whose metadata fields
1938 * are then appropriately set according to frame data or no buffer at
1939 * all if the frame should be dropped.
1940 * It is subsequently pushed downstream or provided to @pre_push.
1941 * In any case, the frame is considered finished and released.
1943 * After calling this function the output buffer of the frame is to be
1944 * considered read-only. This function will also change the metadata
1947 * Returns: a #GstFlowReturn resulting from sending data downstream
1950 gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
1951 GstVideoCodecFrame * frame)
1953 GstVideoEncoderPrivate *priv = encoder->priv;
1954 GstFlowReturn ret = GST_FLOW_OK;
1955 GstVideoEncoderClass *encoder_class;
1957 gboolean send_headers = FALSE;
/* The very first output frame is marked DISCONT. */
1958 gboolean discont = (frame->presentation_frame_number == 0);
1960 gboolean needs_reconfigure = FALSE;
1962 encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
1964 GST_LOG_OBJECT (encoder,
1965 "finish frame fpn %d", frame->presentation_frame_number);
1967 GST_LOG_OBJECT (encoder, "frame PTS %" GST_TIME_FORMAT
1968 ", DTS %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->pts),
1969 GST_TIME_ARGS (frame->dts));
1971 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
/* Renegotiate if the output state changed or downstream requested it;
 * on failure, re-arm reconfigure and bail with FLUSHING/NOT_NEGOTIATED. */
1973 needs_reconfigure = gst_pad_check_reconfigure (encoder->srcpad);
1974 if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
1975 && needs_reconfigure))) {
1976 if (!gst_video_encoder_negotiate_unlocked (encoder)) {
1977 gst_pad_mark_reconfigure (encoder->srcpad);
1978 if (GST_PAD_IS_FLUSHING (encoder->srcpad))
1979 ret = GST_FLOW_FLUSHING;
1981 ret = GST_FLOW_NOT_NEGOTIATED;
1986 if (G_UNLIKELY (priv->output_state == NULL))
1987 goto no_output_state;
1989 /* Push all pending events that arrived before this frame */
1990 for (l = priv->frames; l; l = l->next) {
1991 GstVideoCodecFrame *tmp = l->data;
/* Events were prepended, so iterate from the tail to preserve order. */
1996 for (k = g_list_last (tmp->events); k; k = k->prev)
1997 gst_video_encoder_push_event (encoder, k->data);
1998 g_list_free (tmp->events);
2006 gst_video_encoder_check_and_push_tags (encoder);
2008 /* no buffer data means this frame is skipped/dropped */
2009 if (!frame->output_buffer) {
2010 GST_DEBUG_OBJECT (encoder, "skipping frame %" GST_TIME_FORMAT,
2011 GST_TIME_ARGS (frame->pts));
/* If this is a sync point, see whether it satisfies a pending
 * force-key-unit request; if so, emit the corresponding downstream
 * force-key-unit event before the buffer. */
2015 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) && priv->force_key_unit) {
2016 GstClockTime stream_time, running_time;
2018 ForcedKeyUnitEvent *fevt = NULL;
2022 gst_segment_to_running_time (&encoder->output_segment, GST_FORMAT_TIME,
2025 GST_OBJECT_LOCK (encoder);
2026 for (l = priv->force_key_unit; l; l = l->next) {
2027 ForcedKeyUnitEvent *tmp = l->data;
2029 /* Skip non-pending keyunits */
2033 /* Exact match using the frame id */
2034 if (frame->system_frame_number == tmp->frame_id) {
2039 /* Simple case, keyunit ASAP */
2040 if (tmp->running_time == GST_CLOCK_TIME_NONE) {
2045 /* Event for before this frame */
2046 if (tmp->running_time <= running_time) {
/* Remove the satisfied request from the list while still locked. */
2053 priv->force_key_unit = g_list_remove (priv->force_key_unit, fevt);
2055 GST_OBJECT_UNLOCK (encoder);
2059 gst_segment_to_stream_time (&encoder->output_segment, GST_FORMAT_TIME,
2062 ev = gst_video_event_new_downstream_force_key_unit
2063 (frame->pts, stream_time, running_time,
2064 fevt->all_headers, fevt->count);
2066 gst_video_encoder_push_event (encoder, ev);
2068 if (fevt->all_headers)
2069 send_headers = TRUE;
2071 GST_DEBUG_OBJECT (encoder,
2072 "Forced key unit: running-time %" GST_TIME_FORMAT
2073 ", all_headers %d, count %u",
2074 GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
2075 forced_key_unit_event_free (fevt);
/* Keyframe bookkeeping and DELTA_UNIT flagging. */
2079 if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
2080 priv->distance_from_sync = 0;
2081 GST_BUFFER_FLAG_UNSET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2082 /* For keyframes, DTS = PTS, if encoder doesn't decide otherwise */
2083 if (!GST_CLOCK_TIME_IS_VALID (frame->dts)) {
2084 frame->dts = frame->pts;
2087 GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
2090 /* DTS is expected monotone ascending,
2091 * so a good guess is the lowest unsent PTS (all being OK) */
2093 GstClockTime min_ts = GST_CLOCK_TIME_NONE;
2094 GstVideoCodecFrame *oframe = NULL;
2095 gboolean seen_none = FALSE;
2097 /* some maintenance regardless */
2098 for (l = priv->frames; l; l = l->next) {
2099 GstVideoCodecFrame *tmp = l->data;
2101 if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
2106 if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
2107 min_ts = tmp->abidata.ABI.ts;
2111 /* save a ts if needed */
2112 if (oframe && oframe != frame) {
2113 oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
2116 /* and set if needed */
2117 if (!GST_CLOCK_TIME_IS_VALID (frame->dts) && !seen_none) {
2118 frame->dts = min_ts;
2119 GST_DEBUG_OBJECT (encoder,
2120 "no valid DTS, using oldest PTS %" GST_TIME_FORMAT,
2121 GST_TIME_ARGS (frame->pts));
2125 frame->distance_from_sync = priv->distance_from_sync;
2126 priv->distance_from_sync++;
/* Stamp the output buffer with the frame's final timing. */
2128 GST_BUFFER_PTS (frame->output_buffer) = frame->pts;
2129 GST_BUFFER_DTS (frame->output_buffer) = frame->dts;
2130 GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
2132 /* update rate estimate */
2133 priv->bytes += gst_buffer_get_size (frame->output_buffer);
2134 if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
2135 priv->time += frame->duration;
2137 /* better none than nothing valid */
2138 priv->time = GST_CLOCK_TIME_NONE;
/* Send stream headers first when requested (forced key unit with
 * all_headers, or new headers set by the subclass). */
2141 if (G_UNLIKELY (send_headers || priv->new_headers)) {
2142 GList *tmp, *copy = NULL;
2144 GST_DEBUG_OBJECT (encoder, "Sending headers");
2146 /* First make all buffers metadata-writable */
2147 for (tmp = priv->headers; tmp; tmp = tmp->next) {
2148 GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
2150 copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
2152 g_list_free (priv->headers);
2153 priv->headers = copy;
2155 for (tmp = priv->headers; tmp; tmp = tmp->next) {
2156 GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
2158 priv->bytes += gst_buffer_get_size (tmpbuf);
2159 if (G_UNLIKELY (discont)) {
2160 GST_LOG_OBJECT (encoder, "marking discont");
2161 GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
2165 gst_pad_push (encoder->srcpad, gst_buffer_ref (tmpbuf));
2167 priv->new_headers = FALSE;
2170 if (G_UNLIKELY (discont)) {
2171 GST_LOG_OBJECT (encoder, "marking discont");
2172 GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
/* Give the subclass a last chance before the push. */
2175 if (encoder_class->pre_push)
2176 ret = encoder_class->pre_push (encoder, frame);
2178 if (encoder_class->transform_meta) {
2179 if (G_LIKELY (frame->input_buffer)) {
2182 data.encoder = encoder;
2184 gst_buffer_foreach_meta (frame->input_buffer, foreach_metadata, &data);
2186 GST_WARNING_OBJECT (encoder,
2187 "Can't copy metadata because input frame disappeared");
2191 /* Get an additional ref to the buffer, which is going to be pushed
2192 * downstream, the original ref is owned by the frame */
2193 buffer = gst_buffer_ref (frame->output_buffer);
2195 /* Release frame so the buffer is writable when we push it downstream
2196 * if possible, i.e. if the subclass does not hold additional references
2199 gst_video_encoder_release_frame (encoder, frame);
2202 if (ret == GST_FLOW_OK)
2203 ret = gst_pad_push (encoder->srcpad, buffer);
/* Common exit: frame released, stream lock dropped. */
2208 gst_video_encoder_release_frame (encoder, frame);
2210 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
/* ERROR: no output state configured by the subclass. */
2217 gst_video_encoder_release_frame (encoder, frame);
2218 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2219 GST_ERROR_OBJECT (encoder, "Output state was not configured");
2220 return GST_FLOW_ERROR;
2225 * gst_video_encoder_get_output_state:
2226 * @encoder: a #GstVideoEncoder
2228 * Get the current #GstVideoCodecState
2230 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
2232 GstVideoCodecState *
2233 gst_video_encoder_get_output_state (GstVideoEncoder * encoder)
2235 GstVideoCodecState *state;
/* Take a ref under the stream lock so the caller gets a stable state. */
2237 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2238 state = gst_video_codec_state_ref (encoder->priv->output_state);
2239 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2245 * gst_video_encoder_set_output_state:
2246 * @encoder: a #GstVideoEncoder
2247 * @caps: (transfer full): the #GstCaps to use for the output
2248 * @reference: (allow-none) (transfer none): An optional reference @GstVideoCodecState
2250 * Creates a new #GstVideoCodecState with the specified caps as the output state
2252 * Any previously set output state on @encoder will be replaced by the newly
2255 * The specified @caps should not contain any resolution, pixel-aspect-ratio,
2256 * framerate, codec-data, .... Those should be specified instead in the returned
2257 * #GstVideoCodecState.
2259 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
2260 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
2263 * If the subclass wishes to override some fields from the output state (like
2264 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
2266 * The new output state will only take effect (set on pads and buffers) starting
2267 * from the next call to #gst_video_encoder_finish_frame().
2269 * Returns: (transfer full): the newly configured output state.
2271 GstVideoCodecState *
2272 gst_video_encoder_set_output_state (GstVideoEncoder * encoder, GstCaps * caps,
2273 GstVideoCodecState * reference)
2275 GstVideoEncoderPrivate *priv = encoder->priv;
2276 GstVideoCodecState *state;
2278 g_return_val_if_fail (caps != NULL, NULL);
2280 state = _new_output_state (caps, reference);
/* Replace the previous output state and flag the change so negotiation
 * happens on the next finish_frame()/allocation. */
2282 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2283 if (priv->output_state)
2284 gst_video_codec_state_unref (priv->output_state);
2285 priv->output_state = gst_video_codec_state_ref (state);
2287 priv->output_state_changed = TRUE;
2288 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2294 * gst_video_encoder_set_latency:
2295 * @encoder: a #GstVideoEncoder
2296 * @min_latency: minimum latency
2297 * @max_latency: maximum latency
2299 * Informs baseclass of encoding latency. @min_latency must be valid and
2299 * @max_latency must not be smaller than @min_latency.
2302 gst_video_encoder_set_latency (GstVideoEncoder * encoder,
2303 GstClockTime min_latency, GstClockTime max_latency)
2305 g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
2306 g_return_if_fail (max_latency >= min_latency);
/* Latency fields are read by the LATENCY query handler under the same
 * object lock. */
2308 GST_OBJECT_LOCK (encoder);
2309 encoder->priv->min_latency = min_latency;
2310 encoder->priv->max_latency = max_latency;
2311 GST_OBJECT_UNLOCK (encoder);
/* Tell the pipeline to redistribute latency. */
2313 gst_element_post_message (GST_ELEMENT_CAST (encoder),
2314 gst_message_new_latency (GST_OBJECT_CAST (encoder)));
2318 * gst_video_encoder_get_latency:
2319 * @encoder: a #GstVideoEncoder
2320 * @min_latency: (out) (allow-none): address of variable in which to store the
2321 * configured minimum latency, or %NULL
2322 * @max_latency: (out) (allow-none): address of variable in which to store the
2323 * configured maximum latency, or %NULL
2325 * Query the configured encoding latency. Results will be returned via
2326 * @min_latency and @max_latency.
2329 gst_video_encoder_get_latency (GstVideoEncoder * encoder,
2330 GstClockTime * min_latency, GstClockTime * max_latency)
/* Each out parameter is optional; copy under the object lock. */
2332 GST_OBJECT_LOCK (encoder);
2334 *min_latency = encoder->priv->min_latency;
2336 *max_latency = encoder->priv->max_latency;
2337 GST_OBJECT_UNLOCK (encoder);
2341 * gst_video_encoder_get_oldest_frame:
2342 * @encoder: a #GstVideoEncoder
2344 * Get the oldest unfinished pending #GstVideoCodecFrame
2346 * Returns: (transfer full): oldest unfinished pending #GstVideoCodecFrame
2348 GstVideoCodecFrame *
2349 gst_video_encoder_get_oldest_frame (GstVideoEncoder * encoder)
2351 GstVideoCodecFrame *frame = NULL;
/* Frames are appended in arrival order, so the head is the oldest. */
2353 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2354 if (encoder->priv->frames)
2355 frame = gst_video_codec_frame_ref (encoder->priv->frames->data);
2356 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2358 return (GstVideoCodecFrame *) frame;
2362 * gst_video_encoder_get_frame:
2363 * @encoder: a #GstVideoEncoder
2364 * @frame_number: system_frame_number of a frame
2366 * Get a pending unfinished #GstVideoCodecFrame
2368 * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
2370 GstVideoCodecFrame *
2371 gst_video_encoder_get_frame (GstVideoEncoder * encoder, int frame_number)
2374 GstVideoCodecFrame *frame = NULL;
2376 GST_DEBUG_OBJECT (encoder, "frame_number : %d", frame_number);
/* Linear scan of the pending frames; returns a new ref or NULL. */
2378 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2379 for (g = encoder->priv->frames; g; g = g->next) {
2380 GstVideoCodecFrame *tmp = g->data;
2382 if (tmp->system_frame_number == frame_number) {
2383 frame = gst_video_codec_frame_ref (tmp);
2387 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2393 * gst_video_encoder_get_frames:
2394 * @encoder: a #GstVideoEncoder
2396 * Get all pending unfinished #GstVideoCodecFrame
2398 * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
2401 gst_video_encoder_get_frames (GstVideoEncoder * encoder)
/* Shallow-copy the list and ref every frame so the caller owns both the
 * list and each element. */
2405 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2406 frames = g_list_copy (encoder->priv->frames);
2407 g_list_foreach (frames, (GFunc) gst_video_codec_frame_ref, NULL);
2408 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2414 * gst_video_encoder_merge_tags:
2415 * @encoder: a #GstVideoEncoder
2416 * @tags: (allow-none): a #GstTagList to merge, or NULL to unset
2417 * previously-set tags
2418 * @mode: the #GstTagMergeMode to use, usually #GST_TAG_MERGE_REPLACE
2420 * Sets the video encoder tags and how they should be merged with any
2421 * upstream stream tags. This will override any tags previously-set
2422 * with gst_video_encoder_merge_tags().
2424 * Note that this is provided for convenience, and the subclass is
2425 * not required to use this and can still do tag handling on its own.
2430 gst_video_encoder_merge_tags (GstVideoEncoder * encoder,
2431 const GstTagList * tags, GstTagMergeMode mode)
2433 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
2434 g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));
2435 g_return_if_fail (tags == NULL || mode != GST_TAG_MERGE_UNDEFINED);
/* Replace the stored tags/merge-mode and flag the change so the updated
 * tag event gets pushed on the next output. NULL @tags resets to the
 * default (no encoder tags, APPEND mode). */
2437 GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
2438 if (encoder->priv->tags != tags) {
2439 if (encoder->priv->tags) {
2440 gst_tag_list_unref (encoder->priv->tags);
2441 encoder->priv->tags = NULL;
2442 encoder->priv->tags_merge_mode = GST_TAG_MERGE_APPEND;
2445 encoder->priv->tags = gst_tag_list_ref ((GstTagList *) tags);
2446 encoder->priv->tags_merge_mode = mode;
2449 GST_DEBUG_OBJECT (encoder, "setting encoder tags to %" GST_PTR_FORMAT,
2451 encoder->priv->tags_changed = TRUE;
2453 GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
2457 * gst_video_encoder_get_allocator:
2458 * @encoder: a #GstVideoEncoder
2459 * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
2461 * @params: (out) (allow-none) (transfer full): the
2462 * #GstAllocatorParams of @allocator
2464 * Lets #GstVideoEncoder sub-classes to know the memory @allocator
2465 * used by the base class and its @params.
2467 * Unref the @allocator after use it.
2470 gst_video_encoder_get_allocator (GstVideoEncoder * encoder,
2471 GstAllocator ** allocator, GstAllocationParams * params)
2473 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
/* Both out parameters are optional; the allocator is returned with an
 * extra ref (or NULL when none has been negotiated yet). */
2476 *allocator = encoder->priv->allocator ?
2477 gst_object_ref (encoder->priv->allocator) : NULL;
2480 *params = encoder->priv->params;
2484 * gst_video_encoder_set_min_pts:
2485 * @encoder: a #GstVideoEncoder
2486 * @min_pts: minimal PTS that will be passed to handle_frame
2488 * Request minimal value for PTS passed to handle_frame.
2490 * For streams with reordered frames this can be used to ensure that there
2491 * is enough time to accommodate first DTS, which may be less than first PTS
2496 gst_video_encoder_set_min_pts (GstVideoEncoder * encoder, GstClockTime min_pts)
2498 g_return_if_fail (GST_IS_VIDEO_ENCODER (encoder));
/* Reset the adjustment; the chain function recomputes it lazily from the
 * first input timestamp below the new minimum. */
2499 encoder->priv->min_pts = min_pts;
2500 encoder->priv->time_adjustment = GST_CLOCK_TIME_NONE;