1 /* Generic video aggregator plugin
2 * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
3 * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:gstvideoaggregator
23 * @title: GstVideoAggregator
24 * @short_description: Base class for video aggregators
26 * VideoAggregator can accept AYUV, ARGB and BGRA video streams. For each of the requested
27 * sink pads it will compare the incoming geometry and framerate to define the
28 * output parameters. Indeed output video frames will have the geometry of the
29 * biggest incoming video stream and the framerate of the fastest incoming one.
31 * VideoAggregator will do colorspace conversion.
33 * Zorder for each input stream can be configured on the
34 * #GstVideoAggregatorPad.
44 #include "gstvideoaggregator.h"
46 GST_DEBUG_CATEGORY_STATIC (gst_video_aggregator_debug);
47 #define GST_CAT_DEFAULT gst_video_aggregator_debug
49 /* Needed prototypes */
50 static void gst_video_aggregator_reset_qos (GstVideoAggregator * vagg);
52 /****************************************
53 * GstVideoAggregatorPad implementation *
54 ****************************************/
56 #define DEFAULT_PAD_ZORDER 0
57 #define DEFAULT_PAD_REPEAT_AFTER_EOS FALSE
62 PROP_PAD_REPEAT_AFTER_EOS,
66 struct _GstVideoAggregatorPadPrivate
69 GstVideoFrame prepared_frame;
73 gboolean repeat_after_eos;
75 /* Subclasses can force an alpha channel in the (input thus output)
76 * colorspace format */
79 GstClockTime start_time;
80 GstClockTime end_time;
82 GstVideoInfo pending_vinfo;
86 G_DEFINE_TYPE (GstVideoAggregatorPad, gst_video_aggregator_pad,
87 GST_TYPE_AGGREGATOR_PAD);
/* GObject get_property: reads the pad's "zorder" or "repeat-after-eos"
 * property into @value; warns on unknown property ids. */
90 gst_video_aggregator_pad_get_property (GObject * object, guint prop_id,
91 GValue * value, GParamSpec * pspec)
93 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);
97 g_value_set_uint (value, pad->priv->zorder);
99 case PROP_PAD_REPEAT_AFTER_EOS:
100 g_value_set_boolean (value, pad->priv->repeat_after_eos);
103 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GCompareFunc ordering sink pads by their zorder property (ascending).
 * NOTE(review): zorder is a guint, so the subtraction is unsigned before
 * conversion to the (presumably gint) return type — assumes zorder values
 * are small enough not to wrap; confirm against callers. */
109 pad_zorder_compare (const GstVideoAggregatorPad * pad1,
110 const GstVideoAggregatorPad * pad2)
112 return pad1->priv->zorder - pad2->priv->zorder;
/* GObject set_property: "zorder" is updated under the aggregator's object
 * lock and the element's sinkpad list is re-sorted so aggregation visits
 * pads in zorder; "repeat-after-eos" is stored directly.
 * The parent reference taken via gst_pad_get_parent() is released at the
 * end of the function. */
116 gst_video_aggregator_pad_set_property (GObject * object, guint prop_id,
117 const GValue * value, GParamSpec * pspec)
119 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);
120 GstVideoAggregator *vagg =
121 GST_VIDEO_AGGREGATOR (gst_pad_get_parent (GST_PAD (pad)));
124 case PROP_PAD_ZORDER:
125 GST_OBJECT_LOCK (vagg);
126 pad->priv->zorder = g_value_get_uint (value);
127 GST_ELEMENT (vagg)->sinkpads = g_list_sort (GST_ELEMENT (vagg)->sinkpads,
128 (GCompareFunc) pad_zorder_compare);
129 GST_OBJECT_UNLOCK (vagg);
131 case PROP_PAD_REPEAT_AFTER_EOS:
132 pad->priv->repeat_after_eos = g_value_get_boolean (value);
135 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* drop the ref taken by gst_pad_get_parent() above */
139 gst_object_unref (vagg);
/* GstAggregatorPad::flush vfunc: resets the aggregator's QoS state, drops
 * any queued buffer on this pad and invalidates its start/end timestamps
 * (set to GST_CLOCK_TIME_NONE via -1). */
143 _flush_pad (GstAggregatorPad * aggpad, GstAggregator * aggregator)
145 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (aggregator);
146 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
148 gst_video_aggregator_reset_qos (vagg);
149 gst_buffer_replace (&pad->priv->buffer, NULL);
150 pad->priv->start_time = -1;
151 pad->priv->end_time = -1;
/* GstAggregatorPad::skip_buffer vfunc: returns TRUE when @buffer can be
 * dropped without aggregating it, i.e. when the buffer's running-time end
 * is already before the output segment position (both converted to running
 * time on the source pad's segment). Only applies when the segment position
 * and the buffer duration are valid. */
157 gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
158 GstAggregator * agg, GstBuffer * buffer)
160 gboolean ret = FALSE;
161 GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
163 if (agg_segment->position != GST_CLOCK_TIME_NONE
164 && GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
165 GstClockTime start_time =
166 gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
167 GST_BUFFER_PTS (buffer));
168 GstClockTime end_time = start_time + GST_BUFFER_DURATION (buffer);
169 GstClockTime output_start_running_time =
170 gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
171 agg_segment->position);
/* buffer ends strictly before the next output frame starts -> skippable */
173 ret = end_time < output_start_running_time;
/* Default GstVideoAggregatorPad::prepare_frame vfunc: maps @buffer
 * read-only into @prepared_frame using the pad's negotiated video info;
 * warns (and presumably fails — the failure path is below this view) if
 * the mapping is not possible. */
180 gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
181 GstVideoAggregator * vagg, GstBuffer * buffer,
182 GstVideoFrame * prepared_frame)
184 if (!gst_video_frame_map (prepared_frame, &pad->info, buffer, GST_MAP_READ)) {
185 GST_WARNING_OBJECT (vagg, "Could not map input buffer");
/* Default GstVideoAggregatorPad::clean_frame vfunc: unmaps the prepared
 * frame (if one was mapped) and zeroes the structure so a stale buffer
 * pointer cannot be reused. */
193 gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
194 GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
196 if (prepared_frame->buffer) {
197 gst_video_frame_unmap (prepared_frame);
198 memset (prepared_frame, 0, sizeof (GstVideoFrame));
/* Class init for GstVideoAggregatorPad: wires up the property accessors,
 * installs the "zorder" and "repeat-after-eos" properties, registers the
 * private struct, and sets the default flush/skip_buffer aggregator-pad
 * vfuncs plus the default prepare_frame/clean_frame implementations. */
203 gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
205 GObjectClass *gobject_class = (GObjectClass *) klass;
206 GstAggregatorPadClass *aggpadclass = (GstAggregatorPadClass *) klass;
208 gobject_class->set_property = gst_video_aggregator_pad_set_property;
209 gobject_class->get_property = gst_video_aggregator_pad_get_property;
211 g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
212 g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
213 0, G_MAXUINT, DEFAULT_PAD_ZORDER,
214 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
215 g_object_class_install_property (gobject_class, PROP_PAD_REPEAT_AFTER_EOS,
216 g_param_spec_boolean ("repeat-after-eos", "Repeat After EOS",
217 "Repeat the " "last frame after EOS until all pads are EOS",
218 DEFAULT_PAD_REPEAT_AFTER_EOS,
219 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
221 g_type_class_add_private (klass, sizeof (GstVideoAggregatorPadPrivate));
223 aggpadclass->flush = GST_DEBUG_FUNCPTR (_flush_pad);
224 aggpadclass->skip_buffer =
225 GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_skip_buffer);
226 klass->prepare_frame =
227 GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_prepare_frame);
228 klass->clean_frame = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_clean_frame);
/* Instance init: fetches the private struct and sets property defaults;
 * the prepared frame is zeroed so its buffer pointer starts NULL. */
232 gst_video_aggregator_pad_init (GstVideoAggregatorPad * vaggpad)
235 G_TYPE_INSTANCE_GET_PRIVATE (vaggpad, GST_TYPE_VIDEO_AGGREGATOR_PAD,
236 GstVideoAggregatorPadPrivate);
238 vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
239 vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
240 memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
244 * gst_video_aggregator_pad_has_current_buffer:
245 * @pad: a #GstVideoAggregatorPad
247 * Checks if the pad currently has a buffer queued that is going to be used
248 * for the current output frame.
250 * This must only be called from the aggregate_frames() virtual method,
251 * or from the prepare_frame() virtual method of the aggregator pads.
253 * Returns: %TRUE if the pad has currently a buffer queued
256 gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad * pad)
258 g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), FALSE);
/* non-NULL queued buffer means this pad contributes to the current frame */
260 return pad->priv->buffer != NULL;
264 * gst_video_aggregator_pad_get_current_buffer:
265 * @pad: a #GstVideoAggregatorPad
267 * Returns the currently queued buffer that is going to be used
268 * for the current output frame.
270 * This must only be called from the aggregate_frames() virtual method,
271 * or from the prepare_frame() virtual method of the aggregator pads.
273 * The return value is only valid until aggregate_frames() or prepare_frames()
276 * Returns: (transfer none): The currently queued buffer
279 gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad * pad)
281 g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
283 return pad->priv->buffer;
287 * gst_video_aggregator_pad_get_prepared_frame:
288 * @pad: a #GstVideoAggregatorPad
290 * Returns the currently prepared video frame that has to be aggregated into
291 * the current output frame.
293 * This must only be called from the aggregate_frames() virtual method,
294 * or from the prepare_frame() virtual method of the aggregator pads.
296 * The return value is only valid until aggregate_frames() or prepare_frames()
299 * Returns: (transfer none): The currently prepared video frame
302 gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad * pad)
304 g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);
/* a NULL buffer pointer means no frame has been mapped for this cycle */
306 return pad->priv->prepared_frame.buffer ? &pad->priv->prepared_frame : NULL;
310 * gst_video_aggregator_pad_set_needs_alpha:
311 * @pad: a #GstVideoAggregatorPad
312 * @needs_alpha: %TRUE if this pad requires alpha output
314 * Allows selecting that this pad requires an output format with alpha
318 gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad * pad,
319 gboolean needs_alpha)
321 g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad));
/* only act on an actual change: update the flag and force the source pad
 * to renegotiate so the new alpha requirement is taken into account */
323 if (needs_alpha != pad->priv->needs_alpha) {
325 GST_AGGREGATOR (gst_object_get_parent (GST_OBJECT (pad)));
326 pad->priv->needs_alpha = needs_alpha;
328 gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (agg));
329 gst_object_unref (agg);
334 /****************************************
335 * GstVideoAggregatorConvertPad implementation *
336 ****************************************/
341 PROP_CONVERT_PAD_CONVERTER_CONFIG,
344 struct _GstVideoAggregatorConvertPadPrivate
346 /* Converter, if NULL no conversion is done */
347 GstVideoConverter *convert;
349 /* caps used for conversion if needed */
350 GstVideoInfo conversion_info;
351 GstBuffer *converted_buffer;
353 GstStructure *converter_config;
354 gboolean converter_config_changed;
357 G_DEFINE_TYPE (GstVideoAggregatorConvertPad, gst_video_aggregator_convert_pad,
358 GST_TYPE_VIDEO_AGGREGATOR_PAD);
/* GObject finalize for GstVideoAggregatorConvertPad: frees the video
 * converter and the converter-config structure if present, then chains up
 * to the parent class finalize. */
361 gst_video_aggregator_convert_pad_finalize (GObject * o)
363 GstVideoAggregatorConvertPad *vaggpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (o);
365 if (vaggpad->priv->convert)
366 gst_video_converter_free (vaggpad->priv->convert);
367 vaggpad->priv->convert = NULL;
369 if (vaggpad->priv->converter_config)
370 gst_structure_free (vaggpad->priv->converter_config);
371 vaggpad->priv->converter_config = NULL;
373 G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
/* GstVideoAggregatorPad::update_conversion_info vfunc: marks the converter
 * configuration dirty so prepare_frame() rebuilds it lazily on next use. */
377 gst_video_aggregator_convert_pad_update_conversion_info_internal
378 (GstVideoAggregatorPad * vpad)
380 GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
382 pad->priv->converter_config_changed = TRUE;
/* prepare_frame vfunc for the converting pad subclass.
 *
 * First (re)creates the GstVideoConverter if the configuration was marked
 * dirty: asks the subclass' create_conversion_info() for the wanted output
 * info and builds a converter only when it differs from the pad's input
 * info. Then maps the input buffer and, if a converter exists, allocates a
 * scratch buffer, converts into it and hands the converted frame out as
 * the prepared frame; otherwise the mapped input frame is used directly. */
386 gst_video_aggregator_convert_pad_prepare_frame (GstVideoAggregatorPad * vpad,
387 GstVideoAggregator * vagg, GstBuffer * buffer,
388 GstVideoFrame * prepared_frame)
390 GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
393 /* Update/create converter as needed */
394 if (pad->priv->converter_config_changed) {
395 GstVideoAggregatorConvertPadClass *klass =
396 GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
397 GstVideoInfo conversion_info;
399 gst_video_info_init (&conversion_info);
400 klass->create_conversion_info (pad, vagg, &conversion_info);
/* finfo == NULL means the subclass could not decide yet */
401 if (conversion_info.finfo == NULL)
403 pad->priv->converter_config_changed = FALSE;
/* rebuild converter only when the target info actually changed */
405 if (!pad->priv->conversion_info.finfo
406 || !gst_video_info_is_equal (&conversion_info,
407 &pad->priv->conversion_info)) {
408 pad->priv->conversion_info = conversion_info;
410 if (pad->priv->convert)
411 gst_video_converter_free (pad->priv->convert);
412 pad->priv->convert = NULL;
/* no converter needed when input info already matches the target */
414 if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
416 gst_video_converter_new (&vpad->info, &pad->priv->conversion_info,
417 pad->priv->converter_config ? gst_structure_copy (pad->
418 priv->converter_config) : NULL);
419 if (!pad->priv->convert) {
420 GST_WARNING_OBJECT (pad, "No path found for conversion");
424 GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
425 GST_VIDEO_INFO_FORMAT (&vpad->info),
426 GST_VIDEO_INFO_FORMAT (&pad->priv->conversion_info));
428 GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
433 if (!gst_video_frame_map (&frame, &vpad->info, buffer, GST_MAP_READ)) {
434 GST_WARNING_OBJECT (vagg, "Could not map input buffer");
438 if (pad->priv->convert) {
439 GstVideoFrame converted_frame;
440 GstBuffer *converted_buf = NULL;
/* 15-byte alignment for the scratch buffer (see GstAllocationParams) */
441 static GstAllocationParams params = { 0, 15, 0, 0, };
445 /* We wait until here to set the conversion infos, in case vagg->info changed */
/* allocate at least max(conversion size, output size) */
446 converted_size = pad->priv->conversion_info.size;
447 outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
448 converted_size = converted_size > outsize ? converted_size : outsize;
449 converted_buf = gst_buffer_new_allocate (NULL, converted_size, ¶ms);
451 if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
452 converted_buf, GST_MAP_READWRITE)) {
453 GST_WARNING_OBJECT (vagg, "Could not map converted frame");
455 gst_video_frame_unmap (&frame);
459 gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
/* keep a ref so clean_frame() can release the scratch buffer later */
460 pad->priv->converted_buffer = converted_buf;
461 gst_video_frame_unmap (&frame);
462 *prepared_frame = converted_frame;
464 *prepared_frame = frame;
/* clean_frame vfunc for the converting pad: unmaps and zeroes the prepared
 * frame, then drops the scratch buffer allocated by prepare_frame() (if
 * a conversion took place). */
471 gst_video_aggregator_convert_pad_clean_frame (GstVideoAggregatorPad * vpad,
472 GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
474 GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
476 if (prepared_frame->buffer) {
477 gst_video_frame_unmap (prepared_frame);
478 memset (prepared_frame, 0, sizeof (GstVideoFrame));
481 if (pad->priv->converted_buffer) {
482 gst_buffer_unref (pad->priv->converted_buffer);
483 pad->priv->converted_buffer = NULL;
/* Default create_conversion_info: decides which GstVideoInfo frames on
 * this pad should be converted to. If format, colorimetry or chroma-site
 * differ from the aggregator's output info, build a target info using the
 * output format/colorimetry/chroma-site but the pad's own width/height
 * (no rescaling), PAR, framerate, flags and interlace mode. Otherwise the
 * pad's own info is used unchanged (no conversion).
 * Bails out early if either the pad or the aggregator has no negotiated
 * format yet. Colorimetry strings are compared because GstVideoColorimetry
 * has no direct equality helper here; both strings are freed at the end. */
488 gst_video_aggregator_convert_pad_create_conversion_info
489 (GstVideoAggregatorConvertPad * pad, GstVideoAggregator * agg,
490 GstVideoInfo * convert_info)
492 GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
493 gchar *colorimetry, *best_colorimetry;
494 const gchar *chroma, *best_chroma;
496 g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
497 g_return_if_fail (convert_info != NULL);
499 if (!vpad->info.finfo
500 || GST_VIDEO_INFO_FORMAT (&vpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
505 || GST_VIDEO_INFO_FORMAT (&agg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
509 colorimetry = gst_video_colorimetry_to_string (&vpad->info.colorimetry);
510 chroma = gst_video_chroma_to_string (vpad->info.chroma_site);
512 best_colorimetry = gst_video_colorimetry_to_string (&agg->info.colorimetry);
513 best_chroma = gst_video_chroma_to_string (agg->info.chroma_site);
515 if (GST_VIDEO_INFO_FORMAT (&agg->info) != GST_VIDEO_INFO_FORMAT (&vpad->info)
516 || g_strcmp0 (colorimetry, best_colorimetry)
517 || g_strcmp0 (chroma, best_chroma)) {
518 GstVideoInfo tmp_info;
520 /* Initialize with the wanted video format and our original width and
521 * height as we don't want to rescale. Then copy over the wanted
522 * colorimetry, and chroma-site and our current pixel-aspect-ratio
523 * and other relevant fields.
525 gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (&agg->info),
526 vpad->info.width, vpad->info.height);
527 tmp_info.chroma_site = agg->info.chroma_site;
528 tmp_info.colorimetry = agg->info.colorimetry;
529 tmp_info.par_n = vpad->info.par_n;
530 tmp_info.par_d = vpad->info.par_d;
531 tmp_info.fps_n = vpad->info.fps_n;
532 tmp_info.fps_d = vpad->info.fps_d;
533 tmp_info.flags = vpad->info.flags;
534 tmp_info.interlace_mode = vpad->info.interlace_mode;
536 *convert_info = tmp_info;
538 *convert_info = vpad->info;
541 g_free (colorimetry);
542 g_free (best_colorimetry);
/* GObject get_property: returns a copy of the boxed "converter-config"
 * structure (if set) under the pad's object lock. */
546 gst_video_aggregator_convert_pad_get_property (GObject * object, guint prop_id,
547 GValue * value, GParamSpec * pspec)
549 GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
552 case PROP_CONVERT_PAD_CONVERTER_CONFIG:
553 GST_OBJECT_LOCK (pad);
554 if (pad->priv->converter_config)
555 g_value_set_boxed (value, pad->priv->converter_config);
556 GST_OBJECT_UNLOCK (pad);
559 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject set_property: replaces the "converter-config" structure under
 * the object lock and flags the converter for lazy recreation. */
565 gst_video_aggregator_convert_pad_set_property (GObject * object, guint prop_id,
566 const GValue * value, GParamSpec * pspec)
568 GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);
571 case PROP_CONVERT_PAD_CONVERTER_CONFIG:
572 GST_OBJECT_LOCK (pad);
573 if (pad->priv->converter_config)
574 gst_structure_free (pad->priv->converter_config);
575 pad->priv->converter_config = g_value_dup_boxed (value);
576 pad->priv->converter_config_changed = TRUE;
577 GST_OBJECT_UNLOCK (pad);
580 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Class init for GstVideoAggregatorConvertPad: installs finalize and the
 * property accessors, the "converter-config" property, the private struct,
 * and overrides the pad vfuncs (update_conversion_info, prepare_frame,
 * clean_frame) plus the default create_conversion_info implementation. */
586 gst_video_aggregator_convert_pad_class_init (GstVideoAggregatorConvertPadClass *
589 GObjectClass *gobject_class = (GObjectClass *) klass;
590 GstVideoAggregatorPadClass *vaggpadclass =
591 (GstVideoAggregatorPadClass *) klass;
593 gobject_class->finalize = gst_video_aggregator_convert_pad_finalize;
594 gobject_class->get_property =
595 GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_get_property);
596 gobject_class->set_property =
597 GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_set_property);
599 g_type_class_add_private (klass,
600 sizeof (GstVideoAggregatorConvertPadPrivate));
602 g_object_class_install_property (gobject_class,
603 PROP_CONVERT_PAD_CONVERTER_CONFIG, g_param_spec_boxed ("converter-config",
604 "Converter configuration",
605 "A GstStructure describing the configuration that should be used "
606 "when scaling and converting this pad's video frames",
607 GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
609 vaggpadclass->update_conversion_info =
611 (gst_video_aggregator_convert_pad_update_conversion_info_internal);
612 vaggpadclass->prepare_frame =
613 GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_prepare_frame);
614 vaggpadclass->clean_frame =
615 GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_clean_frame);
617 klass->create_conversion_info =
618 gst_video_aggregator_convert_pad_create_conversion_info;
/* Instance init: fetches the private struct and clears all conversion
 * state (no converter, no scratch buffer, no config). */
622 gst_video_aggregator_convert_pad_init (GstVideoAggregatorConvertPad * vaggpad)
625 G_TYPE_INSTANCE_GET_PRIVATE (vaggpad,
626 GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD,
627 GstVideoAggregatorConvertPadPrivate);
629 vaggpad->priv->converted_buffer = NULL;
630 vaggpad->priv->convert = NULL;
631 vaggpad->priv->converter_config = NULL;
632 vaggpad->priv->converter_config_changed = FALSE;
637 * gst_video_aggregator_convert_pad_update_conversion_info:
638 * @pad: a #GstVideoAggregatorPad
640 * Requests the pad to check and update the converter before the next usage to
641 * update for any changes that have happened.
644 void gst_video_aggregator_convert_pad_update_conversion_info
645 (GstVideoAggregatorConvertPad * pad)
647 g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
/* lazily recreated in prepare_frame() */
649 pad->priv->converter_config_changed = TRUE;
652 /**************************************
653 * GstVideoAggregator implementation *
654 **************************************/
656 #define GST_VIDEO_AGGREGATOR_GET_LOCK(vagg) (&GST_VIDEO_AGGREGATOR(vagg)->priv->lock)
658 #define GST_VIDEO_AGGREGATOR_LOCK(vagg) G_STMT_START { \
659 GST_LOG_OBJECT (vagg, "Taking EVENT lock from thread %p", \
661 g_mutex_lock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg)); \
662 GST_LOG_OBJECT (vagg, "Took EVENT lock from thread %p", \
666 #define GST_VIDEO_AGGREGATOR_UNLOCK(vagg) G_STMT_START { \
667 GST_LOG_OBJECT (vagg, "Releasing EVENT lock from thread %p", \
669 g_mutex_unlock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg)); \
670 GST_LOG_OBJECT (vagg, "Took EVENT lock from thread %p", \
675 struct _GstVideoAggregatorPrivate
677 /* Lock to prevent the state to change while aggregating */
680 /* Current downstream segment */
681 GstClockTime ts_offset;
686 GstClockTime earliest_time;
687 guint64 qos_processed, qos_dropped;
690 GstCaps *current_caps;
695 /* Can't use the G_DEFINE_TYPE macros because we need the
696 * videoaggregator class in the _init to be able to set
697 * the sink pad non-alpha caps. Using the G_DEFINE_TYPE there
698 * seems to be no way of getting the real class being initialized */
699 static void gst_video_aggregator_init (GstVideoAggregator * self,
700 GstVideoAggregatorClass * klass);
701 static void gst_video_aggregator_class_init (GstVideoAggregatorClass * klass);
702 static gpointer gst_video_aggregator_parent_class = NULL;
/* Hand-rolled GType registration (instead of G_DEFINE_TYPE) — see the
 * comment above: the real class must be visible during _init. Registers
 * GstVideoAggregator as an abstract subtype of GstAggregator, guarded by
 * g_once so registration happens exactly once. */
705 gst_video_aggregator_get_type (void)
707 static volatile gsize g_define_type_id_volatile = 0;
709 if (g_once_init_enter (&g_define_type_id_volatile)) {
710 GType g_define_type_id = g_type_register_static_simple (GST_TYPE_AGGREGATOR,
711 g_intern_static_string ("GstVideoAggregator"),
712 sizeof (GstVideoAggregatorClass),
713 (GClassInitFunc) gst_video_aggregator_class_init,
714 sizeof (GstVideoAggregator),
715 (GInstanceInitFunc) gst_video_aggregator_init,
716 (GTypeFlags) G_TYPE_FLAG_ABSTRACT);
717 g_once_init_leave (&g_define_type_id_volatile, g_define_type_id);
719 return g_define_type_id_volatile;
/* Default find_best_format implementation: walks all sink pads (under the
 * object lock) and votes for an output format. Each pad's format, with
 * size-dependent fields stripped, is checked against @downstream_caps;
 * acceptable formats accumulate a score (sum of width*height of the pads
 * using them) in a hash table, and the highest-scoring format wins.
 * Alpha handling: @at_least_one_alpha is set as soon as any pad has an
 * alpha format; once alpha is required, non-alpha candidates are ignored,
 * and a pad that needs alpha but has none forces an ARGB fallback. */
723 gst_video_aggregator_find_best_format (GstVideoAggregator * vagg,
724 GstCaps * downstream_caps, GstVideoInfo * best_info,
725 gboolean * at_least_one_alpha)
728 GstCaps *possible_caps;
729 GstVideoAggregatorPad *pad;
730 gboolean need_alpha = FALSE;
731 gint best_format_number = 0;
732 GHashTable *formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);
734 GST_OBJECT_LOCK (vagg);
735 for (tmp = GST_ELEMENT (vagg)->sinkpads; tmp; tmp = tmp->next) {
/* pad not negotiated yet: nothing to vote with */
741 if (!pad->info.finfo)
744 if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
745 *at_least_one_alpha = TRUE;
747 /* If we want alpha, disregard all the other formats */
748 if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))
751 /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
752 if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)
755 possible_caps = gst_video_info_to_caps (&pad->info);
/* drop size/rate fields so only the format family is matched downstream */
757 s = gst_caps_get_structure (possible_caps, 0);
758 gst_structure_remove_fields (s, "width", "height", "framerate",
759 "pixel-aspect-ratio", "interlace-mode", NULL);
761 /* Can downstream accept this format ? */
762 if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
763 gst_caps_unref (possible_caps);
767 gst_caps_unref (possible_caps);
/* weight the vote by the pad's pixel area */
770 GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
771 GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));
772 format_number += pad->info.width * pad->info.height;
774 g_hash_table_replace (formats_table,
775 GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
776 GINT_TO_POINTER (format_number));
778 /* If that pad is the first with alpha, set it as the new best format */
779 if (!need_alpha && (pad->priv->needs_alpha
780 && (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (pad->info.finfo)))) {
782 /* Just fallback to ARGB in case we require alpha but the input pad
783 * does not have alpha.
784 * Do not increment best_format_number in that case. */
785 gst_video_info_set_format (best_info,
786 GST_VIDEO_FORMAT_ARGB,
787 GST_VIDEO_INFO_WIDTH (&pad->info),
788 GST_VIDEO_INFO_HEIGHT (&pad->info));
789 } else if (!need_alpha
790 && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
792 *best_info = pad->info;
793 best_format_number = format_number;
794 } else if (format_number > best_format_number) {
795 *best_info = pad->info;
796 best_format_number = format_number;
799 GST_OBJECT_UNLOCK (vagg);
801 g_hash_table_unref (formats_table);
/* Default GstAggregator::fixate_src_caps: picks the largest width/height
 * among the sink pads and the fastest framerate (compared as a double),
 * then fixates the caps' width, height, framerate and (if present)
 * pixel-aspect-ratio to the nearest allowed values. Pads with zero
 * width/height are skipped; an invalid best framerate falls back to a
 * default (set in a sampled-out line below). */
805 gst_video_aggregator_default_fixate_src_caps (GstAggregator * agg,
808 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
809 gint best_width = -1, best_height = -1;
810 gint best_fps_n = -1, best_fps_d = -1;
811 gdouble best_fps = -1.;
815 GST_OBJECT_LOCK (vagg);
816 for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
817 GstVideoAggregatorPad *mpad = l->data;
822 fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
823 fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
824 width = GST_VIDEO_INFO_WIDTH (&mpad->info);
825 height = GST_VIDEO_INFO_HEIGHT (&mpad->info);
/* unconfigured pad: ignore */
827 if (width == 0 || height == 0)
830 if (best_width < width)
832 if (best_height < height)
833 best_height = height;
838 gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);
840 if (best_fps < cur_fps) {
846 GST_OBJECT_UNLOCK (vagg);
848 if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {
854 caps = gst_caps_make_writable (caps);
855 s = gst_caps_get_structure (caps, 0);
856 gst_structure_fixate_field_nearest_int (s, "width", best_width);
857 gst_structure_fixate_field_nearest_int (s, "height", best_height);
858 gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
860 if (gst_structure_has_field (s, "pixel-aspect-ratio"))
861 gst_structure_fixate_field_nearest_fraction (s, "pixel-aspect-ratio", 1, 1);
862 caps = gst_caps_fixate (caps);
/* Default update_caps: asks the subclass' find_best_format() for the best
 * output video info; if none is found, fixates the incoming caps and uses
 * that. The chosen format/chroma-site/colorimetry are then pinned into a
 * copy of @caps, which is merged (preferred first) with the original caps
 * so downstream can still pick alternatives. */
868 gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
871 GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
872 GstCaps *ret, *best_format_caps;
873 gboolean at_least_one_alpha = FALSE;
874 GstVideoFormat best_format;
875 GstVideoInfo best_info;
878 best_format = GST_VIDEO_FORMAT_UNKNOWN;
879 gst_video_info_init (&best_info);
881 if (vagg_klass->find_best_format) {
882 vagg_klass->find_best_format (vagg, caps, &best_info, &at_least_one_alpha);
884 best_format = GST_VIDEO_INFO_FORMAT (&best_info);
/* fallback: no vote — just fixate downstream caps and use that format */
887 if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
888 GstCaps *tmp = gst_caps_fixate (gst_caps_ref (caps));
889 gst_video_info_from_caps (&best_info, tmp);
890 best_format = GST_VIDEO_INFO_FORMAT (&best_info);
891 gst_caps_unref (tmp);
894 GST_DEBUG_OBJECT (vagg,
895 "The output format will now be : %d with chroma : %s and colorimetry %s",
896 best_format, gst_video_chroma_to_string (best_info.chroma_site),
897 gst_video_colorimetry_to_string (&best_info.colorimetry));
899 best_format_caps = gst_caps_copy (caps);
900 color_name = gst_video_colorimetry_to_string (&best_info.colorimetry);
901 gst_caps_set_simple (best_format_caps, "format", G_TYPE_STRING,
902 gst_video_format_to_string (best_format), "chroma-site", G_TYPE_STRING,
903 gst_video_chroma_to_string (best_info.chroma_site), "colorimetry",
904 G_TYPE_STRING, color_name, NULL);
906 ret = gst_caps_merge (best_format_caps, gst_caps_ref (caps));
/* Default GstAggregator::update_src_caps: verifies at least one sink pad
 * has a configured size; if not, marks the src pad for reconfiguration and
 * returns GST_AGGREGATOR_FLOW_NEED_DATA so aggregate() retries later.
 * Otherwise delegates to the (mandatory) update_caps vfunc. */
912 gst_video_aggregator_default_update_src_caps (GstAggregator * agg,
913 GstCaps * caps, GstCaps ** ret)
915 GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (agg);
916 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
917 gboolean at_least_one_pad_configured = FALSE;
920 GST_OBJECT_LOCK (vagg);
921 for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
922 GstVideoAggregatorPad *mpad = l->data;
924 if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
925 || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
928 at_least_one_pad_configured = TRUE;
930 GST_OBJECT_UNLOCK (vagg);
932 if (!at_least_one_pad_configured) {
933 /* We couldn't decide the output video info because the sinkpads don't have
934 * all the caps yet, so we mark the pad as needing a reconfigure. This
935 * allows aggregate() to skip ahead a bit and try again later. */
936 GST_DEBUG_OBJECT (vagg, "Couldn't decide output video info");
937 gst_pad_mark_reconfigure (agg->srcpad);
938 return GST_AGGREGATOR_FLOW_NEED_DATA;
941 g_assert (vagg_klass->update_caps);
943 *ret = vagg_klass->update_caps (vagg, caps);
/* Default GstAggregator::negotiated_src_caps: applies freshly negotiated
 * output caps. Checks whether any input pad carries alpha (error if the
 * output format cannot represent it), resets the frame counter and QoS on
 * a framerate change, asks every pad to refresh its conversion setup, and
 * finally — if the caps actually changed — stores them, pushes them on the
 * source pad and sets the element latency to one frame duration. */
949 gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
952 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
953 gboolean at_least_one_alpha = FALSE;
954 const GstVideoFormatInfo *finfo;
958 GST_INFO_OBJECT (agg->srcpad, "set src caps: %" GST_PTR_FORMAT, caps);
960 GST_OBJECT_LOCK (vagg);
961 for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
962 GstVideoAggregatorPad *mpad = l->data;
964 if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
965 || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
968 if (mpad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
969 at_least_one_alpha = TRUE;
971 GST_OBJECT_UNLOCK (vagg);
973 if (!gst_video_info_from_caps (&info, caps))
/* framerate changed: restart frame counting; ts_offset is recomputed on
 * the next aggregate cycle (see priv comment above) */
976 if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
977 GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
978 if (GST_AGGREGATOR_PAD (agg->srcpad)->segment.position != -1) {
979 vagg->priv->nframes = 0;
980 /* The timestamp offset will be updated based on the
981 * segment position the next time we aggregate */
982 GST_DEBUG_OBJECT (vagg,
983 "Resetting frame counter because of framerate change");
985 gst_video_aggregator_reset_qos (vagg);
990 finfo = vagg->info.finfo;
992 if (at_least_one_alpha && !(finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
993 GST_ELEMENT_ERROR (vagg, CORE, NEGOTIATION,
994 ("At least one of the input pads contains alpha, but configured caps don't support alpha."),
995 ("Either convert your inputs to not contain alpha or add a videoconvert after the aggregator"));
999 /* Then browse the sinks once more, setting or unsetting conversion if needed */
1000 for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1001 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (l->data);
1002 GstVideoAggregatorPadClass *vaggpad_klass =
1003 GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
1005 if (vaggpad_klass->update_conversion_info) {
1006 vaggpad_klass->update_conversion_info (pad);
/* only push caps and update latency when they really changed */
1010 if (vagg->priv->current_caps == NULL ||
1011 gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
1012 GstClockTime latency;
1014 gst_caps_replace (&vagg->priv->current_caps, caps);
1016 gst_aggregator_set_src_caps (agg, caps);
/* one output frame duration of latency (fps_d / fps_n seconds) */
1017 latency = gst_util_uint64_scale (GST_SECOND,
1018 GST_VIDEO_INFO_FPS_D (&vagg->info), GST_VIDEO_INFO_FPS_N (&vagg->info));
1019 gst_aggregator_set_latency (agg, latency, latency);
/* Looks up the interlace mode of the first negotiated sink pad (optionally
 * skipping @skip_pad) and stores it in @mode; used as a fallback when the
 * output info has no interlace mode yet. */
1026 gst_video_aggregator_get_sinkpads_interlace_mode (GstVideoAggregator * vagg,
1027 GstVideoAggregatorPad * skip_pad, GstVideoInterlaceMode * mode)
1031 for (walk = GST_ELEMENT (vagg)->sinkpads; walk; walk = g_list_next (walk)) {
1032 GstVideoAggregatorPad *vaggpad = walk->data;
1034 if (skip_pad && vaggpad == skip_pad)
1036 if (vaggpad->info.finfo
1037 && GST_VIDEO_INFO_FORMAT (&vaggpad->info) != GST_VIDEO_FORMAT_UNKNOWN) {
1038 *mode = GST_VIDEO_INFO_INTERLACE_MODE (&vaggpad->info);
/* Sink pad setcaps handler: parses @caps into a GstVideoInfo and, under
 * the aggregator lock, rejects caps whose interlace mode conflicts with
 * the current output (or the other pads', as fallback). If the pad had no
 * caps yet the info is applied immediately and the src pad is marked for
 * reconfigure; otherwise the new info is kept pending until the next
 * buffer is picked from the queue, to avoid pairing an old buffer with
 * new caps. */
1046 gst_video_aggregator_pad_sink_setcaps (GstPad * pad, GstObject * parent,
1049 GstVideoAggregator *vagg;
1050 GstVideoAggregatorPad *vaggpad;
1052 gboolean ret = FALSE;
1054 GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
1056 vagg = GST_VIDEO_AGGREGATOR (parent);
1057 vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
1059 if (!gst_video_info_from_caps (&info, caps)) {
1060 GST_DEBUG_OBJECT (pad, "Failed to parse caps");
1064 GST_VIDEO_AGGREGATOR_LOCK (vagg);
1066 GstVideoInterlaceMode pads_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
1067 gboolean has_mode = FALSE;
1069 /* get the current output setting or fallback to other pads settings */
1070 if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
1071 pads_mode = GST_VIDEO_INFO_INTERLACE_MODE (&vagg->info);
1075 gst_video_aggregator_get_sinkpads_interlace_mode (vagg, vaggpad,
/* mixing interlace modes is not supported: reject the caps */
1080 if (pads_mode != GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
1081 GST_ERROR_OBJECT (pad,
1082 "got input caps %" GST_PTR_FORMAT ", but current caps are %"
1083 GST_PTR_FORMAT, caps, vagg->priv->current_caps);
1084 GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
1090 if (!vaggpad->info.finfo ||
1091 GST_VIDEO_INFO_FORMAT (&vaggpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
1092 /* no video info was already set, so this is the first time
1093 * that this pad is getting configured; configure immediately to avoid
1094 * problems with the initial negotiation */
1095 vaggpad->info = info;
1096 gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
1098 /* this pad already had caps but received new ones; keep the new caps
1099 * pending until we pick the next buffer from the queue, otherwise we
1100 * might use an old buffer with the new caps and crash */
1101 vaggpad->priv->pending_vinfo = info;
1102 GST_DEBUG_OBJECT (pad, "delaying caps change");
1106 GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* Return whether any structure in @caps advertises a video format with an
 * alpha channel.  The "format" field may be a single string or a GstValueList
 * of strings; both are handled.
 * NOTE(review): the early returns on the HAS_ALPHA hits and the final return
 * are elided from this excerpt. */
1113 gst_video_aggregator_caps_has_alpha (GstCaps * caps)
1115   guint size = gst_caps_get_size (caps);
1118   for (i = 0; i < size; i++) {
1119     GstStructure *s = gst_caps_get_structure (caps, i);
1120     const GValue *formats = gst_structure_get_value (s, "format");
1123       const GstVideoFormatInfo *info;
1125       if (GST_VALUE_HOLDS_LIST (formats)) {
1126         guint list_size = gst_value_list_get_size (formats);
1129         for (index = 0; index < list_size; index++) {
1130           const GValue *list_item = gst_value_list_get_value (formats, index);
         /* map the format string to its GstVideoFormatInfo descriptor */
1132               gst_video_format_get_info (gst_video_format_from_string
1133               (g_value_get_string (list_item)));
1134           if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
1138       } else if (G_VALUE_HOLDS_STRING (formats)) {
1140             gst_video_format_get_info (gst_video_format_from_string
1141             (g_value_get_string (formats)));
1142         if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
       /* a "format" field of any other GType is a programming error; the
        * warning below only ever runs in builds with assertions disabled */
1146         g_assert_not_reached ();
1147         GST_WARNING ("Unexpected type for video 'format' field: %s",
1148             G_VALUE_TYPE_NAME (formats));
/* Build and return a copy of @caps with every alpha-capable format removed
 * from the "format" field of each structure.  Structures are copied and only
 * appended to the result when at least one non-alpha format remains
 * (the has_format bookkeeping; the check itself is elided from this excerpt).
 * Caller owns the returned caps. */
1159 _get_non_alpha_caps (GstCaps * caps)
1164   size = gst_caps_get_size (caps);
1165   result = gst_caps_new_empty ();
1166   for (i = 0; i < size; i++) {
1167     GstStructure *s = gst_caps_get_structure (caps, i);
1168     const GValue *formats = gst_structure_get_value (s, "format");
1169     GValue new_formats = { 0, };
1170     gboolean has_format = FALSE;
1172     /* FIXME what to do if formats are missing? */
1174       const GstVideoFormatInfo *info;
1176       if (GST_VALUE_HOLDS_LIST (formats)) {
1177         guint list_size = gst_value_list_get_size (formats);
1180         g_value_init (&new_formats, GST_TYPE_LIST);
1182         for (index = 0; index < list_size; index++) {
1183           const GValue *list_item = gst_value_list_get_value (formats, index);
1186               gst_video_format_get_info (gst_video_format_from_string
1187               (g_value_get_string (list_item)));
           /* keep only formats without an alpha plane */
1188           if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
1190             gst_value_list_append_value (&new_formats, list_item);
1194       } else if (G_VALUE_HOLDS_STRING (formats)) {
1196             gst_video_format_get_info (gst_video_format_from_string
1197             (g_value_get_string (formats)));
1198         if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
           /* single-string case: copy the value wholesale */
1200           gst_value_init_and_copy (&new_formats, formats);
       /* unreachable for well-formed caps; warning fires only when
        * assertions are compiled out */
1204         g_assert_not_reached ();
1205         GST_WARNING ("Unexpected type for video 'format' field: %s",
1206             G_VALUE_TYPE_NAME (formats));
     /* gst_structure_take_value takes ownership of new_formats */
1210       s = gst_structure_copy (s);
1211       gst_structure_take_value (s, "format", &new_formats);
1212       gst_caps_append_structure (result, s);
/* Sink-pad getcaps: compute the caps a sink pad can accept by querying the
 * src pad's downstream peer, then relaxing geometry/framerate (any size, any
 * fps) and dropping colorimetry/chroma-site/format/pixel-aspect-ratio, since
 * the aggregator converts those.  If the other sink pads already fixed an
 * interlace mode it is pinned in the result.  The result is intersected with
 * @filter (when present) and with the pad template — template caps are
 * stripped of alpha formats when downstream cannot take alpha, so upstream
 * won't negotiate alpha that would be lost.  Returns a new caps ref. */
1222 gst_video_aggregator_pad_sink_getcaps (GstPad * pad, GstVideoAggregator * vagg,
1226   GstCaps *template_caps, *sink_template_caps;
1227   GstCaps *returned_caps;
1230   GstAggregator *agg = GST_AGGREGATOR (vagg);
1231   GstPad *srcpad = GST_PAD (agg->srcpad);
1233   GstVideoInterlaceMode interlace_mode;
1234   gboolean has_interlace_mode;
1236   template_caps = gst_pad_get_pad_template_caps (srcpad);
1238   GST_DEBUG_OBJECT (pad, "Get caps with filter: %" GST_PTR_FORMAT, filter);
   /* what downstream would accept, constrained by our src template */
1240   srccaps = gst_pad_peer_query_caps (srcpad, template_caps);
1241   srccaps = gst_caps_make_writable (srccaps);
1242   has_alpha = gst_video_aggregator_caps_has_alpha (srccaps);
1244   has_interlace_mode =
1245       gst_video_aggregator_get_sinkpads_interlace_mode (vagg, NULL,
   /* widen each structure: the aggregator scales/converts, so the sink pad
    * can take any resolution/framerate regardless of downstream limits */
1248   n = gst_caps_get_size (srccaps);
1249   for (i = 0; i < n; i++) {
1250     s = gst_caps_get_structure (srccaps, i);
1251     gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
1252         "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
1253         "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
1255     gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
1256         "pixel-aspect-ratio", NULL);
1257     if (has_interlace_mode)
1258       gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
1259           gst_video_interlace_mode_to_string (interlace_mode), NULL);
   /* apply the caller's filter, if any (branch condition elided) */
1263     returned_caps = gst_caps_intersect (srccaps, filter);
1264     gst_caps_unref (srccaps);
1266     returned_caps = srccaps;
1269   sink_template_caps = gst_pad_get_pad_template_caps (pad);
   /* downstream can't handle alpha: don't offer alpha formats upstream */
1271     GstCaps *tmp = _get_non_alpha_caps (sink_template_caps);
1272     gst_caps_unref (sink_template_caps);
1273     sink_template_caps = tmp;
1277     GstCaps *intersect = gst_caps_intersect (returned_caps, sink_template_caps);
1278     gst_caps_unref (returned_caps);
1279     returned_caps = intersect;
1282   gst_caps_unref (template_caps);
1283   gst_caps_unref (sink_template_caps);
1285   GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, returned_caps);
1287   return returned_caps;
/* Record a QoS observation from downstream under the object lock.
 * @proportion: downstream's processing-speed ratio; @diff: lateness of the
 * last buffer; @timestamp: running time the observation refers to.
 * For non-live pipelines that are running late we additionally skip ahead a
 * full output frame duration (2*diff + 1/fps) so we drop aggressively enough
 * to catch up; live pipelines just store timestamp + diff.
 * NOTE(review): the `live` flag is derived from gst_aggregator_get_latency()
 * validity — the assignment itself is elided from this excerpt. */
1291 gst_video_aggregator_update_qos (GstVideoAggregator * vagg, gdouble proportion,
1292     GstClockTimeDiff diff, GstClockTime timestamp)
1296   GST_DEBUG_OBJECT (vagg,
1297       "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
1298       GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
1299       GST_TIME_ARGS (timestamp));
   /* a valid configured latency means we are in live mode */
1302       GST_CLOCK_TIME_IS_VALID (gst_aggregator_get_latency (GST_AGGREGATOR
1305   GST_OBJECT_LOCK (vagg);
1307   vagg->priv->proportion = proportion;
1308   if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
1309     if (!live && G_UNLIKELY (diff > 0))
       /* late and non-live: jump past the next frame as well */
1310       vagg->priv->earliest_time =
1311           timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
1312           GST_VIDEO_INFO_FPS_D (&vagg->info),
1313           GST_VIDEO_INFO_FPS_N (&vagg->info));
1315       vagg->priv->earliest_time = timestamp + diff;
1317     vagg->priv->earliest_time = GST_CLOCK_TIME_NONE;
1319   GST_OBJECT_UNLOCK (vagg);
/* Forget all QoS state: clear the earliest-time observation (via an update
 * with an invalid timestamp and neutral 0.5 proportion) and zero the
 * processed/dropped frame counters used for QoS statistics messages. */
1323 gst_video_aggregator_reset_qos (GstVideoAggregator * vagg)
1325   gst_video_aggregator_update_qos (vagg, 0.5, 0, GST_CLOCK_TIME_NONE);
1326   vagg->priv->qos_processed = vagg->priv->qos_dropped = 0;
/* Atomically snapshot the latest QoS observation (proportion and earliest
 * running time) into the caller-provided out parameters, under the object
 * lock so it pairs safely with gst_video_aggregator_update_qos(). */
1330 gst_video_aggregator_read_qos (GstVideoAggregator * vagg, gdouble * proportion,
1331     GstClockTime * time)
1333   GST_OBJECT_LOCK (vagg);
1334   *proportion = vagg->priv->proportion;
1335   *time = vagg->priv->earliest_time;
1336   GST_OBJECT_UNLOCK (vagg);
/* Return the aggregator to its pristine state: clear the negotiated output
 * video info, timestamp offset, frame counter, live flag and QoS state,
 * reset the output segment position, and drop every sink pad's queued
 * buffer, timing window and video info.  Used on stop, flush-like paths and
 * when the last sink pad goes away. */
1340 gst_video_aggregator_reset (GstVideoAggregator * vagg)
1342   GstAggregator *agg = GST_AGGREGATOR (vagg);
1345   gst_video_info_init (&vagg->info);
1346   vagg->priv->ts_offset = 0;
1347   vagg->priv->nframes = 0;
1348   vagg->priv->live = FALSE;
   /* -1 == GST_CLOCK_TIME_NONE: no output position yet */
1350   GST_AGGREGATOR_PAD (agg->srcpad)->segment.position = -1;
1352   gst_video_aggregator_reset_qos (vagg);
   /* per-pad state is protected by the object lock */
1354   GST_OBJECT_LOCK (vagg);
1355   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1356     GstVideoAggregatorPad *p = l->data;
1358     gst_buffer_replace (&p->priv->buffer, NULL);
1359     p->priv->start_time = -1;
1360     p->priv->end_time = -1;
1362     gst_video_info_init (&p->info);
1364   GST_OBJECT_UNLOCK (vagg);
/* Select, for every sink pad, the queued buffer that overlaps the output
 * window [output_start_running_time, output_end_running_time) and park it in
 * pad->priv->buffer, with pad->priv->start_time/end_time set to its clipped
 * running-time extent (scaled by the output segment rate).
 *
 * Buffers wholly before the window are dropped, buffers after it are left
 * queued for a later cycle, and pads with a pending caps change (set by the
 * delayed setcaps path) have the new info applied at the same moment the
 * first buffer for it is picked, requesting a src reconfigure.
 *
 * Returns (per the visible return sites): GST_FLOW_ERROR for untimestamped
 * input, GST_AGGREGATOR_FLOW_NEED_DATA when some pad still needs a buffer,
 * GST_FLOW_EOS when all pads are finished; the GST_FLOW_OK path is elided
 * from this excerpt.
 *
 * NOTE(review): several loop-control lines (continue statements, closing
 * braces, EOS bookkeeping) are elided here; comments below describe only
 * what is visible. */
static GstFlowReturn
1368 gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
1369     GstClockTime output_start_running_time,
1370     GstClockTime output_end_running_time)
1372   GstAggregator *agg = GST_AGGREGATOR (vagg);
1374   gboolean eos = TRUE;
1375   gboolean need_more_data = FALSE;
1376   gboolean need_reconfigure = FALSE;
1377   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
1379   /* get a set of buffers into pad->priv->buffer that are within output_start_running_time
1380    * and output_end_running_time taking into account finished and unresponsive pads */
1382   GST_OBJECT_LOCK (vagg);
1383   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
1384     GstVideoAggregatorPad *pad = l->data;
1386     GstAggregatorPad *bpad;
1390     bpad = GST_AGGREGATOR_PAD (pad);
     /* copy the pad's segment under its own lock; used for clipping below */
1391     GST_OBJECT_LOCK (bpad);
1392     segment = bpad->segment;
1393     GST_OBJECT_UNLOCK (bpad);
1394     is_eos = gst_aggregator_pad_is_eos (bpad);
     /* peek (not pop): we only consume the buffer once we decide to use it */
1398     buf = gst_aggregator_pad_peek_buffer (bpad);
1400       GstClockTime start_time, end_time;
1402       start_time = GST_BUFFER_TIMESTAMP (buf);
       /* buffers without a timestamp cannot be placed in the output window */
1403       if (start_time == -1) {
1404         gst_buffer_unref (buf);
1405         GST_DEBUG_OBJECT (pad, "Need timestamped buffers!");
1406         GST_OBJECT_UNLOCK (vagg);
1407         return GST_FLOW_ERROR;
1410       /* FIXME: Make all this work with negative rates */
1411       end_time = GST_BUFFER_DURATION (buf);
       /* --- duration-less buffer: position it by start time only --- */
1413       if (end_time == -1) {
1414         start_time = MAX (start_time, segment.start);
1416             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1418         if (start_time >= output_end_running_time) {
1419           if (pad->priv->buffer) {
1420             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1421                 "output_end_running_time. Keeping previous buffer");
1423             GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
1424                 "output_end_running_time. No previous buffer.");
1426           gst_buffer_unref (buf);
1428         } else if (start_time < output_start_running_time) {
1429           GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time < "
1430               "output_start_running_time. Discarding old buffer");
1431           gst_buffer_replace (&pad->priv->buffer, buf);
           /* apply the delayed caps change now that a new buffer is taken */
1432           if (pad->priv->pending_vinfo.finfo) {
1433             pad->info = pad->priv->pending_vinfo;
1434             need_reconfigure = TRUE;
1435             pad->priv->pending_vinfo.finfo = NULL;
1437           gst_buffer_unref (buf);
1438           gst_aggregator_pad_drop_buffer (bpad);
1439           need_more_data = TRUE;
         /* in-window: actually consume the buffer from the queue */
1442           gst_buffer_unref (buf);
1443           buf = gst_aggregator_pad_pop_buffer (bpad);
1444           gst_buffer_replace (&pad->priv->buffer, buf);
1445           if (pad->priv->pending_vinfo.finfo) {
1446             pad->info = pad->priv->pending_vinfo;
1447             need_reconfigure = TRUE;
1448             pad->priv->pending_vinfo.finfo = NULL;
1450           /* FIXME: Set start_time and end_time to something here? */
1451           gst_buffer_unref (buf);
1452           GST_DEBUG_OBJECT (pad, "buffer duration is -1");
       /* --- normal buffer with a duration --- */
1456       g_assert (start_time != -1 && end_time != -1);
1457       end_time += start_time;   /* convert from duration to position */
1459       /* Check if it's inside the segment */
1460       if (start_time >= segment.stop || end_time < segment.start) {
1461         GST_DEBUG_OBJECT (pad,
1462             "Buffer outside the segment : segment: [%" GST_TIME_FORMAT " -- %"
1463             GST_TIME_FORMAT "]" " Buffer [%" GST_TIME_FORMAT " -- %"
1464             GST_TIME_FORMAT "]", GST_TIME_ARGS (segment.stop),
1465             GST_TIME_ARGS (segment.start), GST_TIME_ARGS (start_time),
1466             GST_TIME_ARGS (end_time));
1468         gst_buffer_unref (buf);
1469         gst_aggregator_pad_drop_buffer (bpad);
1471         need_more_data = TRUE;
1475         /* Clip to segment and convert to running time */
1476         start_time = MAX (start_time, segment.start);
1477         if (segment.stop != -1)
1478           end_time = MIN (end_time, segment.stop);
1480             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
1482             gst_segment_to_running_time (&segment, GST_FORMAT_TIME, end_time);
1483         g_assert (start_time != -1 && end_time != -1);
1485         /* Convert to the output segment rate */
1486         if (ABS (agg_segment->rate) != 1.0) {
1487           start_time *= ABS (agg_segment->rate);
1488           end_time *= ABS (agg_segment->rate);
1491         GST_TRACE_OBJECT (pad, "dealing with buffer %p start %" GST_TIME_FORMAT
1492             " end %" GST_TIME_FORMAT " out start %" GST_TIME_FORMAT
1493             " out end %" GST_TIME_FORMAT, buf, GST_TIME_ARGS (start_time),
1494             GST_TIME_ARGS (end_time), GST_TIME_ARGS (output_start_running_time),
1495             GST_TIME_ARGS (output_end_running_time));
       /* misordered input: buffer ends before the last one we consumed */
1497       if (pad->priv->end_time != -1 && pad->priv->end_time > end_time) {
1498         GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
1499         gst_buffer_unref (buf);
1500         gst_aggregator_pad_drop_buffer (bpad);
       /* overlaps the output window: this is the buffer we will aggregate */
1504       if (end_time >= output_start_running_time
1505           && start_time < output_end_running_time) {
1506         GST_DEBUG_OBJECT (pad,
1507             "Taking new buffer with start time %" GST_TIME_FORMAT,
1508             GST_TIME_ARGS (start_time));
1509         gst_buffer_replace (&pad->priv->buffer, buf);
1510         if (pad->priv->pending_vinfo.finfo) {
1511           pad->info = pad->priv->pending_vinfo;
1512           need_reconfigure = TRUE;
1513           pad->priv->pending_vinfo.finfo = NULL;
1515         pad->priv->start_time = start_time;
1516         pad->priv->end_time = end_time;
1518         gst_buffer_unref (buf);
1519         gst_aggregator_pad_drop_buffer (bpad);
       /* entirely after the window: leave it queued for a later cycle */
1521       } else if (start_time >= output_end_running_time) {
1522         GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
1523             GST_TIME_ARGS (start_time));
1524         gst_buffer_unref (buf);
       /* before the window: it supersedes the held buffer, then is dropped */
1527         gst_buffer_replace (&pad->priv->buffer, buf);
1528         if (pad->priv->pending_vinfo.finfo) {
1529           pad->info = pad->priv->pending_vinfo;
1530           need_reconfigure = TRUE;
1531           pad->priv->pending_vinfo.finfo = NULL;
1533         pad->priv->start_time = start_time;
1534         pad->priv->end_time = end_time;
1535         GST_DEBUG_OBJECT (pad,
1536             "replacing old buffer with a newer buffer, start %" GST_TIME_FORMAT
1537             " out end %" GST_TIME_FORMAT, GST_TIME_ARGS (start_time),
1538             GST_TIME_ARGS (output_end_running_time));
1539         gst_buffer_unref (buf);
1540         gst_aggregator_pad_drop_buffer (bpad);
1542         need_more_data = TRUE;
     /* --- no queued buffer on this pad --- */
1546       if (is_eos && pad->priv->repeat_after_eos) {
1548         GST_DEBUG_OBJECT (pad, "ignoring EOS and re-using previous buffer");
1552         if (pad->priv->end_time != -1) {
1553           if (pad->priv->end_time <= output_start_running_time) {
1554             pad->priv->start_time = pad->priv->end_time = -1;
             /* held buffer expired and the pad isn't EOS: wait for more */
1556               GST_DEBUG ("I just need more data");
1557               need_more_data = TRUE;
1559               gst_buffer_replace (&pad->priv->buffer, NULL);
1561           } else if (is_eos) {
         /* NOTE(review): the two `else if (is_eos)` arms below sit at
          * different (elided) nesting levels in the full source */
1564         } else if (is_eos) {
1565           gst_buffer_replace (&pad->priv->buffer, NULL);
1569   GST_OBJECT_UNLOCK (vagg);
   /* reconfigure must be requested outside the object lock */
1571   if (need_reconfigure)
1572     gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
1575     return GST_AGGREGATOR_FLOW_NEED_DATA;
1577     return GST_FLOW_EOS;
/* gst_element_foreach_sink_pad callback: sync the pad's GObject controller
 * properties to the stream time of the buffer currently held for
 * aggregation.  Pads without a held buffer are skipped (early return is
 * elided from this excerpt). */
1583 sync_pad_values (GstElement * vagg, GstPad * pad, gpointer user_data)
1585   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
1586   GstAggregatorPad *bpad = GST_AGGREGATOR_PAD_CAST (pad);
1587   GstClockTime timestamp;
1590   if (vpad->priv->buffer == NULL)
1593   timestamp = GST_BUFFER_TIMESTAMP (vpad->priv->buffer);
   /* segment access needs the pad's object lock */
1594   GST_OBJECT_LOCK (bpad);
1595   stream_time = gst_segment_to_stream_time (&bpad->segment, GST_FORMAT_TIME,
1597   GST_OBJECT_UNLOCK (bpad);
1599   /* sync object properties on stream time */
1600   if (GST_CLOCK_TIME_IS_VALID (stream_time))
1601     gst_object_sync_values (GST_OBJECT_CAST (pad), stream_time);
/* gst_element_foreach_sink_pad callback: ask the pad subclass to map the
 * pad's held buffer into priv->prepared_frame (e.g. mapping/converting it)
 * ahead of aggregate_frames.  The frame struct is zeroed first so clean_pad
 * can tell whether anything was prepared.  No-op when the pad has no buffer
 * or the subclass provides no prepare_frame vfunc. */
1607 prepare_frames (GstElement * agg, GstPad * pad, gpointer user_data)
1609   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
1610   GstVideoAggregatorPadClass *vaggpad_class =
1611       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
1613   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
1615   if (vpad->priv->buffer == NULL || !vaggpad_class->prepare_frame)
1618   return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
1619       vpad->priv->buffer, &vpad->priv->prepared_frame);
/* gst_element_foreach_sink_pad callback: counterpart of prepare_frames.
 * Lets the pad subclass release whatever prepare_frame set up (unmap etc.)
 * and re-zeroes prepared_frame so stale pointers can't be reused. */
1623 clean_pad (GstElement * agg, GstPad * pad, gpointer user_data)
1625   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR_CAST (agg);
1626   GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
1627   GstVideoAggregatorPadClass *vaggpad_class =
1628       GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
1630   if (vaggpad_class->clean_frame)
1631     vaggpad_class->clean_frame (vpad, vagg, &vpad->priv->prepared_frame);
1633   memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
/* Produce one output buffer covering [output_start_time, output_end_time):
 * obtain a buffer via the subclass's create_output_buffer, stamp its
 * timestamp/duration, sync controlled pad properties, let each pad prepare
 * its frame, invoke the subclass's aggregate_frames to composite, and
 * finally clean the per-pad prepared frames.  *outbuf may legitimately come
 * back NULL when the subclass declines to produce output this cycle.
 * Returns the subclass's flow result, or the create_output_buffer error. */
static GstFlowReturn
1639 gst_video_aggregator_do_aggregate (GstVideoAggregator * vagg,
1640     GstClockTime output_start_time, GstClockTime output_end_time,
1641     GstBuffer ** outbuf)
1643   GstFlowReturn ret = GST_FLOW_OK;
1644   GstElementClass *klass = GST_ELEMENT_GET_CLASS (vagg);
1645   GstVideoAggregatorClass *vagg_klass = (GstVideoAggregatorClass *) klass;
   /* both vfuncs are mandatory for subclasses */
1647   g_assert (vagg_klass->aggregate_frames != NULL);
1648   g_assert (vagg_klass->create_output_buffer != NULL);
1650   if ((ret = vagg_klass->create_output_buffer (vagg, outbuf)) != GST_FLOW_OK) {
1651     GST_WARNING_OBJECT (vagg, "Could not get an output buffer, reason: %s",
1652         gst_flow_get_name (ret));
1655   if (*outbuf == NULL) {
1656     /* sub-class doesn't want to generate output right now */
1660   GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
1661   GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
1663   /* Sync pad properties to the stream time */
1664   gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), sync_pad_values, NULL);
1666   /* Convert all the frames the subclass has before aggregating */
1667   gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), prepare_frames, NULL);
1669   ret = vagg_klass->aggregate_frames (vagg, *outbuf);
   /* always clean up prepared frames, even if aggregation failed */
1671   gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), clean_pad, NULL);
1676 /* Perform qos calculations before processing the next frame. Returns TRUE if
1677  * the frame should be processed, FALSE if the frame can be dropped entirely */
1679 gst_video_aggregator_do_qos (GstVideoAggregator * vagg, GstClockTime timestamp)
1681   GstAggregator *agg = GST_AGGREGATOR (vagg);
1682   GstClockTime qostime, earliest_time;
1686   /* no timestamp, can't do QoS => process frame */
1687   if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
1688     GST_LOG_OBJECT (vagg, "invalid timestamp, can't do QoS, process frame");
1692   /* get latest QoS observation values */
1693   gst_video_aggregator_read_qos (vagg, &proportion, &earliest_time);
1695   /* skip qos if we have no observation (yet) => process frame */
1696   if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
1697     GST_LOG_OBJECT (vagg, "no observation yet, process frame");
1701   /* qos is done on running time */
1703       gst_segment_to_running_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
1704       GST_FORMAT_TIME, timestamp);
1706   /* see how our next timestamp relates to the latest qos timestamp */
1707   GST_LOG_OBJECT (vagg, "qostime %" GST_TIME_FORMAT ", earliest %"
1708       GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
   /* positive jitter: this frame's time is already past the earliest time
    * downstream asked for => drop (the jitter value is returned to the
    * caller for the QoS statistics message; return statements elided) */
1710   jitter = GST_CLOCK_DIFF (qostime, earliest_time);
1711   if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
1712     GST_DEBUG_OBJECT (vagg, "we are late, drop frame");
1716   GST_LOG_OBJECT (vagg, "process frame");
/* Called when aggregation times out before output caps are known: advance
 * the output segment position by one frame duration anyway so a live
 * pipeline keeps making progress.  Frame duration comes from the negotiated
 * fps, defaulting to 25/1 when none is known yet.  Also counts the frame.
 * Position moves forward for positive rates and backward (clamped at 0)
 * otherwise. */
1721 gst_video_aggregator_advance_on_timeout (GstVideoAggregator * vagg)
1723   GstAggregator *agg = GST_AGGREGATOR (vagg);
1724   guint64 frame_duration;
1726   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
1728   GST_OBJECT_LOCK (agg);
   /* first timeout: start from the segment boundary matching the direction */
1729   if (agg_segment->position == -1) {
1730     if (agg_segment->rate > 0.0)
1731       agg_segment->position = agg_segment->start;
1733       agg_segment->position = agg_segment->stop;
1736   /* Advance position */
1737   fps_d = GST_VIDEO_INFO_FPS_D (&vagg->info) ?
1738       GST_VIDEO_INFO_FPS_D (&vagg->info) : 1;
1739   fps_n = GST_VIDEO_INFO_FPS_N (&vagg->info) ?
1740       GST_VIDEO_INFO_FPS_N (&vagg->info) : 25;
1741   /* Default to 25/1 if no "best fps" is known */
1742   frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
1743   if (agg_segment->rate > 0.0)
1744     agg_segment->position += frame_duration;
1745   else if (agg_segment->position > frame_duration)
1746     agg_segment->position -= frame_duration;
   /* reverse playback ran out of room: clamp at zero */
1748     agg_segment->position = 0;
1749   vagg->priv->nframes++;
1750   GST_OBJECT_UNLOCK (agg);
/* GstAggregator::aggregate implementation — the main output loop body.
 * Computes the output window [output_start_time, output_end_time) from the
 * segment position, frame counter and negotiated fps, fills the per-pad
 * buffer slots via fill_queues, runs QoS to decide whether to render or
 * drop, aggregates into an output buffer, posts a QoS message with the
 * processed/dropped statistics, and pushes the buffer downstream with
 * gst_aggregator_finish_buffer.  The aggregator lock is dropped around the
 * downstream push and re-taken to advance position/nframes.
 * NOTE(review): several branch/goto lines (the drop path, error labels) are
 * elided from this excerpt. */
static GstFlowReturn
1754 gst_video_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
1756   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
1757   GstClockTime output_start_time, output_end_time;
1758   GstClockTime output_start_running_time, output_end_running_time;
1759   GstBuffer *outbuf = NULL;
1760   GstFlowReturn flow_ret;
1762   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
1764   GST_VIDEO_AGGREGATOR_LOCK (vagg);
   /* caps not negotiated yet: on timeout still advance the clock, then ask
    * for more data */
1766   if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
1768       gst_video_aggregator_advance_on_timeout (vagg);
1769     flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
1770     goto unlock_and_return;
1773   output_start_time = agg_segment->position;
1774   if (agg_segment->position == -1 || agg_segment->position < agg_segment->start)
1775     output_start_time = agg_segment->start;
   /* first frame fixes the timestamp offset used by the fps-based
    * output_end_time computation below */
1777   if (vagg->priv->nframes == 0) {
1778     vagg->priv->ts_offset = output_start_time;
1779     GST_DEBUG_OBJECT (vagg, "New ts offset %" GST_TIME_FORMAT,
1780         GST_TIME_ARGS (output_start_time));
   /* fps 0/x means "unknown rate": leave the window open-ended */
1783   if (GST_VIDEO_INFO_FPS_N (&vagg->info) == 0) {
1784     output_end_time = -1;
     /* end = offset + (nframes + 1) / fps, computed without drift */
1787         vagg->priv->ts_offset +
1788         gst_util_uint64_scale_round (vagg->priv->nframes + 1,
1789         GST_SECOND * GST_VIDEO_INFO_FPS_D (&vagg->info),
1790         GST_VIDEO_INFO_FPS_N (&vagg->info));
1793   if (agg_segment->stop != -1)
1794     output_end_time = MIN (output_end_time, agg_segment->stop);
1796   output_start_running_time =
1797       gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
1799   output_end_running_time =
1800       gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
   /* zero-length window: we have reached the segment end */
1803   if (output_end_time == output_start_time) {
1804     flow_ret = GST_FLOW_EOS;
1807         gst_video_aggregator_fill_queues (vagg, output_start_running_time,
1808         output_end_running_time);
   /* NEED_DATA without a timeout: wait; with a timeout we render whatever
    * pads we have (fall through) */
1811   if (flow_ret == GST_AGGREGATOR_FLOW_NEED_DATA && !timeout) {
1812     GST_DEBUG_OBJECT (vagg, "Need more data for decisions");
1813     goto unlock_and_return;
1814   } else if (flow_ret == GST_FLOW_EOS) {
1815     GST_DEBUG_OBJECT (vagg, "All sinkpads are EOS -- forwarding");
1816     goto unlock_and_return;
1817   } else if (flow_ret == GST_FLOW_ERROR) {
1818     GST_WARNING_OBJECT (vagg, "Error collecting buffers");
1819     goto unlock_and_return;
1822   /* It is possible that gst_video_aggregator_fill_queues() marked the pad
1823    * for reconfiguration. In this case we have to reconfigure before continuing
1824    * because we have picked a new buffer with different caps than before from
1825    * one one of the sink pads and continuing here may lead to a crash.
1826    * https://bugzilla.gnome.org/show_bug.cgi?id=780682
1828   if (gst_pad_needs_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg))) {
1829     GST_DEBUG_OBJECT (vagg, "Need reconfigure");
1830     flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
1831     goto unlock_and_return;
1834   GST_DEBUG_OBJECT (vagg,
1835       "Producing buffer for %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
1836       ", running time start %" GST_TIME_FORMAT ", running time end %"
1837       GST_TIME_FORMAT, GST_TIME_ARGS (output_start_time),
1838       GST_TIME_ARGS (output_end_time),
1839       GST_TIME_ARGS (output_start_running_time),
1840       GST_TIME_ARGS (output_end_running_time));
   /* QoS: decide render vs drop for this window (drop branch elided) */
1842   jitter = gst_video_aggregator_do_qos (vagg, output_start_time);
1844     flow_ret = gst_video_aggregator_do_aggregate (vagg, output_start_time,
1845         output_end_time, &outbuf);
1846     if (flow_ret != GST_FLOW_OK)
1848     vagg->priv->qos_processed++;
1852     vagg->priv->qos_dropped++;
     /* post a QoS message so applications can observe drop statistics */
1855         gst_message_new_qos (GST_OBJECT_CAST (vagg), vagg->priv->live,
1856         output_start_running_time, gst_segment_to_stream_time (agg_segment,
1857             GST_FORMAT_TIME, output_start_time), output_start_time,
1858         output_end_time - output_start_time);
1859     gst_message_set_qos_values (msg, jitter, vagg->priv->proportion, 1000000);
1860     gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS,
1861         vagg->priv->qos_processed, vagg->priv->qos_dropped);
1862     gst_element_post_message (GST_ELEMENT_CAST (vagg), msg);
1864     flow_ret = GST_FLOW_OK;
   /* unlock while pushing downstream to avoid blocking caps/event paths */
1867   GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
1869     GST_DEBUG_OBJECT (vagg,
1870         "Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
1871         GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
1872         GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
1874     flow_ret = gst_aggregator_finish_buffer (agg, outbuf);
1877   GST_VIDEO_AGGREGATOR_LOCK (vagg);
1878   vagg->priv->nframes++;
1879   agg_segment->position = output_end_time;
1880   GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
   /* dropped-frame cleanup path (label elided above) */
1886   gst_buffer_unref (outbuf);
1888   GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
1892 /* FIXME, the duration query should reflect how long you will produce
1893  * data, that is the amount of stream time until you will emit EOS.
1895  * For synchronized aggregating this is always the max of all the durations
1896  * of upstream since we emit EOS when all of them finished.
1898  * We don't do synchronized aggregating so this really depends on where the
1899  * streams where punched in and what their relative offsets are against
1900  * each other which we can get from the first timestamps we see.
1902  * When we add a new stream (or remove a stream) the duration might
1903  * also become invalid again and we need to post a new DURATION
1904  * message to notify this fact to the parent.
1905  * For now we take the max of all the upstream elements so the simple
1906  * cases work at least somewhat.
/* Answer a DURATION query by iterating all sink pads, querying each
 * upstream peer, and reporting the maximum.  An upstream answering with an
 * "unknown" (-1) duration ends the search early, since the overall duration
 * is then unknown too.  Uses GstIterator so concurrent pad changes trigger
 * a RESYNC restart. */
1909 gst_video_aggregator_query_duration (GstVideoAggregator * vagg,
1912   GValue item = { 0 };
1920   gst_query_parse_duration (query, &format, NULL);
1926   /* Take maximum of all durations */
1927   it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (vagg));
1929     switch (gst_iterator_next (it, &item)) {
1930       case GST_ITERATOR_DONE:
1933       case GST_ITERATOR_OK:
1938         pad = g_value_get_object (&item);
1940         /* ask sink peer for duration */
1941         res &= gst_pad_peer_query_duration (pad, format, &duration);
1942         /* take max from all valid return values */
1944         /* valid unknown length, stop searching */
1945         if (duration == -1) {
1949         /* else see if bigger than current max */
1950         else if (duration > max)
1953         g_value_reset (&item);
1956       case GST_ITERATOR_RESYNC:
       /* pad list changed under us: restart from scratch */
1959         gst_iterator_resync (it);
1967   g_value_unset (&item);
1968   gst_iterator_free (it);
1971     /* and store the max */
1972     GST_DEBUG_OBJECT (vagg, "Total duration in format %s: %"
1973         GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
1974     gst_query_set_duration (query, format, max);
/* GstAggregator::src_query: answer POSITION (segment position mapped to
 * stream time) and DURATION (max of upstreams) locally; for LATENCY, chain
 * up first and then record whether the pipeline is live from the query
 * result.  Everything else goes to the parent class. */
1981 gst_video_aggregator_src_query (GstAggregator * agg, GstQuery * query)
1983   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
1984   gboolean res = FALSE;
1985   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
1987   switch (GST_QUERY_TYPE (query)) {
1988     case GST_QUERY_POSITION:
1992       gst_query_parse_position (query, &format, NULL);
1995         case GST_FORMAT_TIME:
1996           gst_query_set_position (query, format,
1997               gst_segment_to_stream_time (agg_segment, GST_FORMAT_TIME,
1998                   agg_segment->position));
2006     case GST_QUERY_DURATION:
2007       res = gst_video_aggregator_query_duration (vagg, query);
2009     case GST_QUERY_LATENCY:
       /* chain up, then cache the live flag for QoS messages */
2011           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
2015         gst_query_parse_latency (query, &vagg->priv->live, NULL, NULL);
2020           GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
/* GstAggregator::src_event: intercept QOS events to feed the lateness data
 * into our QoS state (the event is still chained up — the fallthrough to
 * the parent handler is elided here), log SEEK handling, and delegate
 * everything to the parent class implementation. */
2028 gst_video_aggregator_src_event (GstAggregator * agg, GstEvent * event)
2030   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2032   switch (GST_EVENT_TYPE (event)) {
2036       GstClockTimeDiff diff;
2037       GstClockTime timestamp;
2040       gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
2041       gst_video_aggregator_update_qos (vagg, proportion, diff, timestamp);
2044     case GST_EVENT_SEEK:
2046       GST_DEBUG_OBJECT (vagg, "Handling SEEK event");
2053       GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_event (agg,
/* GstAggregator::flush: rescale each pad's cached start/end times to the
 * (possibly new) output segment rate, then reset the output position,
 * timestamp offset, frame counter and QoS state.
 * NOTE(review): as visible here, abs_rate is taken from agg_segment->rate
 * and then compared against ABS (agg_segment->rate), which can never
 * differ — the rescaling body looks unreachable.  Possibly the comparison
 * was meant to use the pad's own segment rate; confirm against upstream
 * before changing. */
static GstFlowReturn
2058 gst_video_aggregator_flush (GstAggregator * agg)
2062   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2063   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
2065   GST_INFO_OBJECT (agg, "Flushing");
2066   GST_OBJECT_LOCK (vagg);
2067   abs_rate = ABS (agg_segment->rate);
2068   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
2069     GstVideoAggregatorPad *p = l->data;
2071     /* Convert to the output segment rate */
2072     if (ABS (agg_segment->rate) != abs_rate) {
       /* undo the old rate scaling, then apply the new one */
2073       if (ABS (agg_segment->rate) != 1.0 && p->priv->buffer) {
2074         p->priv->start_time /= ABS (agg_segment->rate);
2075         p->priv->end_time /= ABS (agg_segment->rate);
2077       if (abs_rate != 1.0 && p->priv->buffer) {
2078         p->priv->start_time *= abs_rate;
2079         p->priv->end_time *= abs_rate;
2083   GST_OBJECT_UNLOCK (vagg);
   /* restart timestamping from scratch after the flush */
2085   agg_segment->position = -1;
2086   vagg->priv->ts_offset = 0;
2087   vagg->priv->nframes = 0;
2089   gst_video_aggregator_reset_qos (vagg);
/* GstAggregator::sink_event: handle CAPS events through our delayed-setcaps
 * logic (consuming the event), assert SEGMENT events are in TIME format and
 * reset QoS on them, and chain everything else to the parent class. */
2094 gst_video_aggregator_sink_event (GstAggregator * agg, GstAggregatorPad * bpad,
2097   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2098   GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2099   gboolean ret = TRUE;
2101   GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
2102       GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));
2104   switch (GST_EVENT_TYPE (event)) {
2105     case GST_EVENT_CAPS:
2109       gst_event_parse_caps (event, &caps);
2111           gst_video_aggregator_pad_sink_setcaps (GST_PAD (pad),
2112           GST_OBJECT (vagg), caps);
       /* caps handled here; event is not forwarded to the parent */
2113       gst_event_unref (event);
2117     case GST_EVENT_SEGMENT:{
2119       gst_event_copy_segment (event, &seg);
       /* only TIME segments are supported by this base class */
2121       g_assert (seg.format == GST_FORMAT_TIME);
2122       gst_video_aggregator_reset_qos (vagg);
2130   return GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_event
/* GstAggregator::start: clear any caps left over from a previous run so
 * negotiation starts fresh. */
2137 gst_video_aggregator_start (GstAggregator * agg)
2139   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2141   gst_caps_replace (&vagg->priv->current_caps, NULL);
/* GstAggregator::stop: drop all per-pad and output state via the common
 * reset helper. */
2147 gst_video_aggregator_stop (GstAggregator * agg)
2149   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2151   gst_video_aggregator_reset (vagg);
2156 /* GstElement vmethods */
/* GstElement::request_new_pad: let the parent class create the pad, then
 * initialize our per-pad state — default zorder is the pad's creation index
 * (number of existing sink pads), timing window is unset — and keep the
 * sinkpad list sorted by zorder so aggregation iterates bottom-to-top. */
2158 gst_video_aggregator_request_new_pad (GstElement * element,
2159     GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
2161   GstVideoAggregator *vagg;
2162   GstVideoAggregatorPad *vaggpad;
2164   vagg = GST_VIDEO_AGGREGATOR (element);
2166   vaggpad = (GstVideoAggregatorPad *)
2167       GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->request_new_pad
2168       (element, templ, req_name, caps);
2170   if (vaggpad == NULL)
2173   GST_OBJECT_LOCK (vagg);
2174   vaggpad->priv->zorder = GST_ELEMENT (vagg)->numsinkpads;
2175   vaggpad->priv->start_time = -1;
2176   vaggpad->priv->end_time = -1;
2177   element->sinkpads = g_list_sort (element->sinkpads,
2178       (GCompareFunc) pad_zorder_compare);
2179   GST_OBJECT_UNLOCK (vagg);
2181   return GST_PAD (vaggpad);
/* GstElement::release_pad: drop the pad's held buffer, fully reset the
 * aggregator when this was the last sink pad, chain up to actually remove
 * the pad, and mark the src pad for reconfiguration since the output
 * geometry may now change.  All under the aggregator lock. */
2185 gst_video_aggregator_release_pad (GstElement * element, GstPad * pad)
2187   GstVideoAggregator *vagg = NULL;
2188   GstVideoAggregatorPad *vaggpad;
2191   vagg = GST_VIDEO_AGGREGATOR (element);
2192   vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
2194   GST_VIDEO_AGGREGATOR_LOCK (vagg);
   /* numsinkpads still counts this pad; it is the last one when count==1 */
2196   GST_OBJECT_LOCK (vagg);
2197   last_pad = (GST_ELEMENT (vagg)->numsinkpads - 1 == 0);
2198   GST_OBJECT_UNLOCK (vagg);
2201     gst_video_aggregator_reset (vagg);
2203   gst_buffer_replace (&vaggpad->priv->buffer, NULL);
2205   GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->release_pad
2206       (GST_ELEMENT (vagg), pad);
2208   gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
2210   GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* GstAggregator::propose_allocation: advertise GstVideoMeta support to
 * upstream so producers can attach video metadata to input buffers. */
2215 gst_video_aggregator_propose_allocation (GstAggregator * agg,
2216     GstAggregatorPad * pad, GstQuery * decide_query, GstQuery * query)
2218   gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
/* GstAggregator::decide_allocation: pick the buffer pool/allocator for
 * output buffers from downstream's allocation query.  Forces at least
 * 16-byte alignment on every allocation param, takes downstream's first
 * pool when offered (sizing it to at least our frame size), otherwise
 * creates a GstVideoBufferPool; configures params/allocator/video-meta and
 * falls back to a fresh generic video pool when downstream's pool rejects
 * the required configuration.  Error path posts a RESOURCE/SETTINGS element
 * error. */
2224 gst_video_aggregator_decide_allocation (GstAggregator * agg, GstQuery * query)
2226   GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
   /* default params: align = 15 -> 16-byte alignment */
2227   GstAllocationParams params = { 0, 15, 0, 0 };
2229   GstBufferPool *pool;
2230   GstAllocator *allocator;
2231   guint size, min, max;
2232   gboolean update = FALSE;
2233   GstStructure *config = NULL;
2234   GstCaps *caps = NULL;
2236   if (gst_query_get_n_allocation_params (query) == 0) {
2237     gst_query_add_allocation_param (query, NULL, &params);
     /* bump every offered param to at least 16-byte alignment */
2239     for (i = 0; i < gst_query_get_n_allocation_params (query); i++) {
2240       GstAllocator *allocator;
2242       gst_query_parse_nth_allocation_param (query, i, &allocator, &params);
2243       params.align = MAX (params.align, 15);
2244       gst_query_set_nth_allocation_param (query, i, allocator, &params);
2248   gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
2250   if (gst_query_get_n_allocation_pools (query) > 0) {
2251     gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
     /* downstream's size may be smaller than one of our output frames */
2254     size = MAX (size, vagg->info.size);
2258     size = vagg->info.size;
2263   gst_query_parse_allocation (query, &caps, NULL);
2265   /* no downstream pool, make our own */
2267     pool = gst_video_buffer_pool_new ();
2269   config = gst_buffer_pool_get_config (pool);
2271   gst_buffer_pool_config_set_params (config, caps, size, min, max);
2272   gst_buffer_pool_config_set_allocator (config, allocator, &params);
2273   if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
2274     gst_buffer_pool_config_add_option (config,
2275         GST_BUFFER_POOL_OPTION_VIDEO_META);
2278   /* buffer pool may have to do some changes */
2279   if (!gst_buffer_pool_set_config (pool, config)) {
2280     config = gst_buffer_pool_get_config (pool);
2282     /* If change are not acceptable, fallback to generic pool */
2283     if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
2284       GST_DEBUG_OBJECT (agg, "unsupported pool, making new pool");
2286       gst_object_unref (pool);
2287       pool = gst_video_buffer_pool_new ();
2288       gst_buffer_pool_config_set_params (config, caps, size, min, max);
2289       gst_buffer_pool_config_set_allocator (config, allocator, &params);
2291       if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
2292         gst_buffer_pool_config_add_option (config,
2293             GST_BUFFER_POOL_OPTION_VIDEO_META);
     /* second failure is fatal (error label elided below) */
2297     if (!gst_buffer_pool_set_config (pool, config))
   /* write the chosen pool back into the query (update vs add) */
2302     gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
2304     gst_query_add_allocation_pool (query, pool, size, min, max);
2307     gst_object_unref (pool);
2309     gst_object_unref (allocator);
   /* error path: release refs and report the misconfiguration */
2315     gst_object_unref (pool);
2317     gst_object_unref (allocator);
2319   GST_ELEMENT_ERROR (agg, RESOURCE, SETTINGS,
2320       ("Failed to configure the buffer pool"),
2321       ("Configuration is most likely invalid, please report this issue."));
2325 static GstFlowReturn
2326 gst_video_aggregator_create_output_buffer (GstVideoAggregator * videoaggregator,
2327 GstBuffer ** outbuf)
2329 GstAggregator *aggregator = GST_AGGREGATOR (videoaggregator);
2330 GstBufferPool *pool;
2331 GstFlowReturn ret = GST_FLOW_OK;
2333 pool = gst_aggregator_get_buffer_pool (aggregator);
2336 if (!gst_buffer_pool_is_active (pool)) {
2337 if (!gst_buffer_pool_set_active (pool, TRUE)) {
2338 GST_ELEMENT_ERROR (videoaggregator, RESOURCE, SETTINGS,
2339 ("failed to activate bufferpool"),
2340 ("failed to activate bufferpool"));
2341 return GST_FLOW_ERROR;
2345 ret = gst_buffer_pool_acquire_buffer (pool, outbuf, NULL);
2346 gst_object_unref (pool);
2349 GstAllocator *allocator;
2350 GstAllocationParams params;
2352 gst_aggregator_get_allocator (aggregator, &allocator, ¶ms);
2354 outsize = GST_VIDEO_INFO_SIZE (&videoaggregator->info);
2355 *outbuf = gst_buffer_new_allocate (allocator, outsize, ¶ms);
2358 gst_object_unref (allocator);
2360 if (*outbuf == NULL) {
2361 GST_ELEMENT_ERROR (videoaggregator, RESOURCE, NO_SPACE_LEFT,
2362 (NULL), ("Could not acquire buffer of size: %d", outsize));
2363 ret = GST_FLOW_ERROR;
2370 gst_video_aggregator_pad_sink_acceptcaps (GstPad * pad,
2371 GstVideoAggregator * vagg, GstCaps * caps)
2374 GstCaps *modified_caps;
2375 GstCaps *accepted_caps;
2376 GstCaps *template_caps;
2377 gboolean had_current_caps = TRUE;
2380 GstAggregator *agg = GST_AGGREGATOR (vagg);
2382 GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
2384 accepted_caps = gst_pad_get_current_caps (GST_PAD (agg->srcpad));
2386 template_caps = gst_pad_get_pad_template_caps (GST_PAD (agg->srcpad));
2388 if (accepted_caps == NULL) {
2389 accepted_caps = template_caps;
2390 had_current_caps = FALSE;
2393 accepted_caps = gst_caps_make_writable (accepted_caps);
2395 GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
2397 n = gst_caps_get_size (accepted_caps);
2398 for (i = 0; i < n; i++) {
2399 s = gst_caps_get_structure (accepted_caps, i);
2400 gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
2401 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
2402 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
2404 gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
2405 "pixel-aspect-ratio", NULL);
2408 modified_caps = gst_caps_intersect (accepted_caps, template_caps);
2410 ret = gst_caps_can_intersect (caps, accepted_caps);
2411 GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
2412 (ret ? "" : "not "), caps);
2413 gst_caps_unref (accepted_caps);
2414 gst_caps_unref (modified_caps);
2415 if (had_current_caps)
2416 gst_caps_unref (template_caps);
2421 gst_video_aggregator_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
2424 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2425 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2426 gboolean ret = FALSE;
2428 switch (GST_QUERY_TYPE (query)) {
2429 case GST_QUERY_CAPS:
2431 GstCaps *filter, *caps;
2433 gst_query_parse_caps (query, &filter);
2435 gst_video_aggregator_pad_sink_getcaps (GST_PAD (pad), vagg, filter);
2436 gst_query_set_caps_result (query, caps);
2437 gst_caps_unref (caps);
2441 case GST_QUERY_ACCEPT_CAPS:
2445 gst_query_parse_accept_caps (query, &caps);
2447 gst_video_aggregator_pad_sink_acceptcaps (GST_PAD (pad), vagg, caps);
2448 gst_query_set_accept_caps_result (query, ret);
2454 GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_query
2461 /* GObject vmethods */
2463 gst_video_aggregator_finalize (GObject * o)
2465 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2467 g_mutex_clear (&vagg->priv->lock);
2469 G_OBJECT_CLASS (gst_video_aggregator_parent_class)->finalize (o);
2473 gst_video_aggregator_dispose (GObject * o)
2475 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2477 gst_caps_replace (&vagg->priv->current_caps, NULL);
2479 G_OBJECT_CLASS (gst_video_aggregator_parent_class)->dispose (o);
2483 gst_video_aggregator_get_property (GObject * object,
2484 guint prop_id, GValue * value, GParamSpec * pspec)
2488 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2494 gst_video_aggregator_set_property (GObject * object,
2495 guint prop_id, const GValue * value, GParamSpec * pspec)
2499 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2504 /* GObject boilerplate */
2506 gst_video_aggregator_class_init (GstVideoAggregatorClass * klass)
2508 GObjectClass *gobject_class = (GObjectClass *) klass;
2509 GstElementClass *gstelement_class = (GstElementClass *) klass;
2510 GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
2512 GST_DEBUG_CATEGORY_INIT (gst_video_aggregator_debug, "videoaggregator", 0,
2513 "base video aggregator");
2515 gst_video_aggregator_parent_class = g_type_class_peek_parent (klass);
2517 g_type_class_add_private (klass, sizeof (GstVideoAggregatorPrivate));
2519 gobject_class->finalize = gst_video_aggregator_finalize;
2520 gobject_class->dispose = gst_video_aggregator_dispose;
2522 gobject_class->get_property = gst_video_aggregator_get_property;
2523 gobject_class->set_property = gst_video_aggregator_set_property;
2525 gstelement_class->request_new_pad =
2526 GST_DEBUG_FUNCPTR (gst_video_aggregator_request_new_pad);
2527 gstelement_class->release_pad =
2528 GST_DEBUG_FUNCPTR (gst_video_aggregator_release_pad);
2530 agg_class->start = gst_video_aggregator_start;
2531 agg_class->stop = gst_video_aggregator_stop;
2532 agg_class->sink_query = gst_video_aggregator_sink_query;
2533 agg_class->sink_event = gst_video_aggregator_sink_event;
2534 agg_class->flush = gst_video_aggregator_flush;
2535 agg_class->aggregate = gst_video_aggregator_aggregate;
2536 agg_class->src_event = gst_video_aggregator_src_event;
2537 agg_class->src_query = gst_video_aggregator_src_query;
2538 agg_class->get_next_time = gst_aggregator_simple_get_next_time;
2539 agg_class->update_src_caps = gst_video_aggregator_default_update_src_caps;
2540 agg_class->fixate_src_caps = gst_video_aggregator_default_fixate_src_caps;
2541 agg_class->negotiated_src_caps =
2542 gst_video_aggregator_default_negotiated_src_caps;
2543 agg_class->decide_allocation = gst_video_aggregator_decide_allocation;
2544 agg_class->propose_allocation = gst_video_aggregator_propose_allocation;
2546 klass->find_best_format = gst_video_aggregator_find_best_format;
2547 klass->create_output_buffer = gst_video_aggregator_create_output_buffer;
2548 klass->update_caps = gst_video_aggregator_default_update_caps;
2550 /* Register the pad class */
2551 g_type_class_ref (GST_TYPE_VIDEO_AGGREGATOR_PAD);
2555 gst_video_aggregator_init (GstVideoAggregator * vagg,
2556 GstVideoAggregatorClass * klass)
2559 G_TYPE_INSTANCE_GET_PRIVATE (vagg, GST_TYPE_VIDEO_AGGREGATOR,
2560 GstVideoAggregatorPrivate);
2562 vagg->priv->current_caps = NULL;
2564 g_mutex_init (&vagg->priv->lock);
2566 /* initialize variables */
2567 gst_video_aggregator_reset (vagg);