1 /* Generic video aggregator plugin
2 * Copyright (C) 2004, 2008 Wim Taymans <wim@fluendo.com>
3 * Copyright (C) 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 * SECTION:gstvideoaggregator
23 * @title: GstVideoAggregator
24 * @short_description: Base class for video aggregators
26 * VideoAggregator can accept AYUV, ARGB and BGRA video streams. For each of the requested
27 * sink pads it will compare the incoming geometry and framerate to define the
28 * output parameters. Indeed output video frames will have the geometry of the
29 * biggest incoming video stream and the framerate of the fastest incoming one.
31 * VideoAggregator will do colorspace conversion.
33 * Zorder for each input stream can be configured on the
34 * #GstVideoAggregatorPad.
44 #include "gstvideoaggregator.h"
/* Per-file debug category, used as the default by GST_LOG/GST_DEBUG below */
GST_DEBUG_CATEGORY_STATIC (gst_video_aggregator_debug);
#define GST_CAT_DEFAULT gst_video_aggregator_debug

/* Needed prototypes */
static void gst_video_aggregator_reset_qos (GstVideoAggregator * vagg);
52 /****************************************
53 * GstVideoAggregatorPad implementation *
54 ****************************************/
/* Default values for the GstVideoAggregatorPad properties installed below */
#define DEFAULT_PAD_ZORDER 0
#define DEFAULT_PAD_REPEAT_AFTER_EOS FALSE
#define DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT GST_CLOCK_TIME_NONE

/* Pad property ids.
 * NOTE(review): the enum head (PROP_PAD_0, PROP_PAD_ZORDER, ...) is not
 * visible in this excerpt -- confirm against the full file. */
  PROP_PAD_REPEAT_AFTER_EOS,
  PROP_PAD_MAX_LAST_BUFFER_REPEAT,
/* Instance-private data for GstVideoAggregatorPad.
 * NOTE(review): several fields referenced elsewhere (zorder, buffer, caps,
 * needs_alpha) are not visible in this excerpt -- confirm in the full file. */
struct _GstVideoAggregatorPadPrivate
  /* Mapped input frame for the current aggregation cycle; only valid
   * between prepare_frame and clean_frame */
  GstVideoFrame prepared_frame;

  /* Pad properties (see install_property calls in _class_init) */
  gboolean repeat_after_eos;
  GstClockTime max_last_buffer_repeat;

  /* Subclasses can force an alpha channel in the (input thus output)
   * colorspace format */

  /* Running-time interval covered by the currently queued buffer */
  GstClockTime start_time;
  GstClockTime end_time;

  /* Caps/video-info received but not yet taken into use (activated when the
   * corresponding buffer is dequeued) */
  GstVideoInfo pending_vinfo;
  GstCaps *pending_caps;

G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorPad, gst_video_aggregator_pad,
    GST_TYPE_AGGREGATOR_PAD);
/* GObject::get_property implementation for GstVideoAggregatorPad.
 * Copies the current value of the requested pad property into @value.
 * NOTE(review): the switch/break scaffolding is elided in this excerpt. */
gst_video_aggregator_pad_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
  GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);

    case PROP_PAD_ZORDER:
      g_value_set_uint (value, pad->priv->zorder);
    case PROP_PAD_REPEAT_AFTER_EOS:
      g_value_set_boolean (value, pad->priv->repeat_after_eos);
    case PROP_PAD_MAX_LAST_BUFFER_REPEAT:
      g_value_set_uint64 (value, pad->priv->max_last_buffer_repeat);
      /* Unknown property id: emit the standard GObject warning */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GCompareFunc used to keep the element's sinkpad list sorted by ascending
 * zorder; returns <0, 0 or >0 as pad1's zorder is lower, equal or higher. */
pad_zorder_compare (const GstVideoAggregatorPad * pad1,
    const GstVideoAggregatorPad * pad2)
  return pad1->priv->zorder - pad2->priv->zorder;
/* GObject::set_property implementation for GstVideoAggregatorPad.
 * Setting "zorder" additionally re-sorts the parent element's sinkpad list
 * (under the parent's object lock) so pads stay ordered by zorder.
 * NOTE(review): the switch/break scaffolding is elided in this excerpt. */
gst_video_aggregator_pad_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
  GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (object);

    case PROP_PAD_ZORDER:{
      GstVideoAggregator *vagg =
          GST_VIDEO_AGGREGATOR (gst_pad_get_parent (GST_PAD (pad)));
      GST_OBJECT_LOCK (vagg);
      pad->priv->zorder = g_value_get_uint (value);
      GST_ELEMENT (vagg)->sinkpads =
          g_list_sort (GST_ELEMENT (vagg)->sinkpads,
          (GCompareFunc) pad_zorder_compare);
      GST_OBJECT_UNLOCK (vagg);
      gst_object_unref (vagg);
      /* NOTE(review): this second assignment looks redundant; in the full
       * file it is presumably the else-branch taken when the pad has no
       * parent yet -- confirm the elided if (vagg)/else structure. */
      pad->priv->zorder = g_value_get_uint (value);
    case PROP_PAD_REPEAT_AFTER_EOS:
      pad->priv->repeat_after_eos = g_value_get_boolean (value);
    case PROP_PAD_MAX_LAST_BUFFER_REPEAT:
      pad->priv->max_last_buffer_repeat = g_value_get_uint64 (value);
      /* Unknown property id: emit the standard GObject warning */
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GstAggregatorPad::flush implementation: drops the queued buffer and caps,
 * resets the pad's running-time bookkeeping and the element's QoS state. */
_flush_pad (GstAggregatorPad * aggpad, GstAggregator * aggregator)
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (aggregator);
  GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (aggpad);

  gst_video_aggregator_reset_qos (vagg);
  gst_buffer_replace (&pad->priv->buffer, NULL);
  gst_caps_replace (&pad->priv->caps, NULL);
  /* -1 == GST_CLOCK_TIME_NONE: no buffer interval known anymore */
  pad->priv->start_time = -1;
  pad->priv->end_time = -1;
/* GstAggregatorPad::skip_buffer implementation.
 * Returns TRUE when @buffer ends (in running time) before the output
 * position, i.e. it is entirely in the past and can be dropped without
 * ever being aggregated. Buffers without a duration are never skipped. */
gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
    GstAggregator * agg, GstBuffer * buffer)
  gboolean ret = FALSE;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;

  if (agg_segment->position != GST_CLOCK_TIME_NONE
      && GST_BUFFER_DURATION (buffer) != GST_CLOCK_TIME_NONE) {
    /* NOTE(review): start_time can be GST_CLOCK_TIME_NONE if the PTS falls
     * outside the segment; end_time then overflows -- confirm upstream
     * behaviour is intended here. */
    GstClockTime start_time =
        gst_segment_to_running_time (&aggpad->segment, GST_FORMAT_TIME,
        GST_BUFFER_PTS (buffer));
    GstClockTime end_time = start_time + GST_BUFFER_DURATION (buffer);
    GstClockTime output_start_running_time =
        gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
        agg_segment->position);

    ret = end_time < output_start_running_time;
/* Default GstVideoAggregatorPad::prepare_frame implementation:
 * maps @buffer read-only into @prepared_frame using the pad's video info.
 * On mapping failure only a warning is logged. */
gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
    GstVideoAggregator * vagg, GstBuffer * buffer,
    GstVideoFrame * prepared_frame)
  if (!gst_video_frame_map (prepared_frame, &pad->info, buffer, GST_MAP_READ)) {
    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
/* Default GstVideoAggregatorPad::clean_frame implementation:
 * unmaps the frame prepared by prepare_frame and clears the struct so a
 * stale mapping can never be reused. */
gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
    GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
  if (prepared_frame->buffer) {
    gst_video_frame_unmap (prepared_frame);
    memset (prepared_frame, 0, sizeof (GstVideoFrame));
/* GstAggregator::peek_next_sample implementation: wraps the pad's currently
 * queued buffer (if any) together with its caps and segment in a new
 * GstSample; returns NULL when nothing is queued. */
gst_video_aggregator_peek_next_sample (GstAggregator * agg,
    GstAggregatorPad * aggpad)
  GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (aggpad);
  GstSample *res = NULL;

  if (vaggpad->priv->buffer) {
    res = gst_sample_new (vaggpad->priv->buffer, vaggpad->priv->caps,
        &aggpad->segment, NULL);
/* Class initializer for GstVideoAggregatorPad: installs the pad properties
 * and wires up the GstAggregatorPad / GstVideoAggregatorPad vfuncs. */
gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstAggregatorPadClass *aggpadclass = (GstAggregatorPadClass *) klass;

  gobject_class->set_property = gst_video_aggregator_pad_set_property;
  gobject_class->get_property = gst_video_aggregator_pad_get_property;

  g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
      g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
          0, G_MAXUINT, DEFAULT_PAD_ZORDER,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PAD_REPEAT_AFTER_EOS,
      g_param_spec_boolean ("repeat-after-eos", "Repeat After EOS",
          "Repeat the " "last frame after EOS until all pads are EOS",
          DEFAULT_PAD_REPEAT_AFTER_EOS,
          G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));

  /**
   * GstVideoAggregatorPad::max-last-buffer-repeat:
   *
   * Repeat last buffer for time (in ns, -1 = until EOS).
   * The default behaviour is for the last buffer received on a pad to be
   * aggregated until a new buffer is received.
   *
   * Setting this property causes the last buffer to be discarded once the
   * running time of the output buffer is `max-last-buffer-repeat` nanoseconds
   * past its end running time. When the buffer didn't have a duration, the
   * comparison is made against its running start time.
   *
   * This is useful in live scenarios: when a stream encounters a temporary
   * networking problem, a #GstVideoAggregator subclass can then fall back to
   * displaying a lower z-order stream, or the background.
   *
   * Setting this property doesn't affect the behaviour on EOS.
   */
  g_object_class_install_property (gobject_class,
      PROP_PAD_MAX_LAST_BUFFER_REPEAT,
      g_param_spec_uint64 ("max-last-buffer-repeat", "Max Last Buffer Repeat",
          "Repeat last buffer for time (in ns, -1=until EOS), "
          "behaviour on EOS is not affected", 0, G_MAXUINT64,
          DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT,
          G_PARAM_READWRITE | GST_PARAM_MUTABLE_PLAYING |
          G_PARAM_STATIC_STRINGS));

  aggpadclass->flush = GST_DEBUG_FUNCPTR (_flush_pad);
  aggpadclass->skip_buffer =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_skip_buffer);
  klass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_prepare_frame);
  klass->clean_frame = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_clean_frame);
/* Instance initializer: sets up the private data pointer, applies the
 * property defaults and zeroes the prepared-frame slot. */
gst_video_aggregator_pad_init (GstVideoAggregatorPad * vaggpad)
  vaggpad->priv = gst_video_aggregator_pad_get_instance_private (vaggpad);

  vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
  vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
  vaggpad->priv->max_last_buffer_repeat = DEFAULT_PAD_MAX_LAST_BUFFER_REPEAT;
  memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
/**
 * gst_video_aggregator_pad_has_current_buffer:
 * @pad: a #GstVideoAggregatorPad
 *
 * Checks if the pad currently has a buffer queued that is going to be used
 * for the current output frame.
 *
 * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
 * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
 *
 * Returns: %TRUE if the pad has currently a buffer queued
 */
gst_video_aggregator_pad_has_current_buffer (GstVideoAggregatorPad * pad)
  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), FALSE);

  return pad->priv->buffer != NULL;
/**
 * gst_video_aggregator_pad_get_current_buffer:
 * @pad: a #GstVideoAggregatorPad
 *
 * Returns the currently queued buffer that is going to be used
 * for the current output frame.
 *
 * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
 * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
 *
 * The return value is only valid until #GstVideoAggregatorClass::aggregate_frames or #GstVideoAggregatorPadClass::prepare_frame
 *
 * Returns: (transfer none): The currently queued buffer
 */
gst_video_aggregator_pad_get_current_buffer (GstVideoAggregatorPad * pad)
  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);

  return pad->priv->buffer;
/**
 * gst_video_aggregator_pad_get_prepared_frame:
 * @pad: a #GstVideoAggregatorPad
 *
 * Returns the currently prepared video frame that has to be aggregated into
 * the current output frame.
 *
 * This must only be called from the #GstVideoAggregatorClass::aggregate_frames virtual method,
 * or from the #GstVideoAggregatorPadClass::prepare_frame virtual method of the aggregator pads.
 *
 * The return value is only valid until #GstVideoAggregatorClass::aggregate_frames or #GstVideoAggregatorPadClass::prepare_frame
 *
 * Returns: (transfer none): The currently prepared video frame
 */
gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorPad * pad)
  g_return_val_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad), NULL);

  /* An unmapped slot has a NULL buffer pointer (see clean_frame) */
  return pad->priv->prepared_frame.buffer ? &pad->priv->prepared_frame : NULL;
/**
 * gst_video_aggregator_pad_set_needs_alpha:
 * @pad: a #GstVideoAggregatorPad
 * @needs_alpha: %TRUE if this pad requires alpha output
 *
 * Allows selecting that this pad requires an output format with alpha
 */
gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad * pad,
    gboolean needs_alpha)
  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_PAD (pad));

  if (needs_alpha != pad->priv->needs_alpha) {
      /* NOTE(review): the 'GstAggregator *agg =' declaration line is elided
       * in this excerpt -- confirm against the full file. Triggers srcpad
       * renegotiation so the new alpha requirement is picked up. */
      GST_AGGREGATOR (gst_object_get_parent (GST_OBJECT (pad)));
    pad->priv->needs_alpha = needs_alpha;
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (agg));
    gst_object_unref (agg);
/****************************************
 * GstVideoAggregatorConvertPad implementation *
 ****************************************/

/* Convert-pad property ids.
 * NOTE(review): the enum head (PROP_CONVERT_PAD_0, ...) is not visible in
 * this excerpt -- confirm against the full file. */
  PROP_CONVERT_PAD_CONVERTER_CONFIG,

/* Instance-private data for GstVideoAggregatorConvertPad */
struct _GstVideoAggregatorConvertPadPrivate
  /* The following fields are only used from the aggregate thread and when
   * initializing / finalizing */

  /* Converter, if NULL no conversion is done */
  GstVideoConverter *convert;

  /* caps used for conversion if needed */
  GstVideoInfo conversion_info;
  GstBuffer *converted_buffer;

  /* The following fields are accessed from the property setters / getters,
   * and as such are protected with the object lock */
  GstStructure *converter_config;
  gboolean converter_config_changed;

  /* Shared task pool handed to the converter for threaded conversion */
  GstTaskPool *task_pool;

G_DEFINE_TYPE_WITH_PRIVATE (GstVideoAggregatorConvertPad,
    gst_video_aggregator_convert_pad, GST_TYPE_VIDEO_AGGREGATOR_PAD);
426 gst_video_aggregator_convert_pad_finalize (GObject * o)
428 GstVideoAggregatorConvertPad *vaggpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (o);
430 if (vaggpad->priv->convert)
431 gst_video_converter_free (vaggpad->priv->convert);
432 vaggpad->priv->convert = NULL;
434 if (vaggpad->priv->converter_config)
435 gst_structure_free (vaggpad->priv->converter_config);
436 vaggpad->priv->converter_config = NULL;
438 if (vaggpad->priv->task_pool)
439 gst_task_pool_cleanup (vaggpad->priv->task_pool);
441 gst_object_replace ((GstObject **) & vaggpad->priv->task_pool, NULL);
443 G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
/* GstVideoAggregatorPad::update_conversion_info implementation: marks the
 * converter configuration dirty so prepare_frame rebuilds the converter on
 * its next run (the flag is protected by the pad's object lock). */
gst_video_aggregator_convert_pad_update_conversion_info_internal
    (GstVideoAggregatorPad * vpad)
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);

  GST_OBJECT_LOCK (pad);
  pad->priv->converter_config_changed = TRUE;
  GST_OBJECT_UNLOCK (pad);
/* Reads the uint field @opt from @config, returning @def when absent.
 * NOTE(review): the 'guint res;' declaration and return statements are
 * elided in this excerpt. */
get_opt_uint (const GstStructure * config, const gchar * opt, guint def)
  if (!gst_structure_get_uint (config, opt, &res))
/* GstVideoAggregatorPad::prepare_frame implementation with colorspace
 * conversion. Rebuilds the converter when the configuration changed (under
 * the pad's object lock), then maps the input buffer and, if a converter
 * exists, converts into a freshly allocated buffer; otherwise the mapped
 * input frame is handed out directly. */
gst_video_aggregator_convert_pad_prepare_frame (GstVideoAggregatorPad * vpad,
    GstVideoAggregator * vagg, GstBuffer * buffer,
    GstVideoFrame * prepared_frame)
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);

  /* Update/create converter as needed */
  GST_OBJECT_LOCK (pad);
  if (pad->priv->converter_config_changed) {
    GstVideoAggregatorConvertPadClass *klass =
        GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
    GstVideoInfo conversion_info;

    /* Ask the subclass what format we should convert to */
    gst_video_info_init (&conversion_info);
    klass->create_conversion_info (pad, vagg, &conversion_info);
    if (conversion_info.finfo == NULL)
    pad->priv->converter_config_changed = FALSE;

    pad->priv->conversion_info = conversion_info;

    /* Drop the stale converter; it is rebuilt below if still needed */
    if (pad->priv->convert)
      gst_video_converter_free (pad->priv->convert);
    pad->priv->convert = NULL;

    if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
      if (pad->priv->converter_config) {
        /* Clamp the requested thread count to the available processors */
        guint n_threads = get_opt_uint (pad->priv->converter_config,
            GST_VIDEO_CONVERTER_OPT_THREADS, 1);

        if (n_threads == 0 || n_threads > g_get_num_processors ())
          n_threads = g_get_num_processors ();

        gst_shared_task_pool_set_max_threads (GST_SHARED_TASK_POOL (pad->priv->
                task_pool), n_threads);

      /* Converter takes ownership of the (copied) config structure */
      gst_video_converter_new_with_pool (&vpad->info,
          &pad->priv->conversion_info,
          pad->priv->converter_config ? gst_structure_copy (pad->
              priv->converter_config) : NULL, pad->priv->task_pool);
      if (!pad->priv->convert) {
        GST_WARNING_OBJECT (pad, "No path found for conversion");

      GST_DEBUG_OBJECT (pad, "This pad will be converted from %s to %s",
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&vpad->info)),
          gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&pad->priv->
      GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
  GST_OBJECT_UNLOCK (pad);

  if (!gst_video_frame_map (&frame, &vpad->info, buffer, GST_MAP_READ)) {
    GST_WARNING_OBJECT (vagg, "Could not map input buffer");

  if (pad->priv->convert) {
    GstVideoFrame converted_frame;
    GstBuffer *converted_buf = NULL;
    /* 15 == 16-byte alignment for the converted buffer's memory */
    static GstAllocationParams params = { 0, 15, 0, 0, };

    /* We wait until here to set the conversion infos, in case vagg->info changed */
    converted_size = pad->priv->conversion_info.size;
    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
    converted_size = converted_size > outsize ? converted_size : outsize;
    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);

    if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
            converted_buf, GST_MAP_READWRITE)) {
      GST_WARNING_OBJECT (vagg, "Could not map converted frame");

      gst_video_frame_unmap (&frame);

    gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
    /* Keep the converted buffer alive until clean_frame releases it */
    pad->priv->converted_buffer = converted_buf;
    gst_video_frame_unmap (&frame);
    *prepared_frame = converted_frame;
    /* No conversion needed: hand out the mapped input frame directly */
    *prepared_frame = frame;
/* GstVideoAggregatorPad::clean_frame implementation: unmaps the prepared
 * frame and releases the intermediate converted buffer allocated by
 * prepare_frame, if any. */
gst_video_aggregator_convert_pad_clean_frame (GstVideoAggregatorPad * vpad,
    GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);

  if (prepared_frame->buffer) {
    gst_video_frame_unmap (prepared_frame);
    memset (prepared_frame, 0, sizeof (GstVideoFrame));

  if (pad->priv->converted_buffer) {
    gst_buffer_unref (pad->priv->converted_buffer);
    pad->priv->converted_buffer = NULL;
/* Default GstVideoAggregatorConvertPadClass::create_conversion_info:
 * decides the per-pad conversion target. Conversion is needed when format,
 * colorimetry or chroma-site differ from the aggregator's output; width,
 * height and pixel-aspect-ratio are kept from the pad (no rescaling here). */
gst_video_aggregator_convert_pad_create_conversion_info
    (GstVideoAggregatorConvertPad * pad, GstVideoAggregator * agg,
    GstVideoInfo * convert_info)
  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
  gchar *colorimetry, *best_colorimetry;
  gchar *chroma, *best_chroma;

  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
  g_return_if_fail (convert_info != NULL);

  /* Bail out while either side of the conversion is not negotiated yet */
  if (!vpad->info.finfo
      || GST_VIDEO_INFO_FORMAT (&vpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
      || GST_VIDEO_INFO_FORMAT (&agg->info) == GST_VIDEO_FORMAT_UNKNOWN) {

  /* Compare colorimetry/chroma-site by their string forms */
  colorimetry = gst_video_colorimetry_to_string (&vpad->info.colorimetry);
  chroma = gst_video_chroma_site_to_string (vpad->info.chroma_site);

  best_colorimetry = gst_video_colorimetry_to_string (&agg->info.colorimetry);
  best_chroma = gst_video_chroma_site_to_string (agg->info.chroma_site);

  if (GST_VIDEO_INFO_FORMAT (&agg->info) != GST_VIDEO_INFO_FORMAT (&vpad->info)
      || g_strcmp0 (colorimetry, best_colorimetry)
      || g_strcmp0 (chroma, best_chroma)) {
    GstVideoInfo tmp_info;

    /* Initialize with the wanted video format and our original width and
     * height as we don't want to rescale. Then copy over the wanted
     * colorimetry, and chroma-site and our current pixel-aspect-ratio
     * and other relevant fields.
     */
    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (&agg->info),
        vpad->info.width, vpad->info.height);
    tmp_info.chroma_site = agg->info.chroma_site;
    tmp_info.colorimetry = agg->info.colorimetry;
    tmp_info.par_n = vpad->info.par_n;
    tmp_info.par_d = vpad->info.par_d;
    tmp_info.fps_n = vpad->info.fps_n;
    tmp_info.fps_d = vpad->info.fps_d;
    tmp_info.flags = vpad->info.flags;
    tmp_info.interlace_mode = vpad->info.interlace_mode;

    *convert_info = tmp_info;
    /* No conversion needed: pass the pad's own info through (else branch) */
    *convert_info = vpad->info;

  /* NOTE(review): 'chroma' must also be freed; the g_free (chroma) line is
   * not visible in this excerpt -- confirm against the full file. */
  g_free (colorimetry);
  g_free (best_colorimetry);
  g_free (best_chroma);
/* GObject::get_property for GstVideoAggregatorConvertPad.
 * "converter-config" is read under the object lock since the aggregate
 * thread may be mutating it concurrently. */
gst_video_aggregator_convert_pad_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);

    case PROP_CONVERT_PAD_CONVERTER_CONFIG:
      GST_OBJECT_LOCK (pad);
      if (pad->priv->converter_config)
        g_value_set_boxed (value, pad->priv->converter_config);
      GST_OBJECT_UNLOCK (pad);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::set_property for GstVideoAggregatorConvertPad.
 * Replaces the converter configuration (object lock held) and marks it
 * dirty so prepare_frame rebuilds the converter. */
gst_video_aggregator_convert_pad_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (object);

    case PROP_CONVERT_PAD_CONVERTER_CONFIG:
      GST_OBJECT_LOCK (pad);
      if (pad->priv->converter_config)
        gst_structure_free (pad->priv->converter_config);
      pad->priv->converter_config = g_value_dup_boxed (value);
      pad->priv->converter_config_changed = TRUE;
      GST_OBJECT_UNLOCK (pad);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Class initializer for GstVideoAggregatorConvertPad: installs the
 * converter-config property and overrides the pad vfuncs with the
 * converting implementations above. */
gst_video_aggregator_convert_pad_class_init (GstVideoAggregatorConvertPadClass *
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstVideoAggregatorPadClass *vaggpadclass =
      (GstVideoAggregatorPadClass *) klass;

  gobject_class->finalize = gst_video_aggregator_convert_pad_finalize;
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_get_property);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_set_property);

  g_object_class_install_property (gobject_class,
      PROP_CONVERT_PAD_CONVERTER_CONFIG, g_param_spec_boxed ("converter-config",
          "Converter configuration",
          "A GstStructure describing the configuration that should be used "
          "when scaling and converting this pad's video frames",
          GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  vaggpadclass->update_conversion_info =
      (gst_video_aggregator_convert_pad_update_conversion_info_internal);
  vaggpadclass->prepare_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_prepare_frame);
  vaggpadclass->clean_frame =
      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_clean_frame);

  klass->create_conversion_info =
      gst_video_aggregator_convert_pad_create_conversion_info;
/* Instance initializer: clears all conversion state and creates + prepares
 * the shared task pool used for multi-threaded conversion. */
gst_video_aggregator_convert_pad_init (GstVideoAggregatorConvertPad * vaggpad)
  gst_video_aggregator_convert_pad_get_instance_private (vaggpad);

  vaggpad->priv->converted_buffer = NULL;
  vaggpad->priv->convert = NULL;
  vaggpad->priv->converter_config = NULL;
  vaggpad->priv->converter_config_changed = FALSE;
  vaggpad->priv->task_pool = gst_shared_task_pool_new ();
  gst_task_pool_prepare (vaggpad->priv->task_pool, NULL);
/**
 * gst_video_aggregator_convert_pad_update_conversion_info:
 * @pad: a #GstVideoAggregatorPad
 *
 * Requests the pad to check and update the converter before the next usage to
 * update for any changes that have happened.
 */
void gst_video_aggregator_convert_pad_update_conversion_info
    (GstVideoAggregatorConvertPad * pad)
  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));

  /* Only set the dirty flag here; the converter itself is rebuilt lazily in
   * prepare_frame on the aggregate thread */
  GST_OBJECT_LOCK (pad);
  pad->priv->converter_config_changed = TRUE;
  GST_OBJECT_UNLOCK (pad);
745 /**************************************
746 * GstVideoAggregator implementation *
747 **************************************/
/* Accessor for the aggregator's event/state mutex */
#define GST_VIDEO_AGGREGATOR_GET_LOCK(vagg) (&GST_VIDEO_AGGREGATOR(vagg)->priv->lock)

/* Takes the event lock, logging before and after acquisition so lock
 * contention is visible in the debug log */
#define GST_VIDEO_AGGREGATOR_LOCK(vagg)   G_STMT_START {        \
  GST_LOG_OBJECT (vagg, "Taking EVENT lock from thread %p",     \
        g_thread_self());                                       \
  g_mutex_lock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));            \
  GST_LOG_OBJECT (vagg, "Took EVENT lock from thread %p",       \
        g_thread_self());                                       \
  } G_STMT_END

/* Releases the event lock. The post-unlock message previously said
 * "Took EVENT lock" (copy/paste from the LOCK macro), which made the debug
 * log read as if the lock was re-acquired; fixed to "Released". */
#define GST_VIDEO_AGGREGATOR_UNLOCK(vagg)   G_STMT_START {      \
  GST_LOG_OBJECT (vagg, "Releasing EVENT lock from thread %p",  \
        g_thread_self());                                       \
  g_mutex_unlock(GST_VIDEO_AGGREGATOR_GET_LOCK(vagg));          \
  GST_LOG_OBJECT (vagg, "Released EVENT lock from thread %p",   \
        g_thread_self());                                       \
  } G_STMT_END
/* Instance-private data for GstVideoAggregator.
 * NOTE(review): some fields referenced elsewhere (e.g. the GMutex 'lock'
 * used by GST_VIDEO_AGGREGATOR_GET_LOCK) are not visible in this excerpt. */
struct _GstVideoAggregatorPrivate
  /* Lock to prevent the state to change while aggregating */

  /* Current downstream segment */
  GstClockTime ts_offset;

  /* QoS state: earliest allowed output time plus processed/dropped counters */
  GstClockTime earliest_time;
  guint64 qos_processed, qos_dropped;

  /* Caps most recently configured on the source pad */
  GstCaps *current_caps;

  /* The (ordered) list of #GstVideoFormatInfo supported by the aggregation
     method (from the srcpad template caps). */
  GPtrArray *supported_formats;
/* Can't use the G_DEFINE_TYPE macros because we need the
 * videoaggregator class in the _init to be able to set
 * the sink pad non-alpha caps. Using the G_DEFINE_TYPE there
 * seems to be no way of getting the real class being initialized */
static void gst_video_aggregator_init (GstVideoAggregator * self,
    GstVideoAggregatorClass * klass);
static void gst_video_aggregator_class_init (GstVideoAggregatorClass * klass);
static gpointer gst_video_aggregator_parent_class = NULL;
/* Offset of GstVideoAggregatorPrivate within the instance; filled in by
 * gst_video_aggregator_get_type() via g_type_add_instance_private() */
static gint video_aggregator_private_offset = 0;
803 gst_video_aggregator_get_type (void)
805 static volatile gsize g_define_type_id_volatile = 0;
807 if (g_once_init_enter (&g_define_type_id_volatile)) {
808 GType g_define_type_id = g_type_register_static_simple (GST_TYPE_AGGREGATOR,
809 g_intern_static_string ("GstVideoAggregator"),
810 sizeof (GstVideoAggregatorClass),
811 (GClassInitFunc) gst_video_aggregator_class_init,
812 sizeof (GstVideoAggregator),
813 (GInstanceInitFunc) gst_video_aggregator_init,
814 (GTypeFlags) G_TYPE_FLAG_ABSTRACT);
816 video_aggregator_private_offset =
817 g_type_add_instance_private (g_define_type_id,
818 sizeof (GstVideoAggregatorPrivate));
820 g_once_init_leave (&g_define_type_id_volatile, g_define_type_id);
822 return g_define_type_id_volatile;
/* Returns @self's private-data area, using the offset registered in
 * gst_video_aggregator_get_type() (manual replacement for the helper that
 * G_DEFINE_TYPE_WITH_PRIVATE would have generated). */
static inline GstVideoAggregatorPrivate *
gst_video_aggregator_get_instance_private (GstVideoAggregator * self)
  return (G_STRUCT_MEMBER_P (self, video_aggregator_private_offset));
/* Returns whether @format appears in the aggregator's list of supported
 * formats (derived from the srcpad template caps). */
gst_video_aggregator_supports_format (GstVideoAggregator * vagg,
    GstVideoFormat format)
  for (i = 0; i < vagg->priv->supported_formats->len; i++) {
    GstVideoFormatInfo *format_info = vagg->priv->supported_formats->pdata[i];

    if (GST_VIDEO_FORMAT_INFO_FORMAT (format_info) == format)
/* Converts @info to caps and strips the size/rate/PAR/interlace fields so
 * the result can be intersected with downstream caps purely on format,
 * colorimetry and chroma-site. Caller owns the returned caps. */
gst_video_aggregator_get_possible_caps_for_info (GstVideoInfo * info)
  GstCaps *possible_caps = gst_video_info_to_caps (info);

  s = gst_caps_get_structure (possible_caps, 0);
  gst_structure_remove_fields (s, "width", "height", "framerate",
      "pixel-aspect-ratio", "interlace-mode", NULL);

  return possible_caps;
/* Picks the best output format for the current set of sink pads.
 * Each pad's format is weighted by its pixel area (natively supported
 * formats counting much more); the first pad that needs alpha forces an
 * alpha-capable format. Falls back to scanning the supported-formats list
 * when the winner cannot be intersected with @downstream_caps, and resets
 * @best_info when nothing is compatible at all. */
gst_video_aggregator_find_best_format (GstVideoAggregator * vagg,
    GstCaps * downstream_caps, GstVideoInfo * best_info,
    gboolean * at_least_one_alpha)
  GstCaps *possible_caps;
  GstVideoAggregatorPad *pad;
  gboolean need_alpha = FALSE;
  gint best_format_number = 0, i;
  /* format -> accumulated weight, keyed by the GstVideoFormat enum value */
  GHashTable *formats_table = g_hash_table_new (g_direct_hash, g_direct_equal);

  GST_OBJECT_LOCK (vagg);
  for (tmp = GST_ELEMENT (vagg)->sinkpads; tmp; tmp = tmp->next) {
    gint format_number = 0;

    /* Pad not negotiated yet: skip */
    if (!pad->info.finfo)

    if (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      *at_least_one_alpha = TRUE;

    /* If we want alpha, disregard all the other formats */
    if (need_alpha && !(pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA))

    /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */
    if (GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN)

    /* Can downstream accept this format ? */
    if (!GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
          gst_video_aggregator_get_possible_caps_for_info (&pad->info);
      if (!gst_caps_can_intersect (downstream_caps, possible_caps)) {
        gst_caps_unref (possible_caps);

    /* If the format is supported, consider it very high weight */
    if (gst_video_aggregator_supports_format (vagg,
            GST_VIDEO_INFO_FORMAT (&pad->info))) {
          GPOINTER_TO_INT (g_hash_table_lookup (formats_table,
              GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info))));

    /* Weight by pixel area so bigger streams win */
    format_number += pad->info.width * pad->info.height;

    g_hash_table_replace (formats_table,
        GINT_TO_POINTER (GST_VIDEO_INFO_FORMAT (&pad->info)),
        GINT_TO_POINTER (format_number));

    /* If that pad is the first with alpha, set it as the new best format */
    if (!need_alpha && (pad->priv->needs_alpha
            && (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (pad->info.finfo)))) {
      /* Just fallback to ARGB in case we require alpha but the input pad
       * does not have alpha.
       * Do not increment best_format_number in that case. */
      gst_video_info_set_format (best_info,
          GST_VIDEO_FORMAT_ARGB,
          GST_VIDEO_INFO_WIDTH (&pad->info),
          GST_VIDEO_INFO_HEIGHT (&pad->info));
    } else if (!need_alpha
        && (pad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
      *best_info = pad->info;
      best_format_number = format_number;
    } else if (format_number > best_format_number) {
      *best_info = pad->info;
      best_format_number = format_number;
  GST_OBJECT_UNLOCK (vagg);

  g_hash_table_unref (formats_table);

  /* Keep the winner if it is natively supported and downstream accepts it */
  if (gst_video_aggregator_supports_format (vagg,
          GST_VIDEO_INFO_FORMAT (best_info))) {
    possible_caps = gst_video_aggregator_get_possible_caps_for_info (best_info);
    if (gst_caps_can_intersect (downstream_caps, possible_caps)) {
      gst_caps_unref (possible_caps);
    gst_caps_unref (possible_caps);

  /* Fallback: first supported format with a matching alpha capability that
   * downstream can accept */
  for (i = 0; i < vagg->priv->supported_formats->len; i++) {
    GstVideoFormatInfo *format_info = vagg->priv->supported_formats->pdata[i];

    if ((! !GST_VIDEO_FORMAT_INFO_HAS_ALPHA (format_info)) == (! !need_alpha)) {
      gst_video_info_set_format (best_info, format_info->format,
          best_info->width, best_info->height);
          gst_video_aggregator_get_possible_caps_for_info (best_info);

      if (gst_caps_can_intersect (downstream_caps, possible_caps)) {
        GST_INFO_OBJECT (vagg, "Using supported caps: %" GST_PTR_FORMAT,
        gst_caps_unref (possible_caps);

      gst_caps_unref (possible_caps);

  /* Nothing fits: warn and reset so the caller sees FORMAT_UNKNOWN */
  GST_WARNING_OBJECT (vagg, "Nothing compatible with %" GST_PTR_FORMAT,
  gst_video_info_init (best_info);
/* GstAggregator::fixate_src_caps default implementation.
 * Scans all sink pads (object lock held) for the largest geometry and the
 * fastest framerate, then fixates @caps to those values with a 1/1
 * pixel-aspect-ratio preference. */
gst_video_aggregator_default_fixate_src_caps (GstAggregator * agg,
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  gint best_width = -1, best_height = -1;
  gint best_fps_n = -1, best_fps_d = -1;
  gdouble best_fps = -1.;

  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *mpad = l->data;

    fps_n = GST_VIDEO_INFO_FPS_N (&mpad->info);
    fps_d = GST_VIDEO_INFO_FPS_D (&mpad->info);
    width = GST_VIDEO_INFO_WIDTH (&mpad->info);
    height = GST_VIDEO_INFO_HEIGHT (&mpad->info);

    /* Pad without negotiated geometry: ignore */
    if (width == 0 || height == 0)

    /* Track the biggest incoming geometry */
    if (best_width < width)
    if (best_height < height)
      best_height = height;

    gst_util_fraction_to_double (fps_n, fps_d, &cur_fps);

    /* Track the fastest incoming framerate */
    if (best_fps < cur_fps) {
  GST_OBJECT_UNLOCK (vagg);

  /* No usable framerate found: fall back to a default (elided here) */
  if (best_fps_n <= 0 || best_fps_d <= 0 || best_fps == 0.0) {

  caps = gst_caps_make_writable (caps);
  s = gst_caps_get_structure (caps, 0);
  gst_structure_fixate_field_nearest_int (s, "width", best_width);
  gst_structure_fixate_field_nearest_int (s, "height", best_height);
  gst_structure_fixate_field_nearest_fraction (s, "framerate", best_fps_n,
  if (gst_structure_has_field (s, "pixel-aspect-ratio"))
    gst_structure_fixate_field_nearest_fraction (s, "pixel-aspect-ratio", 1, 1);
  caps = gst_caps_fixate (caps);
/* Default GstVideoAggregatorClass::update_caps implementation.
 * Determines the best output format via find_best_format (falling back to
 * simply fixating @caps), then returns a caps set that prefers that format,
 * chroma-site and colorimetry merged with the original @caps. */
gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
  GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
  GstCaps *ret, *best_format_caps;
  gboolean at_least_one_alpha = FALSE;
  GstVideoFormat best_format;
  GstVideoInfo best_info;

  best_format = GST_VIDEO_FORMAT_UNKNOWN;
  gst_video_info_init (&best_info);

  if (vagg_klass->find_best_format) {
    vagg_klass->find_best_format (vagg, caps, &best_info, &at_least_one_alpha);

    best_format = GST_VIDEO_INFO_FORMAT (&best_info);

  /* Subclass gave no answer: fixate the downstream caps and use that */
  if (best_format == GST_VIDEO_FORMAT_UNKNOWN) {
    GstCaps *tmp = gst_caps_fixate (gst_caps_ref (caps));
    gst_video_info_from_caps (&best_info, tmp);
    best_format = GST_VIDEO_INFO_FORMAT (&best_info);
    gst_caps_unref (tmp);

  color_name = gst_video_colorimetry_to_string (&best_info.colorimetry);
  chroma_site = gst_video_chroma_site_to_string (best_info.chroma_site);

  GST_DEBUG_OBJECT (vagg,
      "The output format will now be : %s with chroma : %s and colorimetry %s",
      gst_video_format_to_string (best_format),
      GST_STR_NULL (chroma_site), GST_STR_NULL (color_name));

  /* Put the preferred format first, keep the rest of @caps as fallback */
  best_format_caps = gst_caps_copy (caps);
  gst_caps_set_simple (best_format_caps, "format", G_TYPE_STRING,
      gst_video_format_to_string (best_format), NULL);

  if (chroma_site != NULL)
    gst_caps_set_simple (best_format_caps, "chroma-site", G_TYPE_STRING,
  if (color_name != NULL)
    gst_caps_set_simple (best_format_caps, "colorimetry", G_TYPE_STRING,

  g_free (color_name);
  g_free (chroma_site);
  ret = gst_caps_merge (best_format_caps, gst_caps_ref (caps));
static GstFlowReturn
/* Default GstAggregatorClass::update_src_caps implementation: delegates to
 * the GstVideoAggregatorClass::update_caps vfunc (which must be set) and
 * stores the result in @ret. */
gst_video_aggregator_default_update_src_caps (GstAggregator * agg,
    GstCaps * caps, GstCaps ** ret)
  GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (agg);
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  /* update_caps has a default implementation, so it must never be NULL */
  g_assert (vagg_klass->update_caps);
  *ret = vagg_klass->update_caps (vagg, caps);
/* gst_element_foreach_sink_pad() callback: asks each sink pad to refresh its
 * colorspace/size conversion state via the pad class'
 * update_conversion_info vfunc (optional). */
_update_conversion_info (GstElement * element, GstPad * pad, gpointer user_data)
  GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
  GstVideoAggregatorPadClass *vaggpad_klass =
      GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (vaggpad);
  if (vaggpad_klass->update_conversion_info) {
    vaggpad_klass->update_conversion_info (vaggpad);
/* Default GstAggregatorClass::negotiated_src_caps implementation.
 * Validates the negotiated output @caps against the inputs (alpha support),
 * resets frame counting/QoS on framerate changes, refreshes per-pad
 * conversion state, and pushes the caps + latency downstream when changed.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  gboolean at_least_one_alpha = FALSE;
  gboolean ret = FALSE;
  const GstVideoFormatInfo *finfo;
  GST_INFO_OBJECT (agg->srcpad, "set src caps: %" GST_PTR_FORMAT, caps);
  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  /* Scan all sink pads to learn whether any configured input carries alpha */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *mpad = l->data;
    /* skip pads that have no video info configured yet */
    if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
        || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
    if (mpad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
      at_least_one_alpha = TRUE;
  GST_OBJECT_UNLOCK (vagg);
  if (!gst_video_info_from_caps (&info, caps))
    goto unlock_and_return;
  /* Framerate changed: restart frame counting so the timestamp offset is
   * recomputed, and drop stale QoS observations */
  if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
      GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
    if (GST_AGGREGATOR_PAD (agg->srcpad)->segment.position != -1) {
      vagg->priv->nframes = 0;
      /* The timestamp offset will be updated based on the
       * segment position the next time we aggregate */
      GST_DEBUG_OBJECT (vagg,
          "Resetting frame counter because of framerate change");
    gst_video_aggregator_reset_qos (vagg);
  GST_OBJECT_LOCK (vagg);
  GST_OBJECT_UNLOCK (vagg);
  /* Inputs with alpha cannot be represented by an alpha-less output format */
  if (at_least_one_alpha && !(finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
    GST_ELEMENT_ERROR (vagg, CORE, NEGOTIATION,
        ("At least one of the input pads contains alpha, but configured caps don't support alpha."),
        ("Either convert your inputs to not contain alpha or add a videoconvert after the aggregator"));
    goto unlock_and_return;
  /* Then browse the sinks once more, setting or unsetting conversion if needed */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg),
      _update_conversion_info, NULL);
  /* Only propagate caps and (frame-duration based) latency when they changed */
  if (vagg->priv->current_caps == NULL ||
      gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
    GstClockTime latency;
    gst_caps_replace (&vagg->priv->current_caps, caps);
    gst_aggregator_set_src_caps (agg, caps);
    /* one output frame's worth of latency: fps_d / fps_n seconds */
    latency = gst_util_uint64_scale (GST_SECOND,
        GST_VIDEO_INFO_FPS_D (&info), GST_VIDEO_INFO_FPS_N (&info));
    gst_aggregator_set_latency (agg, latency, latency);
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* Finds the interlace mode of the first sink pad (other than @skip_pad)
 * that already has a known video format, storing it in @mode.
 * Takes and releases the object lock; returns from inside the loop on the
 * first match. */
gst_video_aggregator_get_sinkpads_interlace_mode (GstVideoAggregator * vagg,
    GstVideoAggregatorPad * skip_pad, GstVideoInterlaceMode * mode)
  GST_OBJECT_LOCK (vagg);
  for (walk = GST_ELEMENT (vagg)->sinkpads; walk; walk = g_list_next (walk)) {
    GstVideoAggregatorPad *vaggpad = walk->data;
    /* the caller may want to exclude the pad currently being (re)configured */
    if (skip_pad && vaggpad == skip_pad)
    if (vaggpad->info.finfo
        && GST_VIDEO_INFO_FORMAT (&vaggpad->info) != GST_VIDEO_FORMAT_UNKNOWN) {
      *mode = GST_VIDEO_INFO_INTERLACE_MODE (&vaggpad->info);
      GST_OBJECT_UNLOCK (vagg);
  GST_OBJECT_UNLOCK (vagg);
/* Handles new caps on a sink pad.  Rejects caps whose interlace mode
 * conflicts with the current output / other pads.  On first configuration
 * the pad's video info is applied immediately; on later changes it is kept
 * pending until the matching buffer is picked, to avoid pairing an old
 * buffer with new caps.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_pad_sink_setcaps (GstPad * pad, GstObject * parent,
  GstVideoAggregator *vagg;
  GstVideoAggregatorPad *vaggpad;
  gboolean ret = FALSE;
  GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, caps);
  vagg = GST_VIDEO_AGGREGATOR (parent);
  vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
  if (!gst_video_info_from_caps (&info, caps)) {
    GST_DEBUG_OBJECT (pad, "Failed to parse caps");
  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  GstVideoInterlaceMode pads_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  gboolean has_mode = FALSE;
  /* get the current output setting or fallback to other pads settings */
  if (GST_VIDEO_INFO_FORMAT (&vagg->info) != GST_VIDEO_FORMAT_UNKNOWN) {
    pads_mode = GST_VIDEO_INFO_INTERLACE_MODE (&vagg->info);
  gst_video_aggregator_get_sinkpads_interlace_mode (vagg, vaggpad,
  /* mixing interlace modes is not supported: reject mismatching caps */
  if (pads_mode != GST_VIDEO_INFO_INTERLACE_MODE (&info)) {
    GST_ERROR_OBJECT (pad,
        "got input caps %" GST_PTR_FORMAT ", but current caps are %"
        GST_PTR_FORMAT, caps, vagg->priv->current_caps);
    GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  if (!vaggpad->info.finfo ||
      GST_VIDEO_INFO_FORMAT (&vaggpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    /* no video info was already set, so this is the first time
     * that this pad is getting configured; configure immediately to avoid
     * problems with the initial negotiation */
    vaggpad->info = info;
    gst_caps_replace (&vaggpad->priv->caps, caps);
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
    /* this pad already had caps but received new ones; keep the new caps
     * pending until we pick the next buffer from the queue, otherwise we
     * might use an old buffer with the new caps and crash */
    vaggpad->priv->pending_vinfo = info;
    gst_caps_replace (&vaggpad->priv->pending_caps, caps);
    GST_DEBUG_OBJECT (pad, "delaying caps change");
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* Returns whether any "format" value in any structure of @caps names a
 * video format with an alpha channel.  The format field may be a single
 * string or a GstValueList of strings; any other type is a caller bug
 * (g_assert_not_reached).
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_caps_has_alpha (GstCaps * caps)
  guint size = gst_caps_get_size (caps);
  for (i = 0; i < size; i++) {
    GstStructure *s = gst_caps_get_structure (caps, i);
    const GValue *formats = gst_structure_get_value (s, "format");
    const GstVideoFormatInfo *info;
    if (GST_VALUE_HOLDS_LIST (formats)) {
      guint list_size = gst_value_list_get_size (formats);
      /* check every candidate format in the list */
      for (index = 0; index < list_size; index++) {
        const GValue *list_item = gst_value_list_get_value (formats, index);
        gst_video_format_get_info (gst_video_format_from_string
            (g_value_get_string (list_item)));
        if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
    } else if (G_VALUE_HOLDS_STRING (formats)) {
      /* single fixed format */
      gst_video_format_get_info (gst_video_format_from_string
          (g_value_get_string (formats)));
      if (GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info))
      g_assert_not_reached ();
      GST_WARNING ("Unexpected type for video 'format' field: %s",
          G_VALUE_TYPE_NAME (formats));
/* Builds and returns a copy of @caps with every alpha-capable entry removed
 * from the "format" field of each structure.  Structures whose format field
 * is a list keep only the non-alpha formats; single-string formats are kept
 * only when alpha-less.  Returns (transfer full) a new caps object.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
_get_non_alpha_caps (GstCaps * caps)
  size = gst_caps_get_size (caps);
  result = gst_caps_new_empty ();
  for (i = 0; i < size; i++) {
    GstStructure *s = gst_caps_get_structure (caps, i);
    const GValue *formats = gst_structure_get_value (s, "format");
    GValue new_formats = { 0, };
    gboolean has_format = FALSE;
    /* FIXME what to do if formats are missing? */
    const GstVideoFormatInfo *info;
    if (GST_VALUE_HOLDS_LIST (formats)) {
      guint list_size = gst_value_list_get_size (formats);
      g_value_init (&new_formats, GST_TYPE_LIST);
      /* copy over only the formats without an alpha channel */
      for (index = 0; index < list_size; index++) {
        const GValue *list_item = gst_value_list_get_value (formats, index);
        gst_video_format_get_info (gst_video_format_from_string
            (g_value_get_string (list_item)));
        if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
          gst_value_list_append_value (&new_formats, list_item);
    } else if (G_VALUE_HOLDS_STRING (formats)) {
      gst_video_format_get_info (gst_video_format_from_string
          (g_value_get_string (formats)));
      if (!GST_VIDEO_FORMAT_INFO_HAS_ALPHA (info)) {
        gst_value_init_and_copy (&new_formats, formats);
      g_assert_not_reached ();
      GST_WARNING ("Unexpected type for video 'format' field: %s",
          G_VALUE_TYPE_NAME (formats));
    /* replace the format field with the filtered set; new_formats ownership
     * is transferred into the copied structure */
    s = gst_structure_copy (s);
    gst_structure_take_value (s, "format", &new_formats);
    gst_caps_append_structure (result, s);
/* Computes the caps a sink pad can accept.  Starts from the downstream peer
 * caps of the src pad, relaxes framerate (and, for convert pads, size and
 * format-related fields), pins the interlace mode to the one already in
 * use, then intersects with @filter and the sink pad template (stripped of
 * alpha formats when downstream cannot output alpha).
 * Returns (transfer full) the resulting caps.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_pad_sink_getcaps (GstPad * pad, GstVideoAggregator * vagg,
  GstCaps *template_caps, *sink_template_caps;
  GstCaps *returned_caps;
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstPad *srcpad = GST_PAD (agg->srcpad);
  GstVideoInterlaceMode interlace_mode;
  gboolean has_interlace_mode;
  template_caps = gst_pad_get_pad_template_caps (srcpad);
  GST_DEBUG_OBJECT (pad, "Get caps with filter: %" GST_PTR_FORMAT, filter);
  srccaps = gst_pad_peer_query_caps (srcpad, template_caps);
  srccaps = gst_caps_make_writable (srccaps);
  has_alpha = gst_video_aggregator_caps_has_alpha (srccaps);
  has_interlace_mode =
      gst_video_aggregator_get_sinkpads_interlace_mode (vagg, NULL,
  /* Relax per-structure constraints that the aggregator can adapt itself */
  n = gst_caps_get_size (srccaps);
  for (i = 0; i < n; i++) {
    s = gst_caps_get_structure (srccaps, i);
    /* inputs may have any framerate; the output rate is derived separately */
    gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
    if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
      /* convert pads can rescale and convert colorspace, so accept anything */
      gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
          "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
      gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
          "pixel-aspect-ratio", NULL);
    /* interlace mode must match across all pads, so pin it when known */
    if (has_interlace_mode)
      gst_structure_set (s, "interlace-mode", G_TYPE_STRING,
          gst_video_interlace_mode_to_string (interlace_mode), NULL);
  returned_caps = gst_caps_intersect (srccaps, filter);
  gst_caps_unref (srccaps);
  returned_caps = srccaps;
  sink_template_caps = gst_pad_get_pad_template_caps (pad);
  /* downstream has no alpha: do not offer alpha formats upstream either */
  GstCaps *tmp = _get_non_alpha_caps (sink_template_caps);
  gst_caps_unref (sink_template_caps);
  sink_template_caps = tmp;
  GstCaps *intersect = gst_caps_intersect (returned_caps, sink_template_caps);
  gst_caps_unref (returned_caps);
  returned_caps = intersect;
  gst_caps_unref (template_caps);
  gst_caps_unref (sink_template_caps);
  GST_DEBUG_OBJECT (pad, "Returning caps: %" GST_PTR_FORMAT, returned_caps);
  return returned_caps;
/* Stores a QoS observation (from a downstream QOS event) under the object
 * lock: the rate @proportion and the earliest time at which processing a
 * frame is still useful.  For non-live pipelines the earliest time is
 * pushed further out (2*diff + one frame) to let playback catch up.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_update_qos (GstVideoAggregator * vagg, gdouble proportion,
    GstClockTimeDiff diff, GstClockTime timestamp)
  GST_DEBUG_OBJECT (vagg,
      "Updating QoS: proportion %lf, diff %" GST_STIME_FORMAT ", timestamp %"
      GST_TIME_FORMAT, proportion, GST_STIME_ARGS (diff),
      GST_TIME_ARGS (timestamp));
  GST_CLOCK_TIME_IS_VALID (gst_aggregator_get_latency (GST_AGGREGATOR
  GST_OBJECT_LOCK (vagg);
  vagg->priv->proportion = proportion;
  if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
    /* we are late (diff > 0) and not live: skip ahead by an extra margin */
    if (!live && G_UNLIKELY (diff > 0))
      vagg->priv->earliest_time =
          timestamp + 2 * diff + gst_util_uint64_scale_int_round (GST_SECOND,
          GST_VIDEO_INFO_FPS_D (&vagg->info),
          GST_VIDEO_INFO_FPS_N (&vagg->info));
      vagg->priv->earliest_time = timestamp + diff;
    vagg->priv->earliest_time = GST_CLOCK_TIME_NONE;
  GST_OBJECT_UNLOCK (vagg);
/* Clears all QoS state: neutral proportion (0.5), no earliest time, and
 * zeroed processed/dropped frame counters. */
gst_video_aggregator_reset_qos (GstVideoAggregator * vagg)
  gst_video_aggregator_update_qos (vagg, 0.5, 0, GST_CLOCK_TIME_NONE);
  vagg->priv->qos_processed = vagg->priv->qos_dropped = 0;
/* Reads the current QoS observation (proportion and earliest useful time)
 * atomically under the object lock. */
gst_video_aggregator_read_qos (GstVideoAggregator * vagg, gdouble * proportion,
    GstClockTime * time)
  GST_OBJECT_LOCK (vagg);
  *proportion = vagg->priv->proportion;
  *time = vagg->priv->earliest_time;
  GST_OBJECT_UNLOCK (vagg);
/* Resets the aggregator to its initial state: clears the output video info,
 * timestamp offset, frame count, liveness flag and QoS state, invalidates
 * the src segment position, and drops every sink pad's queued buffer, caps
 * and timing info. */
gst_video_aggregator_reset (GstVideoAggregator * vagg)
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GST_OBJECT_LOCK (vagg);
  gst_video_info_init (&vagg->info);
  GST_OBJECT_UNLOCK (vagg);
  vagg->priv->ts_offset = 0;
  vagg->priv->nframes = 0;
  vagg->priv->live = FALSE;
  /* -1 marks the position as unset; recomputed on the next aggregate cycle */
  GST_AGGREGATOR_PAD (agg->srcpad)->segment.position = -1;
  gst_video_aggregator_reset_qos (vagg);
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *p = l->data;
    /* drop any held buffer/caps and forget the pad's buffer time range */
    gst_buffer_replace (&p->priv->buffer, NULL);
    gst_caps_replace (&p->priv->caps, NULL);
    p->priv->start_time = -1;
    p->priv->end_time = -1;
    gst_video_info_init (&p->info);
  GST_OBJECT_UNLOCK (vagg);
static GstFlowReturn
/* Core buffer-selection loop.  For every sink pad, picks into
 * pad->priv->buffer the queued buffer that overlaps the output interval
 * [output_start_running_time, output_end_running_time), dropping buffers
 * from the past, keeping future buffers queued, and handling EOS /
 * repeat-after-eos / max-last-buffer-repeat semantics.  Pending caps
 * changes are applied exactly when the matching buffer is taken.
 * Returns GST_FLOW_OK, GST_AGGREGATOR_FLOW_NEED_DATA, GST_FLOW_EOS or
 * GST_FLOW_ERROR (untimestamped buffer).
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
    GstClockTime output_start_running_time,
    GstClockTime output_end_running_time)
  gboolean eos = TRUE;
  gboolean repeat_pad_eos = FALSE;
  gboolean has_no_repeat_pads = FALSE;
  gboolean need_more_data = FALSE;
  gboolean need_reconfigure = FALSE;
  /* get a set of buffers into pad->priv->buffer that are within output_start_running_time
   * and output_end_running_time taking into account finished and unresponsive pads */
  GST_OBJECT_LOCK (vagg);
  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
    GstVideoAggregatorPad *pad = l->data;
    GstAggregatorPad *bpad;
    bpad = GST_AGGREGATOR_PAD (pad);
    /* snapshot the pad segment under the pad lock */
    GST_OBJECT_LOCK (bpad);
    segment = bpad->segment;
    GST_OBJECT_UNLOCK (bpad);
    is_eos = gst_aggregator_pad_is_eos (bpad);
    if (!pad->priv->repeat_after_eos)
      has_no_repeat_pads = TRUE;
    buf = gst_aggregator_pad_peek_buffer (bpad);
      GstClockTime start_time, end_time;
      start_time = GST_BUFFER_TIMESTAMP (buf);
      /* timestamps are mandatory: we cannot place the buffer otherwise */
      if (start_time == -1) {
        gst_buffer_unref (buf);
        GST_ERROR_OBJECT (pad, "Need timestamped buffers!");
        GST_OBJECT_UNLOCK (vagg);
        return GST_FLOW_ERROR;
      /* FIXME: Make all this work with negative rates */
      end_time = GST_BUFFER_DURATION (buf);
      /* buffers without a duration get special handling: position them by
       * start time only */
      if (end_time == -1) {
        start_time = MAX (start_time, segment.start);
        gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
        if (start_time >= output_end_running_time) {
          /* buffer is for a future output frame: leave it queued */
          if (pad->priv->buffer) {
            GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
                "output_end_running_time. Keeping previous buffer");
            GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time >= "
                "output_end_running_time. No previous buffer.");
          gst_buffer_unref (buf);
        } else if (start_time < output_start_running_time) {
          GST_DEBUG_OBJECT (pad, "buffer duration is -1, start_time < "
              "output_start_running_time. Discarding old buffer");
          gst_buffer_replace (&pad->priv->buffer, buf);
          /* apply caps that were delayed until this buffer was reached */
          if (pad->priv->pending_vinfo.finfo) {
            gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
            gst_caps_replace (&pad->priv->pending_caps, NULL);
            pad->info = pad->priv->pending_vinfo;
            need_reconfigure = TRUE;
            pad->priv->pending_vinfo.finfo = NULL;
          gst_buffer_unref (buf);
          gst_aggregator_pad_drop_buffer (bpad);
          pad->priv->start_time = start_time;
          need_more_data = TRUE;
          /* buffer overlaps the current interval: take it */
          gst_buffer_unref (buf);
          buf = gst_aggregator_pad_pop_buffer (bpad);
          gst_buffer_replace (&pad->priv->buffer, buf);
          if (pad->priv->pending_vinfo.finfo) {
            gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
            gst_caps_replace (&pad->priv->pending_caps, NULL);
            pad->info = pad->priv->pending_vinfo;
            need_reconfigure = TRUE;
            pad->priv->pending_vinfo.finfo = NULL;
          /* FIXME: Set end_time to something here? */
          pad->priv->start_time = start_time;
          gst_buffer_unref (buf);
          GST_DEBUG_OBJECT (pad, "buffer duration is -1");
      g_assert (start_time != -1 && end_time != -1);
      end_time += start_time;   /* convert from duration to position */
      /* Check if it's inside the segment */
      if (start_time >= segment.stop || end_time < segment.start) {
        GST_DEBUG_OBJECT (pad,
            "Buffer outside the segment : segment: [%" GST_TIME_FORMAT " -- %"
            GST_TIME_FORMAT "]" " Buffer [%" GST_TIME_FORMAT " -- %"
            GST_TIME_FORMAT "]", GST_TIME_ARGS (segment.stop),
            GST_TIME_ARGS (segment.start), GST_TIME_ARGS (start_time),
            GST_TIME_ARGS (end_time));
        gst_buffer_unref (buf);
        gst_aggregator_pad_drop_buffer (bpad);
        need_more_data = TRUE;
      /* Clip to segment and convert to running time */
      start_time = MAX (start_time, segment.start);
      if (segment.stop != -1)
        end_time = MIN (end_time, segment.stop);
      gst_segment_to_running_time (&segment, GST_FORMAT_TIME, start_time);
      gst_segment_to_running_time (&segment, GST_FORMAT_TIME, end_time);
      g_assert (start_time != -1 && end_time != -1);
      GST_TRACE_OBJECT (pad, "dealing with buffer %p start %" GST_TIME_FORMAT
          " end %" GST_TIME_FORMAT " out start %" GST_TIME_FORMAT
          " out end %" GST_TIME_FORMAT, buf, GST_TIME_ARGS (start_time),
          GST_TIME_ARGS (end_time), GST_TIME_ARGS (output_start_running_time),
          GST_TIME_ARGS (output_end_running_time));
      /* never go backwards in time on a pad */
      if (pad->priv->end_time != -1 && pad->priv->end_time > end_time) {
        GST_DEBUG_OBJECT (pad, "Buffer from the past, dropping");
        gst_buffer_unref (buf);
        gst_aggregator_pad_drop_buffer (bpad);
      if (end_time > output_start_running_time
          && start_time < output_end_running_time) {
        /* buffer overlaps the output interval: select it for this frame */
        GST_DEBUG_OBJECT (pad,
            "Taking new buffer with start time %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start_time));
        gst_buffer_replace (&pad->priv->buffer, buf);
        if (pad->priv->pending_vinfo.finfo) {
          gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
          gst_caps_replace (&pad->priv->pending_caps, NULL);
          pad->info = pad->priv->pending_vinfo;
          need_reconfigure = TRUE;
          pad->priv->pending_vinfo.finfo = NULL;
        pad->priv->start_time = start_time;
        pad->priv->end_time = end_time;
        gst_buffer_unref (buf);
        gst_aggregator_pad_drop_buffer (bpad);
      } else if (start_time >= output_end_running_time) {
        /* entirely in the future: keep queued for a later output frame */
        GST_DEBUG_OBJECT (pad, "Keeping buffer until %" GST_TIME_FORMAT,
            GST_TIME_ARGS (start_time));
        gst_buffer_unref (buf);
        gst_buffer_replace (&pad->priv->buffer, buf);
        if (pad->priv->pending_vinfo.finfo) {
          gst_caps_replace (&pad->priv->caps, pad->priv->pending_caps);
          gst_caps_replace (&pad->priv->pending_caps, NULL);
          pad->info = pad->priv->pending_vinfo;
          need_reconfigure = TRUE;
          pad->priv->pending_vinfo.finfo = NULL;
        pad->priv->start_time = start_time;
        pad->priv->end_time = end_time;
        GST_DEBUG_OBJECT (pad,
            "replacing old buffer with a newer buffer, start %" GST_TIME_FORMAT
            " out end %" GST_TIME_FORMAT, GST_TIME_ARGS (start_time),
            GST_TIME_ARGS (output_end_running_time));
        gst_buffer_unref (buf);
        gst_aggregator_pad_drop_buffer (bpad);
        need_more_data = TRUE;
      /* EOS pad configured to repeat: keep re-using its last buffer */
      if (is_eos && pad->priv->repeat_after_eos) {
        repeat_pad_eos = TRUE;
        GST_DEBUG_OBJECT (pad, "ignoring EOS and re-using previous buffer");
      if (pad->priv->end_time != -1) {
        if (pad->priv->end_time <= output_start_running_time) {
          GST_DEBUG_OBJECT (pad, "I just need more data");
          /* stop repeating the stale buffer once max-last-buffer-repeat
           * has elapsed since its end time */
          if (GST_CLOCK_TIME_IS_VALID (pad->priv->max_last_buffer_repeat)) {
            if (output_start_running_time - pad->priv->end_time >
                pad->priv->max_last_buffer_repeat) {
              pad->priv->start_time = pad->priv->end_time = -1;
              gst_buffer_replace (&pad->priv->buffer, NULL);
              gst_caps_replace (&pad->priv->caps, NULL);
          pad->priv->start_time = pad->priv->end_time = -1;
          need_more_data = TRUE;
          gst_buffer_replace (&pad->priv->buffer, NULL);
          gst_caps_replace (&pad->priv->caps, NULL);
          pad->priv->start_time = pad->priv->end_time = -1;
      } else if (is_eos) {
      } else if (is_eos) {
        gst_buffer_replace (&pad->priv->buffer, NULL);
        gst_caps_replace (&pad->priv->caps, NULL);
      } else if (pad->priv->start_time != -1) {
        /* When the current buffer didn't have a duration, but
         * max-last-buffer-repeat was set, we use start_time as
         * the comparison point
        if (pad->priv->start_time <= output_start_running_time) {
          if (GST_CLOCK_TIME_IS_VALID (pad->priv->max_last_buffer_repeat)) {
            if (output_start_running_time - pad->priv->start_time >
                pad->priv->max_last_buffer_repeat) {
              pad->priv->start_time = pad->priv->end_time = -1;
              gst_buffer_replace (&pad->priv->buffer, NULL);
              gst_caps_replace (&pad->priv->caps, NULL);
  GST_OBJECT_UNLOCK (vagg);
  /* a pad's caps changed while selecting buffers: renegotiate before use */
  if (need_reconfigure)
    gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
    return GST_AGGREGATOR_FLOW_NEED_DATA;
  if (eos && !has_no_repeat_pads && repeat_pad_eos)
    return GST_FLOW_EOS;
/* gst_element_foreach_sink_pad() callback: syncs GstController-driven pad
 * properties to the output stream time passed via @user_data. */
sync_pad_values (GstElement * vagg, GstPad * pad, gpointer user_data)
  gint64 *out_stream_time = user_data;
  /* sync object properties on stream time */
  if (GST_CLOCK_TIME_IS_VALID (*out_stream_time))
    gst_object_sync_values (GST_OBJECT_CAST (pad), *out_stream_time);
/* gst_element_foreach_sink_pad() callback: maps/converts the pad's selected
 * buffer into pad->priv->prepared_frame via the pad class' prepare_frame
 * vfunc.  Pads without a buffer, without the vfunc, or holding an empty
 * GAP buffer are skipped (prepared_frame stays zeroed). */
prepare_frames (GstElement * agg, GstPad * pad, gpointer user_data)
  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
  GstVideoAggregatorPadClass *vaggpad_class =
      GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
  /* start from a clean slate so stale frame data is never reused */
  memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
  if (vpad->priv->buffer == NULL || !vaggpad_class->prepare_frame)
  /* GAP event, nothing to do */
  if (vpad->priv->buffer &&
      gst_buffer_get_size (vpad->priv->buffer) == 0 &&
      GST_BUFFER_FLAG_IS_SET (vpad->priv->buffer, GST_BUFFER_FLAG_GAP)) {
  return vaggpad_class->prepare_frame (vpad, GST_VIDEO_AGGREGATOR_CAST (agg),
      vpad->priv->buffer, &vpad->priv->prepared_frame);
/* gst_element_foreach_sink_pad() callback: lets the pad class release any
 * resources attached to the prepared frame (clean_frame vfunc, optional),
 * then zeroes prepared_frame so it cannot be used again. */
clean_pad (GstElement * agg, GstPad * pad, gpointer user_data)
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR_CAST (agg);
  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD_CAST (pad);
  GstVideoAggregatorPadClass *vaggpad_class =
      GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
  if (vaggpad_class->clean_frame)
    vaggpad_class->clean_frame (vpad, vagg, &vpad->priv->prepared_frame);
  memset (&vpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
static GstFlowReturn
/* Produces one output buffer: asks the subclass for an output buffer,
 * stamps it with the output timestamp/duration, syncs controlled pad
 * properties to stream time, notifies selected samples, prepares all input
 * frames, runs the subclass' aggregate_frames(), then cleans the frames up.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_do_aggregate (GstVideoAggregator * vagg,
    GstClockTime output_start_time, GstClockTime output_end_time,
    GstClockTime output_start_running_time, GstBuffer ** outbuf)
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstFlowReturn ret = GST_FLOW_OK;
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (vagg);
  GstVideoAggregatorClass *vagg_klass = (GstVideoAggregatorClass *) klass;
  GstClockTime out_stream_time;
  /* both vfuncs have default implementations, so they must never be NULL */
  g_assert (vagg_klass->aggregate_frames != NULL);
  g_assert (vagg_klass->create_output_buffer != NULL);
  if ((ret = vagg_klass->create_output_buffer (vagg, outbuf)) != GST_FLOW_OK) {
    GST_WARNING_OBJECT (vagg, "Could not get an output buffer, reason: %s",
        gst_flow_get_name (ret));
  if (*outbuf == NULL) {
    /* sub-class doesn't want to generate output right now */
  GST_BUFFER_TIMESTAMP (*outbuf) = output_start_time;
  GST_BUFFER_DURATION (*outbuf) = output_end_time - output_start_time;
  /* compute the stream time matching the output start, under the src pad lock */
  GST_OBJECT_LOCK (agg->srcpad);
  gst_segment_to_stream_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
      GST_FORMAT_TIME, output_start_time);
  GST_OBJECT_UNLOCK (agg->srcpad);
  /* Sync pad properties to the stream time */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), sync_pad_values,
  /* Let the application know that input buffers have been staged */
  gst_aggregator_selected_samples (agg, GST_BUFFER_PTS (*outbuf),
      GST_BUFFER_DTS (*outbuf), GST_BUFFER_DURATION (*outbuf), NULL);
  /* Convert all the frames the subclass has before aggregating */
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), prepare_frames, NULL);
  ret = vagg_klass->aggregate_frames (vagg, *outbuf);
  gst_element_foreach_sink_pad (GST_ELEMENT_CAST (vagg), clean_pad, NULL);
/* Perform qos calculations before processing the next frame. Returns TRUE if
 * the frame should be processed, FALSE if the frame can be dropped entirely */
gst_video_aggregator_do_qos (GstVideoAggregator * vagg, GstClockTime timestamp)
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  GstClockTime qostime, earliest_time;
  /* no timestamp, can't do QoS => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
    GST_LOG_OBJECT (vagg, "invalid timestamp, can't do QoS, process frame");
  /* get latest QoS observation values */
  gst_video_aggregator_read_qos (vagg, &proportion, &earliest_time);
  /* skip qos if we have no observation (yet) => process frame */
  if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
    GST_LOG_OBJECT (vagg, "no observation yet, process frame");
  /* qos is done on running time */
  gst_segment_to_running_time (&GST_AGGREGATOR_PAD (agg->srcpad)->segment,
      GST_FORMAT_TIME, timestamp);
  /* see how our next timestamp relates to the latest qos timestamp */
  GST_LOG_OBJECT (vagg, "qostime %" GST_TIME_FORMAT ", earliest %"
      GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
  /* positive jitter means this frame would arrive after the earliest useful
   * time downstream reported: drop it */
  jitter = GST_CLOCK_DIFF (qostime, earliest_time);
  if (qostime != GST_CLOCK_TIME_NONE && jitter > 0) {
    GST_DEBUG_OBJECT (vagg, "we are late, drop frame");
  GST_LOG_OBJECT (vagg, "process frame");
/* Advances the src segment position by one frame duration when aggregation
 * timed out without data, so live pipelines keep producing output.  Uses
 * 25/1 fps as the frame duration when no output framerate is known yet, and
 * honours the segment direction (rate sign). */
gst_video_aggregator_advance_on_timeout (GstVideoAggregator * vagg)
  GstAggregator *agg = GST_AGGREGATOR (vagg);
  guint64 frame_duration;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
  GST_OBJECT_LOCK (agg);
  /* position unset: start from the segment boundary matching the direction */
  if (agg_segment->position == -1) {
    if (agg_segment->rate > 0.0)
      agg_segment->position = agg_segment->start;
      agg_segment->position = agg_segment->stop;
  /* Advance position */
  fps_d = GST_VIDEO_INFO_FPS_D (&vagg->info) ?
      GST_VIDEO_INFO_FPS_D (&vagg->info) : 1;
  fps_n = GST_VIDEO_INFO_FPS_N (&vagg->info) ?
      GST_VIDEO_INFO_FPS_N (&vagg->info) : 25;
  /* Default to 25/1 if no "best fps" is known */
  frame_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
  if (agg_segment->rate > 0.0)
    agg_segment->position += frame_duration;
  else if (agg_segment->position > frame_duration)
    agg_segment->position -= frame_duration;
    agg_segment->position = 0;
  vagg->priv->nframes++;
  GST_OBJECT_UNLOCK (agg);
static GstFlowReturn
/* GstAggregatorClass::aggregate implementation — the main output loop step.
 * Computes the output time interval from the src segment, frame counter and
 * framerate, fills per-pad buffers for that interval, handles renegotiation
 * requests, applies QoS (possibly dropping the frame and posting a QoS
 * message), aggregates one buffer and pushes it downstream.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
  GstClockTime output_start_time, output_end_time;
  GstClockTime output_start_running_time, output_end_running_time;
  GstBuffer *outbuf = NULL;
  GstFlowReturn flow_ret;
  GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  /* output not negotiated yet: on timeout just advance the position and
   * report that more data is needed */
  if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
    gst_video_aggregator_advance_on_timeout (vagg);
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  output_start_time = agg_segment->position;
  if (agg_segment->position == -1 || agg_segment->position < agg_segment->start)
    output_start_time = agg_segment->start;
  /* first frame after a (re)start: anchor the timestamp offset here */
  if (vagg->priv->nframes == 0) {
    vagg->priv->ts_offset = output_start_time;
    GST_DEBUG_OBJECT (vagg, "New ts offset %" GST_TIME_FORMAT,
        GST_TIME_ARGS (output_start_time));
  if (GST_VIDEO_INFO_FPS_N (&vagg->info) == 0) {
    output_end_time = -1;
    /* end of frame nframes+1 relative to the ts offset, avoiding drift by
     * scaling from the frame count instead of accumulating durations */
    vagg->priv->ts_offset +
        gst_util_uint64_scale_round (vagg->priv->nframes + 1,
        GST_SECOND * GST_VIDEO_INFO_FPS_D (&vagg->info),
        GST_VIDEO_INFO_FPS_N (&vagg->info));
  if (agg_segment->stop != -1)
    output_end_time = MIN (output_end_time, agg_segment->stop);
  output_start_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
  output_end_running_time =
      gst_segment_to_running_time (agg_segment, GST_FORMAT_TIME,
  /* zero-length interval: the segment is exhausted */
  if (output_end_time == output_start_time) {
    flow_ret = GST_FLOW_EOS;
    gst_video_aggregator_fill_queues (vagg, output_start_running_time,
        output_end_running_time);
  if (flow_ret == GST_AGGREGATOR_FLOW_NEED_DATA && !timeout) {
    GST_DEBUG_OBJECT (vagg, "Need more data for decisions");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_EOS) {
    GST_DEBUG_OBJECT (vagg, "All sinkpads are EOS -- forwarding");
    goto unlock_and_return;
  } else if (flow_ret == GST_FLOW_ERROR) {
    GST_WARNING_OBJECT (vagg, "Error collecting buffers");
    goto unlock_and_return;
  /* It is possible that gst_video_aggregator_fill_queues() marked the pad
   * for reconfiguration. In this case we have to reconfigure before continuing
   * because we have picked a new buffer with different caps than before from
   * one one of the sink pads and continuing here may lead to a crash.
   * https://bugzilla.gnome.org/show_bug.cgi?id=780682
   */
  if (gst_pad_needs_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg))) {
    GST_DEBUG_OBJECT (vagg, "Need reconfigure");
    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
    goto unlock_and_return;
  GST_DEBUG_OBJECT (vagg,
      "Producing buffer for %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
      ", running time start %" GST_TIME_FORMAT ", running time end %"
      GST_TIME_FORMAT, GST_TIME_ARGS (output_start_time),
      GST_TIME_ARGS (output_end_time),
      GST_TIME_ARGS (output_start_running_time),
      GST_TIME_ARGS (output_end_running_time));
  jitter = gst_video_aggregator_do_qos (vagg, output_start_time);
  flow_ret = gst_video_aggregator_do_aggregate (vagg, output_start_time,
      output_end_time, output_start_running_time, &outbuf);
  if (flow_ret != GST_FLOW_OK)
  vagg->priv->qos_processed++;
  /* frame dropped by QoS: account for it and post a QoS message */
  vagg->priv->qos_dropped++;
  gst_message_new_qos (GST_OBJECT_CAST (vagg), vagg->priv->live,
      output_start_running_time, gst_segment_to_stream_time (agg_segment,
          GST_FORMAT_TIME, output_start_time), output_start_time,
      output_end_time - output_start_time);
  gst_message_set_qos_values (msg, jitter, vagg->priv->proportion, 1000000);
  gst_message_set_qos_stats (msg, GST_FORMAT_BUFFERS,
      vagg->priv->qos_processed, vagg->priv->qos_dropped);
  gst_element_post_message (GST_ELEMENT_CAST (vagg), msg);
  flow_ret = GST_FLOW_OK;
  /* drop the lock while pushing downstream to avoid blocking sink pads */
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  GST_DEBUG_OBJECT (vagg,
      "Pushing buffer with ts %" GST_TIME_FORMAT " and duration %"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (GST_BUFFER_DURATION (outbuf)));
  flow_ret = gst_aggregator_finish_buffer (agg, outbuf);
  GST_VIDEO_AGGREGATOR_LOCK (vagg);
  vagg->priv->nframes++;
  agg_segment->position = output_end_time;
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
  gst_buffer_unref (outbuf);
  GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* FIXME, the duration query should reflect how long you will produce
 * data, that is the amount of stream time until you will emit EOS.
 *
 * For synchronized aggregating this is always the max of all the durations
 * of upstream since we emit EOS when all of them finished.
 *
 * We don't do synchronized aggregating so this really depends on where the
 * streams where punched in and what their relative offsets are against
 * each other which we can get from the first timestamps we see.
 *
 * When we add a new stream (or remove a stream) the duration might
 * also become invalid again and we need to post a new DURATION
 * message to notify this fact to the parent.
 * For now we take the max of all the upstream elements so the simple
 * cases work at least somewhat.
 */
/* Answers a DURATION query by taking the maximum duration reported by all
 * sink pads' upstream peers, iterating with resync handling.
 * NOTE(review): this chunk is elided; some original lines are not visible. */
gst_video_aggregator_query_duration (GstVideoAggregator * vagg,
  GValue item = { 0 };
  gst_query_parse_duration (query, &format, NULL);
  /* Take maximum of all durations */
  it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (vagg));
  switch (gst_iterator_next (it, &item)) {
    case GST_ITERATOR_DONE:
    case GST_ITERATOR_OK:
      pad = g_value_get_object (&item);
      /* ask sink peer for duration */
      res &= gst_pad_peer_query_duration (pad, format, &duration);
      /* take max from all valid return values */
      /* valid unknown length, stop searching */
      if (duration == -1) {
      /* else see if bigger than current max */
      else if (duration > max)
      g_value_reset (&item);
    case GST_ITERATOR_RESYNC:
      /* pad list changed mid-iteration: start over */
      gst_iterator_resync (it);
  g_value_unset (&item);
  gst_iterator_free (it);
  /* and store the max */
  GST_DEBUG_OBJECT (vagg, "Total duration in format %s: %"
      GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
  gst_query_set_duration (query, format, max);
/* GstAggregator::src_query vmethod.
 *
 * POSITION (TIME format only) is answered from the output segment,
 * converted to stream time; DURATION is delegated to
 * gst_video_aggregator_query_duration(); LATENCY is first chained up to
 * GstAggregator and the resulting live flag is cached in priv->live (it
 * is later reported in QoS messages).  Everything else is chained up.
 *
 * NOTE(review): return type line, case break statements, closing braces
 * and the final return are missing from this copy -- reconcile against
 * upstream gstvideoaggregator.c. */
2213 gst_video_aggregator_src_query (GstAggregator * agg, GstQuery * query)
2215 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2216 gboolean res = FALSE;
2217 GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
2219 switch (GST_QUERY_TYPE (query)) {
2220 case GST_QUERY_POSITION:
2224 gst_query_parse_position (query, &format, NULL);
2227 case GST_FORMAT_TIME:
2228 gst_query_set_position (query, format,
2229 gst_segment_to_stream_time (agg_segment, GST_FORMAT_TIME,
2230 agg_segment->position));
2238 case GST_QUERY_DURATION:
2239 res = gst_video_aggregator_query_duration (vagg, query);
2241 case GST_QUERY_LATENCY:
2243 GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
/* Cache whether any upstream is live; used when posting QoS messages. */
2247 gst_query_parse_latency (query, &vagg->priv->live, NULL, NULL);
2252 GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_query
/* GstAggregator::src_event vmethod.
 *
 * QOS events from downstream are parsed and folded into the QoS state
 * (proportion / jitter diff / timestamp) via
 * gst_video_aggregator_update_qos(); SEEK events are only logged here.
 * All events are then chained up to the GstAggregator implementation.
 *
 * NOTE(review): the event-type case label for QOS and the local
 * declarations of type/proportion are missing from this copy --
 * reconcile against upstream gstvideoaggregator.c. */
2260 gst_video_aggregator_src_event (GstAggregator * agg, GstEvent * event)
2262 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2264 switch (GST_EVENT_TYPE (event)) {
2268 GstClockTimeDiff diff;
2269 GstClockTime timestamp;
2272 gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
2273 gst_video_aggregator_update_qos (vagg, proportion, diff, timestamp);
2276 case GST_EVENT_SEEK:
2278 GST_DEBUG_OBJECT (vagg, "Handling SEEK event");
2285 GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->src_event (agg,
/* GstAggregator::flush vmethod.
 *
 * Rescales each sink pad's cached start/end times to the (possibly new)
 * output segment rate, then resets the output segment position, the
 * timestamp offset, the produced-frame counter and the QoS state.
 *
 * NOTE(review): the declarations of abs_rate and the GList iterator, the
 * old-rate bookkeeping and closing braces are missing from this copy; as
 * written, the ABS(rate) != abs_rate comparison right after the
 * assignment can never be true -- reconcile against upstream
 * gstvideoaggregator.c. */
2289 static GstFlowReturn
2290 gst_video_aggregator_flush (GstAggregator * agg)
2294 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2295 GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
2297 GST_INFO_OBJECT (agg, "Flushing");
2298 GST_OBJECT_LOCK (vagg);
2299 abs_rate = ABS (agg_segment->rate);
2300 for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
2301 GstVideoAggregatorPad *p = l->data;
2303 /* Convert to the output segment rate */
2304 if (ABS (agg_segment->rate) != abs_rate) {
2305 if (ABS (agg_segment->rate) != 1.0 && p->priv->buffer) {
2306 p->priv->start_time /= ABS (agg_segment->rate);
2307 p->priv->end_time /= ABS (agg_segment->rate);
2309 if (abs_rate != 1.0 && p->priv->buffer) {
2310 p->priv->start_time *= abs_rate;
2311 p->priv->end_time *= abs_rate;
2315 GST_OBJECT_UNLOCK (vagg);
/* -1 == GST_CLOCK_TIME_NONE here: position becomes unknown after flush. */
2317 agg_segment->position = -1;
2318 vagg->priv->ts_offset = 0;
2319 vagg->priv->nframes = 0;
2321 gst_video_aggregator_reset_qos (vagg);
/* GstAggregator::sink_event vmethod.
 *
 * CAPS events are handled by gst_video_aggregator_pad_sink_setcaps() and
 * consumed (the event is unreffed here); SEGMENT events are asserted to
 * be in TIME format and reset the QoS state before being chained up; all
 * other events go straight to the parent class.
 *
 * NOTE(review): the return-type line, the CAPS-branch early return and
 * closing braces are missing from this copy -- reconcile against
 * upstream gstvideoaggregator.c. */
2326 gst_video_aggregator_sink_event (GstAggregator * agg, GstAggregatorPad * bpad,
2329 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2330 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2331 gboolean ret = TRUE;
2333 GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
2334 GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));
2336 switch (GST_EVENT_TYPE (event)) {
2337 case GST_EVENT_CAPS:
2341 gst_event_parse_caps (event, &caps);
2343 gst_video_aggregator_pad_sink_setcaps (GST_PAD (pad),
2344 GST_OBJECT (vagg), caps);
/* Caps event fully handled here; it must not be chained up again. */
2345 gst_event_unref (event);
2349 case GST_EVENT_SEGMENT:{
2351 gst_event_copy_segment (event, &seg);
/* Only TIME segments are supported by this base class. */
2353 g_assert (seg.format == GST_FORMAT_TIME);
2354 gst_video_aggregator_reset_qos (vagg);
2362 return GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_event
/* GstAggregator::start vmethod: drop any previously negotiated output
 * caps so they are renegotiated on the next cycle. */
2369 gst_video_aggregator_start (GstAggregator * agg)
2371 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2373 gst_caps_replace (&vagg->priv->current_caps, NULL);
/* GstAggregator::stop vmethod: reset all aggregation state (segment,
 * QoS, cached pad data) via gst_video_aggregator_reset(). */
2379 gst_video_aggregator_stop (GstAggregator * agg)
2381 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2383 gst_video_aggregator_reset (vagg);
2388 /* GstElement vmethods */
/* GstElement::request_new_pad vmethod.
 *
 * Chains up to GstAggregator to actually create the pad, then (under the
 * object lock) gives it a default zorder equal to the current sink-pad
 * count, clears its cached start/end times, and re-sorts the element's
 * sink-pad list by zorder so blending order stays consistent.
 *
 * Returns the new pad, or (presumably, the early-return for the NULL
 * check is missing from this copy) NULL when the parent class refuses. */
2390 gst_video_aggregator_request_new_pad (GstElement * element,
2391 GstPadTemplate * templ, const gchar * req_name, const GstCaps * caps)
2393 GstVideoAggregator *vagg;
2394 GstVideoAggregatorPad *vaggpad;
2396 vagg = GST_VIDEO_AGGREGATOR (element);
2398 vaggpad = (GstVideoAggregatorPad *)
2399 GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->request_new_pad
2400 (element, templ, req_name, caps);
2402 if (vaggpad == NULL)
2405 GST_OBJECT_LOCK (vagg);
/* New pads stack on top by default: zorder == number of existing pads. */
2406 vaggpad->priv->zorder = GST_ELEMENT (vagg)->numsinkpads;
2407 vaggpad->priv->start_time = -1;
2408 vaggpad->priv->end_time = -1;
2409 element->sinkpads = g_list_sort (element->sinkpads,
2410 (GCompareFunc) pad_zorder_compare);
2411 GST_OBJECT_UNLOCK (vagg);
2413 return GST_PAD (vaggpad);
/* GstElement::release_pad vmethod.
 *
 * Under the videoaggregator lock: detects whether this is the last sink
 * pad (in which case the whole aggregator state is reset), drops the
 * pad's cached buffer and caps, chains up to actually remove the pad,
 * and marks the src pad for caps renegotiation.
 *
 * NOTE(review): the declaration of last_pad and the `if (last_pad)`
 * guard around the reset are missing from this copy -- reconcile against
 * upstream gstvideoaggregator.c. */
2417 gst_video_aggregator_release_pad (GstElement * element, GstPad * pad)
2419 GstVideoAggregator *vagg = NULL;
2420 GstVideoAggregatorPad *vaggpad;
2423 vagg = GST_VIDEO_AGGREGATOR (element);
2424 vaggpad = GST_VIDEO_AGGREGATOR_PAD (pad);
2426 GST_VIDEO_AGGREGATOR_LOCK (vagg);
2428 GST_OBJECT_LOCK (vagg);
/* TRUE when the pad being released is the only remaining sink pad. */
2429 last_pad = (GST_ELEMENT (vagg)->numsinkpads - 1 == 0);
2430 GST_OBJECT_UNLOCK (vagg);
2433 gst_video_aggregator_reset (vagg);
2435 gst_buffer_replace (&vaggpad->priv->buffer, NULL);
2436 gst_caps_replace (&vaggpad->priv->caps, NULL);
2437 gst_caps_replace (&vaggpad->priv->pending_caps, NULL);
2439 GST_ELEMENT_CLASS (gst_video_aggregator_parent_class)->release_pad
2440 (GST_ELEMENT (vagg), pad);
/* Output geometry/framerate may change now that a pad is gone. */
2442 gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
2444 GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
/* GstAggregator::propose_allocation vmethod: always advertise support
 * for GstVideoMeta to upstream elements. */
2449 gst_video_aggregator_propose_allocation (GstAggregator * agg,
2450 GstAggregatorPad * pad, GstQuery * decide_query, GstQuery * query)
2452 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
/* GstAggregator::decide_allocation vmethod.
 *
 * Shapes the downstream ALLOCATION query reply for our output buffers:
 *   - guarantees at least one allocation param and bumps every param's
 *     alignment to at least 15 (16-byte aligned video planes);
 *   - adopts the first downstream pool if present (growing its buffer
 *     size to at least vagg->info.size), otherwise creates a
 *     GstVideoBufferPool;
 *   - enables GST_BUFFER_POOL_OPTION_VIDEO_META when downstream supports
 *     GstVideoMeta;
 *   - if the pool rejects our config, validates the adjusted config and
 *     falls back to a fresh GstVideoBufferPool; a second rejection is a
 *     fatal RESOURCE/SETTINGS element error.
 *
 * NOTE(review): loop-variable declaration, several if/else braces and
 * the update/error return paths are missing from this copy -- reconcile
 * against upstream gstvideoaggregator.c. */
2458 gst_video_aggregator_decide_allocation (GstAggregator * agg, GstQuery * query)
2460 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2461 GstAllocationParams params = { 0, 15, 0, 0 };
2463 GstBufferPool *pool;
2464 GstAllocator *allocator;
2465 guint size, min, max;
2466 gboolean update = FALSE;
2467 GstStructure *config = NULL;
2468 GstCaps *caps = NULL;
2470 if (gst_query_get_n_allocation_params (query) == 0) {
2471 gst_query_add_allocation_param (query, NULL, &params);
2473 for (i = 0; i < gst_query_get_n_allocation_params (query); i++) {
2474 GstAllocator *allocator;
2476 gst_query_parse_nth_allocation_param (query, i, &allocator, &params);
/* Force at least 16-byte alignment on every proposed allocator. */
2477 params.align = MAX (params.align, 15);
2478 gst_query_set_nth_allocation_param (query, i, allocator, &params);
2482 gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
2484 if (gst_query_get_n_allocation_pools (query) > 0) {
2485 gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
/* Downstream's size may be too small for our negotiated frames. */
2488 size = MAX (size, vagg->info.size);
2492 size = vagg->info.size;
2497 gst_query_parse_allocation (query, &caps, NULL);
2499 /* no downstream pool, make our own */
2501 pool = gst_video_buffer_pool_new ();
2503 config = gst_buffer_pool_get_config (pool);
2505 gst_buffer_pool_config_set_params (config, caps, size, min, max);
2506 gst_buffer_pool_config_set_allocator (config, allocator, &params);
2507 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
2508 gst_buffer_pool_config_add_option (config,
2509 GST_BUFFER_POOL_OPTION_VIDEO_META);
2512 /* buffer pool may have to do some changes */
2513 if (!gst_buffer_pool_set_config (pool, config)) {
2514 config = gst_buffer_pool_get_config (pool);
2516 /* If change are not acceptable, fallback to generic pool */
2517 if (!gst_buffer_pool_config_validate_params (config, caps, size, min, max)) {
2518 GST_DEBUG_OBJECT (agg, "unsupported pool, making new pool");
2520 gst_object_unref (pool);
2521 pool = gst_video_buffer_pool_new ();
2522 gst_buffer_pool_config_set_params (config, caps, size, min, max);
2523 gst_buffer_pool_config_set_allocator (config, allocator, &params);
2525 if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
2526 gst_buffer_pool_config_add_option (config,
2527 GST_BUFFER_POOL_OPTION_VIDEO_META);
2531 if (!gst_buffer_pool_set_config (pool, config))
2536 gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
2538 gst_query_add_allocation_pool (query, pool, size, min, max);
2541 gst_object_unref (pool);
2543 gst_object_unref (allocator);
/* Error path: release whatever was acquired before failing. */
2549 gst_object_unref (pool);
2551 gst_object_unref (allocator);
2553 GST_ELEMENT_ERROR (agg, RESOURCE, SETTINGS,
2554 ("Failed to configure the buffer pool"),
2555 ("Configuration is most likely invalid, please report this issue."));
/* Default GstVideoAggregatorClass::create_output_buffer implementation.
 *
 * Produces the buffer the subclass will blend into: if a buffer pool was
 * negotiated, the pool is activated on first use and a buffer acquired
 * from it; otherwise a buffer of GST_VIDEO_INFO_SIZE(&vagg->info) bytes
 * is allocated from the negotiated allocator/params.  Failure to
 * activate the pool or to allocate posts a RESOURCE element error and
 * returns GST_FLOW_ERROR.
 *
 * NOTE(review): the pool NULL-check / else branch structure, the outsize
 * declaration and the final return are missing from this copy --
 * reconcile against upstream gstvideoaggregator.c. */
2559 static GstFlowReturn
2560 gst_video_aggregator_create_output_buffer (GstVideoAggregator * videoaggregator,
2561 GstBuffer ** outbuf)
2563 GstAggregator *aggregator = GST_AGGREGATOR (videoaggregator);
2564 GstBufferPool *pool;
2565 GstFlowReturn ret = GST_FLOW_OK;
2567 pool = gst_aggregator_get_buffer_pool (aggregator);
2570 if (!gst_buffer_pool_is_active (pool)) {
2571 if (!gst_buffer_pool_set_active (pool, TRUE)) {
2572 GST_ELEMENT_ERROR (videoaggregator, RESOURCE, SETTINGS,
2573 ("failed to activate bufferpool"),
2574 ("failed to activate bufferpool"));
2575 return GST_FLOW_ERROR;
2579 ret = gst_buffer_pool_acquire_buffer (pool, outbuf, NULL);
2580 gst_object_unref (pool);
/* No negotiated pool: fall back to a plain allocator allocation. */
2583 GstAllocator *allocator;
2584 GstAllocationParams params;
2586 gst_aggregator_get_allocator (aggregator, &allocator, &params);
2588 outsize = GST_VIDEO_INFO_SIZE (&videoaggregator->info);
2589 *outbuf = gst_buffer_new_allocate (allocator, outsize, &params);
2592 gst_object_unref (allocator);
2594 if (*outbuf == NULL) {
2595 GST_ELEMENT_ERROR (videoaggregator, RESOURCE, NO_SPACE_LEFT,
2596 (NULL), ("Could not acquire buffer of size: %d", outsize));
2597 ret = GST_FLOW_ERROR;
/* ACCEPT_CAPS helper for sink pads.
 *
 * Builds the set of caps this pad can take by starting from the src
 * pad's current (or template) caps and relaxing them: any framerate is
 * allowed on every structure, and convert-capable pads additionally
 * accept any width/height and drop colorimetry/chroma-site/format/
 * pixel-aspect-ratio constraints.  The incoming caps are accepted iff
 * they intersect that relaxed set.
 *
 * NOTE(review): return type, local declarations (ret, i, n, s) and the
 * final return are missing from this copy -- reconcile against upstream
 * gstvideoaggregator.c. */
2604 gst_video_aggregator_pad_sink_acceptcaps (GstPad * pad,
2605 GstVideoAggregator * vagg, GstCaps * caps)
2608 GstCaps *accepted_caps;
2611 GstAggregator *agg = GST_AGGREGATOR (vagg);
2613 GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, caps);
2615 accepted_caps = gst_pad_get_current_caps (GST_PAD (agg->srcpad));
2617 if (accepted_caps == NULL)
2618 accepted_caps = gst_pad_get_pad_template_caps (GST_PAD (agg->srcpad));
2620 accepted_caps = gst_caps_make_writable (accepted_caps);
2622 GST_LOG_OBJECT (pad, "src caps %" GST_PTR_FORMAT, accepted_caps);
2624 n = gst_caps_get_size (accepted_caps);
2625 for (i = 0; i < n; i++) {
2626 s = gst_caps_get_structure (accepted_caps, i);
/* Input framerate need not match the output framerate. */
2627 gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT,
2630 if (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad)) {
/* Convert pads can rescale and convert, so geometry and format are free. */
2631 gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
2632 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT, NULL);
2633 gst_structure_remove_fields (s, "colorimetry", "chroma-site", "format",
2634 "pixel-aspect-ratio", NULL);
2638 ret = gst_caps_can_intersect (caps, accepted_caps);
2639 GST_DEBUG_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT,
2640 (ret ? "" : "not "), caps);
2641 gst_caps_unref (accepted_caps);
/* GstAggregator::sink_query vmethod.
 *
 * CAPS queries are answered via gst_video_aggregator_pad_sink_getcaps(),
 * ACCEPT_CAPS via gst_video_aggregator_pad_sink_acceptcaps(); all other
 * queries are chained up to GstAggregator.
 *
 * NOTE(review): case break statements, closing braces and the final
 * return are missing from this copy -- reconcile against upstream
 * gstvideoaggregator.c. */
2646 gst_video_aggregator_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
2649 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
2650 GstVideoAggregatorPad *pad = GST_VIDEO_AGGREGATOR_PAD (bpad);
2651 gboolean ret = FALSE;
2653 switch (GST_QUERY_TYPE (query)) {
2654 case GST_QUERY_CAPS:
2656 GstCaps *filter, *caps;
2658 gst_query_parse_caps (query, &filter);
2660 gst_video_aggregator_pad_sink_getcaps (GST_PAD (pad), vagg, filter);
2661 gst_query_set_caps_result (query, caps);
2662 gst_caps_unref (caps);
2666 case GST_QUERY_ACCEPT_CAPS:
2670 gst_query_parse_accept_caps (query, &caps);
2672 gst_video_aggregator_pad_sink_acceptcaps (GST_PAD (pad), vagg, caps);
2673 gst_query_set_accept_caps_result (query, ret);
2679 GST_AGGREGATOR_CLASS (gst_video_aggregator_parent_class)->sink_query
2686 /* GObject vmethods */
/* GObject::finalize: release the private mutex and the supported-formats
 * array, then chain up. */
2688 gst_video_aggregator_finalize (GObject * o)
2690 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2692 g_mutex_clear (&vagg->priv->lock);
2693 g_ptr_array_unref (vagg->priv->supported_formats);
2695 G_OBJECT_CLASS (gst_video_aggregator_parent_class)->finalize (o);
/* GObject::dispose: drop the cached negotiated caps, then chain up.
 * (Dispose may run more than once; gst_caps_replace with NULL is safe.) */
2699 gst_video_aggregator_dispose (GObject * o)
2701 GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (o);
2703 gst_caps_replace (&vagg->priv->current_caps, NULL);
2705 G_OBJECT_CLASS (gst_video_aggregator_parent_class)->dispose (o);
/* GObject::get_property: the base class defines no properties of its
 * own, so every id is invalid here. */
2709 gst_video_aggregator_get_property (GObject * object,
2710 guint prop_id, GValue * value, GParamSpec * pspec)
2714 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::set_property: the base class defines no properties of its
 * own, so every id is invalid here. */
2720 gst_video_aggregator_set_property (GObject * object,
2721 guint prop_id, const GValue * value, GParamSpec * pspec)
2725 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
2730 /* GObject boilerplate */
/* Class initializer.
 *
 * Registers the debug category, adjusts the private-data offset, and
 * wires up all vmethods: GObject (finalize/dispose/properties),
 * GstElement (pad request/release), GstAggregator (start/stop, queries,
 * events, flush, aggregate, caps negotiation, allocation) and the
 * GstVideoAggregatorClass defaults (find_best_format,
 * create_output_buffer, update_caps).  Finally refs the pad class so its
 * GType is registered before any pad is requested. */
2732 gst_video_aggregator_class_init (GstVideoAggregatorClass * klass)
2734 GObjectClass *gobject_class = (GObjectClass *) klass;
2735 GstElementClass *gstelement_class = (GstElementClass *) klass;
2736 GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
2738 GST_DEBUG_CATEGORY_INIT (gst_video_aggregator_debug, "videoaggregator", 0,
2739 "base video aggregator");
2741 gst_video_aggregator_parent_class = g_type_class_peek_parent (klass);
2743 if (video_aggregator_private_offset != 0)
2744 g_type_class_adjust_private_offset (klass,
2745 &video_aggregator_private_offset);
2747 gobject_class->finalize = gst_video_aggregator_finalize;
2748 gobject_class->dispose = gst_video_aggregator_dispose;
2750 gobject_class->get_property = gst_video_aggregator_get_property;
2751 gobject_class->set_property = gst_video_aggregator_set_property;
2753 gstelement_class->request_new_pad =
2754 GST_DEBUG_FUNCPTR (gst_video_aggregator_request_new_pad);
2755 gstelement_class->release_pad =
2756 GST_DEBUG_FUNCPTR (gst_video_aggregator_release_pad);
2758 agg_class->start = gst_video_aggregator_start;
2759 agg_class->stop = gst_video_aggregator_stop;
2760 agg_class->sink_query = gst_video_aggregator_sink_query;
2761 agg_class->sink_event = gst_video_aggregator_sink_event;
2762 agg_class->flush = gst_video_aggregator_flush;
2763 agg_class->aggregate = gst_video_aggregator_aggregate;
2764 agg_class->src_event = gst_video_aggregator_src_event;
2765 agg_class->src_query = gst_video_aggregator_src_query;
2766 agg_class->get_next_time = gst_aggregator_simple_get_next_time;
2767 agg_class->update_src_caps = gst_video_aggregator_default_update_src_caps;
2768 agg_class->fixate_src_caps = gst_video_aggregator_default_fixate_src_caps;
2769 agg_class->negotiated_src_caps =
2770 gst_video_aggregator_default_negotiated_src_caps;
2771 agg_class->decide_allocation = gst_video_aggregator_decide_allocation;
2772 agg_class->propose_allocation = gst_video_aggregator_propose_allocation;
2773 agg_class->peek_next_sample = gst_video_aggregator_peek_next_sample;
2775 klass->find_best_format = gst_video_aggregator_find_best_format;
2776 klass->create_output_buffer = gst_video_aggregator_create_output_buffer;
2777 klass->update_caps = gst_video_aggregator_default_update_caps;
2779 /* Register the pad class */
2780 g_type_class_ref (GST_TYPE_VIDEO_AGGREGATOR_PAD);
2784 gst_video_aggregator_init (GstVideoAggregator * vagg,
2785 GstVideoAggregatorClass * klass)
2787 GstCaps *src_template;
2788 GstPadTemplate *pad_template;
2791 vagg->priv = gst_video_aggregator_get_instance_private (vagg);
2792 vagg->priv->current_caps = NULL;
2794 g_mutex_init (&vagg->priv->lock);
2796 /* initialize variables */
2797 gst_video_aggregator_reset (vagg);
2799 /* Finding all supported formats */
2800 vagg->priv->supported_formats = g_ptr_array_new ();
2802 gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
2803 src_template = gst_pad_template_get_caps (pad_template);
2804 for (i = 0; i < gst_caps_get_size (src_template); i++) {
2806 gst_structure_get_value (gst_caps_get_structure (src_template, i),
2809 if (G_VALUE_HOLDS_STRING (v)) {
2810 GstVideoFormat f = gst_video_format_from_string (g_value_get_string (v));
2811 GstVideoFormatInfo *format_info =
2812 (GstVideoFormatInfo *) gst_video_format_get_info (f);
2813 g_ptr_array_add (vagg->priv->supported_formats, format_info);
2817 if (GST_VALUE_HOLDS_LIST (v)) {
2820 for (j = 0; j < gst_value_list_get_size (v); j++) {
2821 const GValue *v1 = gst_value_list_get_value (v, j);
2823 gst_video_format_from_string (g_value_get_string (v1));
2824 GstVideoFormatInfo *format_info =
2825 (GstVideoFormatInfo *) gst_video_format_get_info (f);
2826 g_ptr_array_add (vagg->priv->supported_formats, format_info);
2831 gst_caps_unref (src_template);