1 /* Generic video mixer plugin
2 * Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
21 * SECTION:element-videomixer
23 * Videomixer can accept AYUV, ARGB and BGRA video streams. For each of the requested
24 * sink pads it will compare the incoming geometry and framerate to define the
25 * output parameters. Indeed output video frames will have the geometry of the
26 * biggest incoming video stream and the framerate of the fastest incoming one.
28 * All sink pads must be either AYUV, ARGB or BGRA, but a mixture of them is not
29 * supported. The src pad will have the same colorspace as the sinks.
30 * No colorspace conversion is done.
32 * Individual parameters for each input stream can be configured on the
36 * <title>Sample pipelines</title>
39 * videotestsrc pattern=1 ! \
40 * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)10/1,width=100,height=100 ! \
41 * videobox border-alpha=0 top=-70 bottom=-70 right=-220 ! \
42 * videomixer name=mix sink_0::alpha=0.7 sink_1::alpha=0.5 ! \
43 * ffmpegcolorspace ! xvimagesink \
45 * video/x-raw-yuv,format=\(fourcc\)AYUV,framerate=\(fraction\)5/1,width=320,height=240 ! mix.
46 * ]| A pipeline to demonstrate videomixer used together with videobox.
47 * This should show a 320x240 pixels video test source with some transparency
48 * showing the background checker pattern. Another video test source with just
49 * the snow pattern of 100x100 pixels is overlaid on top of the first one on
50 * the left vertically centered with a small transparency showing the first
51 * video test source behind and the checker pattern under it. Note that the
52 * framerate of the output video is 10 frames per second.
54 * gst-launch videotestsrc pattern=1 ! \
55 * video/x-raw-rgb, framerate=\(fraction\)10/1, width=100, height=100 ! \
56 * videomixer name=mix ! ffmpegcolorspace ! ximagesink \
58 * video/x-raw-rgb, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
59 * ]| A pipeline to demonstrate bgra mixing. (This does not demonstrate alpha blending).
61 * gst-launch videotestsrc pattern=1 ! \
62 * video/x-raw-yuv,format=\(fourcc\)I420, framerate=\(fraction\)10/1, width=100, height=100 ! \
63 * videomixer name=mix ! ffmpegcolorspace ! ximagesink \
65 * video/x-raw-yuv,format=\(fourcc\)I420, framerate=\(fraction\)5/1, width=320, height=240 ! mix.
66 * ]| A pipeline to test I420
68 * gst-launch videotestsrc pattern="snow" ! video/x-raw-yuv, framerate=\(fraction\)10/1, width=200, height=150 ! videomixer name=mix sink_1::xpos=20 sink_1::ypos=20 sink_1::alpha=0.5 ! ffmpegcolorspace ! xvimagesink videotestsrc ! video/x-raw-yuv, framerate=\(fraction\)10/1, width=640, height=360 ! mix.
69 * ]| Set position and alpha on the mixer using #GstVideoMixerPad properties.
78 #include <gst/base/gstcollectpads.h>
79 #include <gst/controller/gstcontroller.h>
80 #include <gst/video/video.h>
89 #include "videomixer.h"
90 #include "videomixer2.h"
93 #define orc_memset memset
95 #include <orc/orcfunctions.h>
98 GST_DEBUG_CATEGORY_STATIC (gst_videomixer_debug);
99 #define GST_CAT_DEFAULT gst_videomixer_debug
101 #define GST_VIDEO_MIXER_GET_STATE_LOCK(mix) \
102 (GST_VIDEO_MIXER(mix)->state_lock)
103 #define GST_VIDEO_MIXER_STATE_LOCK(mix) \
104 (g_mutex_lock(GST_VIDEO_MIXER_GET_STATE_LOCK (mix)))
105 #define GST_VIDEO_MIXER_STATE_UNLOCK(mix) \
106 (g_mutex_unlock(GST_VIDEO_MIXER_GET_STATE_LOCK (mix)))
108 static GType gst_videomixer_get_type (void);
110 static void gst_videomixer_pad_get_property (GObject * object, guint prop_id,
111 GValue * value, GParamSpec * pspec);
112 static void gst_videomixer_pad_set_property (GObject * object, guint prop_id,
113 const GValue * value, GParamSpec * pspec);
115 static gboolean gst_videomixer_src_event (GstPad * pad, GstEvent * event);
116 static gboolean gst_videomixer_sink_event (GstPad * pad, GstEvent * event);
118 static void gst_videomixer_sort_pads (GstVideoMixer * mix);
120 #define DEFAULT_PAD_ZORDER 0
121 #define DEFAULT_PAD_XPOS 0
122 #define DEFAULT_PAD_YPOS 0
123 #define DEFAULT_PAD_ALPHA 1.0
133 GType gst_videomixer_pad_get_type (void);
134 G_DEFINE_TYPE (GstVideoMixerPad, gst_videomixer_pad, GST_TYPE_PAD);
137 gst_videomixer_pad_class_init (GstVideoMixerPadClass * klass)
139 GObjectClass *gobject_class = (GObjectClass *) klass;
141 gobject_class->set_property = gst_videomixer_pad_set_property;
142 gobject_class->get_property = gst_videomixer_pad_get_property;
144 g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
145 g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
146 0, 10000, DEFAULT_PAD_ZORDER,
147 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
148 g_object_class_install_property (gobject_class, PROP_PAD_XPOS,
149 g_param_spec_int ("xpos", "X Position", "X Position of the picture",
150 G_MININT, G_MAXINT, DEFAULT_PAD_XPOS,
151 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
152 g_object_class_install_property (gobject_class, PROP_PAD_YPOS,
153 g_param_spec_int ("ypos", "Y Position", "Y Position of the picture",
154 G_MININT, G_MAXINT, DEFAULT_PAD_YPOS,
155 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
156 g_object_class_install_property (gobject_class, PROP_PAD_ALPHA,
157 g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", 0.0, 1.0,
159 G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
/* GObject get_property for the pad: reads the pad's fields directly.
 * NOTE(review): no lock is taken here, unlike set_property for zorder —
 * reads appear to be treated as atomic enough; confirm against the full
 * (elided) switch body. */
163 gst_videomixer_pad_get_property (GObject * object, guint prop_id,
164 GValue * value, GParamSpec * pspec)
166 GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (object);
169 case PROP_PAD_ZORDER:
170 g_value_set_uint (value, pad->zorder);
173 g_value_set_int (value, pad->xpos);
176 g_value_set_int (value, pad->ypos);
179 g_value_set_double (value, pad->alpha);
182 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject set_property for the pad.  Changing zorder must re-sort the
 * mixer's pad list, so it is done under the mixer state lock; the other
 * properties are plain field writes.  The parent mixer ref taken via
 * gst_pad_get_parent() is released at the end. */
188 gst_videomixer_pad_set_property (GObject * object, guint prop_id,
189 const GValue * value, GParamSpec * pspec)
191 GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (object);
192 GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (GST_PAD (pad)));
195 case PROP_PAD_ZORDER:
196 GST_VIDEO_MIXER_STATE_LOCK (mix);
197 pad->zorder = g_value_get_uint (value);
/* keep sinkpads ordered by zorder so blending happens back-to-front */
198 gst_videomixer_sort_pads (mix);
199 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
202 pad->xpos = g_value_get_int (value);
205 pad->ypos = g_value_get_int (value);
208 pad->alpha = g_value_get_double (value);
211 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
215 gst_object_unref (mix);
/* Store a new QoS observation (proportion/jitter/timestamp from a QOS
 * event) under the object lock.  When we are late (diff > 0) the
 * earliest acceptable time is pushed ahead by twice the jitter plus one
 * frame duration, so we drop enough frames to catch up. */
219 gst_videomixer_update_qos (GstVideoMixer * mix, gdouble proportion,
220 GstClockTimeDiff diff, GstClockTime timestamp)
222 GST_DEBUG_OBJECT (mix,
223 "Updating QoS: proportion %lf, diff %s%" GST_TIME_FORMAT ", timestamp %"
224 GST_TIME_FORMAT, proportion, (diff < 0) ? "-" : "",
225 GST_TIME_ARGS (ABS (diff)), GST_TIME_ARGS (timestamp));
227 GST_OBJECT_LOCK (mix);
228 mix->proportion = proportion;
229 if (G_LIKELY (timestamp != GST_CLOCK_TIME_NONE)) {
230 if (G_UNLIKELY (diff > 0))
/* late: skip ahead 2*diff plus one frame (fps_d/fps_n seconds) */
232 timestamp + 2 * diff + gst_util_uint64_scale_int (GST_SECOND,
233 mix->fps_d, mix->fps_n);
235 mix->earliest_time = timestamp + diff;
237 mix->earliest_time = GST_CLOCK_TIME_NONE;
239 GST_OBJECT_UNLOCK (mix);
/* Clear QoS state back to defaults (no earliest time, neutral 0.5
 * proportion), e.g. after a flush or caps/geometry change. */
243 gst_videomixer_reset_qos (GstVideoMixer * mix)
245 gst_videomixer_update_qos (mix, 0.5, 0, GST_CLOCK_TIME_NONE);
/* Atomically snapshot the current QoS observation (proportion and
 * earliest acceptable running time) under the object lock. */
249 gst_videomixer_read_qos (GstVideoMixer * mix, gdouble * proportion,
252 GST_OBJECT_LOCK (mix);
253 *proportion = mix->proportion;
254 *time = mix->earliest_time;
255 GST_OBJECT_UNLOCK (mix);
258 /* Perform qos calculations before processing the next frame. Returns TRUE if
259 * the frame should be processed, FALSE if the frame can be dropped entirely */
261 gst_videomixer_do_qos (GstVideoMixer * mix, GstClockTime timestamp)
263 GstClockTime qostime, earliest_time;
266 /* no timestamp, can't do QoS => process frame */
267 if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (timestamp))) {
268 GST_LOG_OBJECT (mix, "invalid timestamp, can't do QoS, process frame");
272 /* get latest QoS observation values */
273 gst_videomixer_read_qos (mix, &proportion, &earliest_time);
275 /* skip qos if we have no observation (yet) => process frame */
276 if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (earliest_time))) {
277 GST_LOG_OBJECT (mix, "no observation yet, process frame");
281 /* qos is done on running time */
283 gst_segment_to_running_time (&mix->segment, GST_FORMAT_TIME, timestamp);
285 /* see how our next timestamp relates to the latest qos timestamp */
286 GST_LOG_OBJECT (mix, "qostime %" GST_TIME_FORMAT ", earliest %"
287 GST_TIME_FORMAT, GST_TIME_ARGS (qostime), GST_TIME_ARGS (earliest_time));
/* drop when the frame's running time is at or before the earliest
 * acceptable time reported downstream */
289 if (qostime != GST_CLOCK_TIME_NONE && qostime <= earliest_time) {
290 GST_DEBUG_OBJECT (mix, "we are late, drop frame");
294 GST_LOG_OBJECT (mix, "process frame");
/* Recompute the output ("master") geometry from all sink pads: the
 * output takes the largest width/height seen and the framerate (and
 * matching PAR) of the fastest pad, which becomes the master pad.
 * Must be called with the mixer state lock held (callers in this file
 * do so). */
299 gst_videomixer_set_master_geometry (GstVideoMixer * mix)
302 gint width = 0, height = 0, fps_n = 0, fps_d = 0, par_n = 0, par_d = 0;
303 GstVideoMixerPad *master = NULL;
305 walk = mix->sinkpads;
307 GstVideoMixerPad *mixpad = GST_VIDEO_MIXER_PAD (walk->data);
309 walk = g_slist_next (walk);
311 /* Biggest input geometry will be our output geometry */
312 width = MAX (width, mixpad->in_width);
313 height = MAX (height, mixpad->in_height);
315 /* If mix framerate < mixpad framerate, using fractions */
316 GST_DEBUG_OBJECT (mixpad, "comparing framerate %d/%d to mixpad's %d/%d",
317 fps_n, fps_d, mixpad->fps_n, mixpad->fps_d);
/* cross-multiplied fraction compare in 64 bits to avoid overflow */
318 if ((!fps_n && !fps_d) ||
319 ((gint64) fps_n * mixpad->fps_d < (gint64) mixpad->fps_n * fps_d)) {
320 fps_n = mixpad->fps_n;
321 fps_d = mixpad->fps_d;
322 par_n = mixpad->par_n;
323 par_d = mixpad->par_d;
324 GST_DEBUG_OBJECT (mixpad, "becomes the master pad");
/* only touch the mixer state (and reset QoS) when something changed */
330 if (mix->master != master || mix->in_width != width
331 || mix->in_height != height || mix->fps_n != fps_n
332 || mix->fps_d != fps_d || mix->par_n != par_n || mix->par_d != par_d) {
335 gst_videomixer_reset_qos (mix);
336 mix->master = master;
337 mix->in_width = width;
338 mix->in_height = height;
/* Setcaps function for a sink pad: parse width/height/framerate (all
 * mandatory) and optional pixel-aspect-ratio from the caps, store them
 * on the pad under the state lock, and recompute the master geometry.
 * Returns FALSE if a mandatory field is missing. */
347 gst_videomixer_pad_sink_setcaps (GstPad * pad, GstCaps * vscaps)
350 GstVideoMixerPad *mixpad;
351 GstStructure *structure;
352 gint in_width, in_height;
353 gboolean ret = FALSE;
354 const GValue *framerate, *par;
356 GST_INFO_OBJECT (pad, "Setting caps %" GST_PTR_FORMAT, vscaps);
358 mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
359 mixpad = GST_VIDEO_MIXER_PAD (pad);
364 structure = gst_caps_get_structure (vscaps, 0);
/* width, height and framerate are required; bail out otherwise */
366 if (!gst_structure_get_int (structure, "width", &in_width)
367 || !gst_structure_get_int (structure, "height", &in_height)
368 || (framerate = gst_structure_get_value (structure, "framerate")) == NULL)
370 par = gst_structure_get_value (structure, "pixel-aspect-ratio");
372 GST_VIDEO_MIXER_STATE_LOCK (mix);
373 mixpad->fps_n = gst_value_get_fraction_numerator (framerate);
374 mixpad->fps_d = gst_value_get_fraction_denominator (framerate);
376 mixpad->par_n = gst_value_get_fraction_numerator (par);
377 mixpad->par_d = gst_value_get_fraction_denominator (par);
/* no PAR in caps: assume square pixels */
379 mixpad->par_n = mixpad->par_d = 1;
382 mixpad->in_width = in_width;
383 mixpad->in_height = in_height;
385 gst_videomixer_set_master_geometry (mix);
386 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
391 gst_object_unref (mix);
/* Getcaps for a sink pad.  Starts from downstream's allowed caps (or
 * the pad template if nothing is negotiated downstream).  Once a master
 * pad exists with fixed caps, the answer is the master's caps with
 * width/height/framerate relaxed to full ranges, so new sink pads must
 * match the master's colorspace but may have any geometry/rate. */
397 gst_videomixer_pad_sink_getcaps (GstPad * pad)
400 GstVideoMixerPad *mixpad;
405 mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
406 mixpad = GST_VIDEO_MIXER_PAD (pad);
411 /* Get downstream allowed caps */
412 res = gst_pad_get_allowed_caps (mix->srcpad);
413 if (G_UNLIKELY (res == NULL)) {
414 res = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
418 GST_VIDEO_MIXER_STATE_LOCK (mix);
420 /* Return as-is if not other sinkpad set as master */
421 if (mix->master == NULL) {
422 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
426 mastercaps = gst_pad_get_fixed_caps_func (GST_PAD (mix->master));
428 /* If master pad caps aren't negotiated yet, return downstream
430 if (!GST_CAPS_IS_SIMPLE (mastercaps)) {
431 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
432 gst_caps_unref (mastercaps);
/* replace the downstream caps with the master's, widened to ranges */
436 gst_caps_unref (res);
437 res = gst_caps_make_writable (mastercaps);
438 st = gst_caps_get_structure (res, 0);
439 gst_structure_set (st, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
440 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
441 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
442 if (!gst_structure_has_field (st, "pixel-aspect-ratio"))
443 gst_structure_set (st, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1, NULL);
445 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
449 GST_DEBUG_OBJECT (pad, "Returning %" GST_PTR_FORMAT, res);
455 * We accept the caps if it has the same format as other sink pads in
/* Acceptcaps for a sink pad: build the set of acceptable caps (the
 * master pad's caps with geometry/framerate relaxed, or this pad's own
 * fixed caps when no master is set) and check for an intersection with
 * the offered caps. */
459 gst_videomixer_pad_sink_acceptcaps (GstPad * pad, GstCaps * vscaps)
463 GstCaps *acceptedCaps;
465 mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
466 GST_DEBUG_OBJECT (pad, "%" GST_PTR_FORMAT, vscaps);
467 GST_VIDEO_MIXER_STATE_LOCK (mix);
470 acceptedCaps = gst_pad_get_fixed_caps_func (GST_PAD (mix->master));
471 acceptedCaps = gst_caps_make_writable (acceptedCaps);
472 GST_LOG_OBJECT (pad, "master's caps %" GST_PTR_FORMAT, acceptedCaps);
473 if (GST_CAPS_IS_SIMPLE (acceptedCaps)) {
475 s = gst_caps_get_structure (acceptedCaps, 0);
/* same colorspace as master, but any size and framerate is fine */
476 gst_structure_set (s, "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
477 "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
478 "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
479 if (!gst_structure_has_field (s, "pixel-aspect-ratio"))
480 gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
484 acceptedCaps = gst_pad_get_fixed_caps_func (pad);
487 GST_INFO_OBJECT (pad, "vscaps: %" GST_PTR_FORMAT, vscaps);
488 GST_INFO_OBJECT (pad, "acceptedCaps: %" GST_PTR_FORMAT, acceptedCaps);
490 ret = gst_caps_can_intersect (vscaps, acceptedCaps);
491 GST_INFO_OBJECT (pad, "%saccepted caps %" GST_PTR_FORMAT, (ret ? "" : "not "),
493 gst_caps_unref (acceptedCaps);
494 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
495 gst_object_unref (mix);
/* Instance init for GstVideoMixerPad: install the caps negotiation
 * functions and set the per-pad property defaults. */
502 gst_videomixer_pad_init (GstVideoMixerPad * mixerpad)
504 /* setup some pad functions */
505 gst_pad_set_setcaps_function (GST_PAD (mixerpad),
506 gst_videomixer_pad_sink_setcaps);
507 gst_pad_set_acceptcaps_function (GST_PAD (mixerpad),
508 GST_DEBUG_FUNCPTR (gst_videomixer_pad_sink_acceptcaps));
509 gst_pad_set_getcaps_function (GST_PAD (mixerpad),
510 gst_videomixer_pad_sink_getcaps);
512 mixerpad->zorder = DEFAULT_PAD_ZORDER;
513 mixerpad->xpos = DEFAULT_PAD_XPOS;
514 mixerpad->ypos = DEFAULT_PAD_YPOS;
515 mixerpad->alpha = DEFAULT_PAD_ALPHA;
518 /* VideoMixer signals and args */
525 #define DEFAULT_BACKGROUND VIDEO_MIXER_BACKGROUND_CHECKER
532 #define GST_TYPE_VIDEO_MIXER_BACKGROUND (gst_video_mixer_background_get_type())
/* Lazily register and return the GstVideoMixerBackground enum GType
 * (checker / black / white / transparent) used by the "background"
 * property. */
534 gst_video_mixer_background_get_type (void)
536 static GType video_mixer_background_type = 0;
538 static const GEnumValue video_mixer_background[] = {
539 {VIDEO_MIXER_BACKGROUND_CHECKER, "Checker pattern", "checker"},
540 {VIDEO_MIXER_BACKGROUND_BLACK, "Black", "black"},
541 {VIDEO_MIXER_BACKGROUND_WHITE, "White", "white"},
542 {VIDEO_MIXER_BACKGROUND_TRANSPARENT,
543 "Transparent Background to enable further mixing", "transparent"},
/* one-time registration; NOTE(review): not thread-safe by itself, but
 * GType registration is normally done from class/type init paths */
547 if (!video_mixer_background_type) {
548 video_mixer_background_type =
549 g_enum_register_static ("GstVideoMixerBackground",
550 video_mixer_background);
552 return video_mixer_background_type;
555 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
558 GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
559 GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
560 GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
561 GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
562 GST_VIDEO_CAPS_YUV ("YVYU") ";"
563 GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
564 GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
565 GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
566 GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
569 static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d",
572 GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") ";" GST_VIDEO_CAPS_BGRA ";"
573 GST_VIDEO_CAPS_ARGB ";" GST_VIDEO_CAPS_RGBA ";" GST_VIDEO_CAPS_ABGR ";"
574 GST_VIDEO_CAPS_YUV ("Y444") ";" GST_VIDEO_CAPS_YUV ("Y42B") ";"
575 GST_VIDEO_CAPS_YUV ("YUY2") ";" GST_VIDEO_CAPS_YUV ("UYVY") ";"
576 GST_VIDEO_CAPS_YUV ("YVYU") ";"
577 GST_VIDEO_CAPS_YUV ("I420") ";" GST_VIDEO_CAPS_YUV ("YV12") ";"
578 GST_VIDEO_CAPS_YUV ("Y41B") ";" GST_VIDEO_CAPS_RGB ";"
579 GST_VIDEO_CAPS_BGR ";" GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_xBGR ";"
580 GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_BGRx)
583 static void gst_videomixer_finalize (GObject * object);
585 static GstCaps *gst_videomixer_getcaps (GstPad * pad);
586 static gboolean gst_videomixer_setcaps (GstPad * pad, GstCaps * caps);
587 static gboolean gst_videomixer_query (GstPad * pad, GstQuery * query);
589 static GstFlowReturn gst_videomixer_collected (GstCollectPads * pads,
590 GstVideoMixer * mix);
591 static GstPad *gst_videomixer_request_new_pad (GstElement * element,
592 GstPadTemplate * templ, const gchar * name);
593 static void gst_videomixer_release_pad (GstElement * element, GstPad * pad);
595 static void gst_videomixer_set_property (GObject * object, guint prop_id,
596 const GValue * value, GParamSpec * pspec);
597 static void gst_videomixer_get_property (GObject * object, guint prop_id,
598 GValue * value, GParamSpec * pspec);
599 static GstStateChangeReturn gst_videomixer_change_state (GstElement * element,
600 GstStateChange transition);
602 /*static guint gst_videomixer_signals[LAST_SIGNAL] = { 0 }; */
604 static void gst_videomixer_child_proxy_init (gpointer g_iface,
605 gpointer iface_data);
606 static void _do_init (GType object_type);
608 GST_BOILERPLATE_FULL (GstVideoMixer, gst_videomixer, GstElement,
609 GST_TYPE_ELEMENT, _do_init);
/* Type init hook from GST_BOILERPLATE_FULL: add the GstChildProxy
 * interface so per-pad properties are reachable by name (e.g. from
 * gst-launch "sink_0::alpha=0.5" syntax). */
612 _do_init (GType object_type)
614 static const GInterfaceInfo child_proxy_info = {
615 (GInterfaceInitFunc) gst_videomixer_child_proxy_init,
620 g_type_add_interface_static (object_type, GST_TYPE_CHILD_PROXY,
622 GST_INFO ("GstChildProxy interface registered");
/* GstChildProxy: return the index-th sink pad (a new ref, or NULL when
 * out of range), guarded by the state lock. */
626 gst_videomixer_child_proxy_get_child_by_index (GstChildProxy * child_proxy,
629 GstVideoMixer *mix = GST_VIDEO_MIXER (child_proxy);
632 GST_VIDEO_MIXER_STATE_LOCK (mix);
633 if ((obj = g_slist_nth_data (mix->sinkpads, index)))
634 gst_object_ref (obj);
635 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
/* GstChildProxy: number of children == number of sink pads, read under
 * the state lock. */
640 gst_videomixer_child_proxy_get_children_count (GstChildProxy * child_proxy)
643 GstVideoMixer *mix = GST_VIDEO_MIXER (child_proxy);
645 GST_VIDEO_MIXER_STATE_LOCK (mix);
646 count = mix->numpads;
647 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
648 GST_INFO_OBJECT (mix, "Children Count: %d", count);
/* GInterfaceInitFunc for GstChildProxy: hook up our two vmethods. */
653 gst_videomixer_child_proxy_init (gpointer g_iface, gpointer iface_data)
655 GstChildProxyInterface *iface = g_iface;
657 GST_INFO ("intializing child proxy interface");
658 iface->get_child_by_index = gst_videomixer_child_proxy_get_child_by_index;
659 iface->get_children_count = gst_videomixer_child_proxy_get_children_count;
/* base_init: register the static src/sink pad templates and the
 * element metadata shown by gst-inspect. */
663 gst_videomixer_base_init (gpointer g_class)
665 GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
667 gst_element_class_add_static_pad_template (element_class, &src_factory);
668 gst_element_class_add_static_pad_template (element_class, &sink_factory);
670 gst_element_class_set_details_simple (element_class, "Video mixer",
671 "Filter/Editor/Video",
672 "Mix multiple video streams", "Wim Taymans <wim@fluendo.com>");
/* Class init: GObject property plumbing, the "background" property, and
 * the GstElement vmethods for request pads and state changes. */
676 gst_videomixer_class_init (GstVideoMixerClass * klass)
678 GObjectClass *gobject_class = (GObjectClass *) klass;
679 GstElementClass *gstelement_class = (GstElementClass *) klass;
681 gobject_class->finalize = gst_videomixer_finalize;
683 gobject_class->get_property = gst_videomixer_get_property;
684 gobject_class->set_property = gst_videomixer_set_property;
686 g_object_class_install_property (gobject_class, PROP_BACKGROUND,
687 g_param_spec_enum ("background", "Background", "Background type",
688 GST_TYPE_VIDEO_MIXER_BACKGROUND,
689 DEFAULT_BACKGROUND, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
691 gstelement_class->request_new_pad =
692 GST_DEBUG_FUNCPTR (gst_videomixer_request_new_pad);
693 gstelement_class->release_pad =
694 GST_DEBUG_FUNCPTR (gst_videomixer_release_pad);
695 gstelement_class->change_state =
696 GST_DEBUG_FUNCPTR (gst_videomixer_change_state);
698 /* Register the pad class */
/* evaluating the _get_type() macros forces GType registration */
699 (void) (GST_TYPE_VIDEO_MIXER_PAD);
700 /* Register the background enum */
701 (void) (GST_TYPE_VIDEO_MIXER_BACKGROUND);
/* Drop the queued buffer (if any) held by a collect-pads entry. */
705 gst_videomixer_collect_free (GstVideoMixerCollect * mixcol)
707 if (mixcol->buffer) {
708 gst_buffer_unref (mixcol->buffer);
709 mixcol->buffer = NULL;
/* Reset all negotiated/streaming state back to initial values: format,
 * framerate, segment, QoS, and any buffered data per collected pad.
 * Used from init and on state changes / flushes. */
714 gst_videomixer_reset (GstVideoMixer * mix)
722 mix->fps_n = mix->fps_d = 0;
723 mix->par_n = mix->par_d = 1;
724 mix->setcaps = FALSE;
725 mix->sendseg = FALSE;
727 mix->segment_position = 0;
728 gst_segment_init (&mix->segment, GST_FORMAT_TIME);
730 gst_videomixer_reset_qos (mix);
732 mix->fmt = GST_VIDEO_FORMAT_UNKNOWN;
735 mix->last_duration = -1;
737 /* clean up collect data */
738 walk = mix->collect->data;
740 GstVideoMixerCollect *data = (GstVideoMixerCollect *) walk->data;
742 gst_videomixer_collect_free (data);
743 walk = g_slist_next (walk);
746 mix->next_sinkpad = 0;
747 mix->flush_stop_pending = FALSE;
/* Instance init: create the src pad from the template with its caps/
 * query/event functions, set up GstCollectPads to drive mixing via
 * gst_videomixer_collected, create the state lock, and reset state. */
751 gst_videomixer_init (GstVideoMixer * mix, GstVideoMixerClass * g_class)
753 GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix);
756 gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
758 gst_pad_set_getcaps_function (GST_PAD (mix->srcpad),
759 GST_DEBUG_FUNCPTR (gst_videomixer_getcaps));
760 gst_pad_set_setcaps_function (GST_PAD (mix->srcpad),
761 GST_DEBUG_FUNCPTR (gst_videomixer_setcaps));
762 gst_pad_set_query_function (GST_PAD (mix->srcpad),
763 GST_DEBUG_FUNCPTR (gst_videomixer_query));
764 gst_pad_set_event_function (GST_PAD (mix->srcpad),
765 GST_DEBUG_FUNCPTR (gst_videomixer_src_event));
766 gst_element_add_pad (GST_ELEMENT (mix), mix->srcpad);
768 mix->collect = gst_collect_pads_new ();
769 mix->background = DEFAULT_BACKGROUND;
771 gst_collect_pads_set_function (mix->collect,
772 (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_videomixer_collected),
775 mix->state_lock = g_mutex_new ();
776 /* initialize variables */
777 gst_videomixer_reset (mix);
/* Finalize: release the collect pads and the state lock created in
 * init, then chain up to the parent class. */
781 gst_videomixer_finalize (GObject * object)
783 GstVideoMixer *mix = GST_VIDEO_MIXER (object);
785 gst_object_unref (mix->collect);
786 g_mutex_free (mix->state_lock);
788 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Answer a DURATION query on the src pad by iterating all sink pads,
 * querying each peer, and reporting the maximum.  A peer reporting an
 * unknown (-1) duration short-circuits the search; RESYNC restarts it. */
792 gst_videomixer_query_duration (GstVideoMixer * mix, GstQuery * query)
801 gst_query_parse_duration (query, &format, NULL);
807 /* Take maximum of all durations */
808 it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
810 GstIteratorResult ires;
813 ires = gst_iterator_next (it, &item);
815 case GST_ITERATOR_DONE:
818 case GST_ITERATOR_OK:
820 GstPad *pad = GST_PAD_CAST (item);
823 /* ask sink peer for duration */
824 res &= gst_pad_query_peer_duration (pad, &format, &duration);
825 /* take max from all valid return values */
827 /* valid unknown length, stop searching */
828 if (duration == -1) {
832 /* else see if bigger than current max */
833 else if (duration > max)
836 gst_object_unref (pad);
839 case GST_ITERATOR_RESYNC:
/* pad list changed under us: start over */
842 gst_iterator_resync (it);
850 gst_iterator_free (it);
853 /* and store the max */
854 GST_DEBUG_OBJECT (mix, "Total duration in format %s: %"
855 GST_TIME_FORMAT, gst_format_get_name (format), GST_TIME_ARGS (max));
856 gst_query_set_duration (query, format, max);
/* Answer a LATENCY query by combining peer results from every sink
 * pad: live if any peer is live, max latency is the largest valid
 * upstream max (GST_CLOCK_TIME_NONE treated as "unbounded absent").
 * NOTE(review): min handling is in elided lines — presumably the max
 * of peer minimums; confirm against the full source. */
863 gst_videomixer_query_latency (GstVideoMixer * mix, GstQuery * query)
865 GstClockTime min, max;
875 max = GST_CLOCK_TIME_NONE;
877 /* Take maximum of all latency values */
878 it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
880 GstIteratorResult ires;
883 ires = gst_iterator_next (it, &item);
885 case GST_ITERATOR_DONE:
888 case GST_ITERATOR_OK:
890 GstPad *pad = GST_PAD_CAST (item);
894 GstClockTime min_cur, max_cur;
898 peerquery = gst_query_new_latency ();
900 /* Ask peer for latency */
901 res &= gst_pad_peer_query (pad, peerquery);
903 /* take max from all valid return values */
905 gst_query_parse_latency (peerquery, &live_cur, &min_cur, &max_cur);
910 if (max_cur != GST_CLOCK_TIME_NONE &&
911 ((max != GST_CLOCK_TIME_NONE && max_cur > max) ||
912 (max == GST_CLOCK_TIME_NONE)))
915 live = live || live_cur;
918 gst_query_unref (peerquery);
919 gst_object_unref (pad);
922 case GST_ITERATOR_RESYNC:
/* pad list changed: reset accumulators and retry */
925 max = GST_CLOCK_TIME_NONE;
927 gst_iterator_resync (it);
935 gst_iterator_free (it);
938 /* store the results */
939 GST_DEBUG_OBJECT (mix, "Calculated total latency: live %s, min %"
940 GST_TIME_FORMAT ", max %" GST_TIME_FORMAT,
941 (live ? "yes" : "no"), GST_TIME_ARGS (min), GST_TIME_ARGS (max));
942 gst_query_set_latency (query, live, min, max);
/* Src pad query dispatcher: POSITION is answered from last_ts (TIME
 * only), DURATION and LATENCY fan out to the sink-pad helpers, and
 * everything else is forwarded to the master sink pad. */
949 gst_videomixer_query (GstPad * pad, GstQuery * query)
951 GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
952 gboolean res = FALSE;
954 switch (GST_QUERY_TYPE (query)) {
955 case GST_QUERY_POSITION:
959 gst_query_parse_position (query, &format, NULL);
962 case GST_FORMAT_TIME:
963 /* FIXME, bring to stream time, might be tricky */
964 gst_query_set_position (query, format, mix->last_ts);
972 case GST_QUERY_DURATION:
973 res = gst_videomixer_query_duration (mix, query);
975 case GST_QUERY_LATENCY:
976 res = gst_videomixer_query_latency (mix, query);
979 /* FIXME, needs a custom query handler because we have multiple
980 * sinkpads, send to the master pad until then */
981 res = gst_pad_query (GST_PAD_CAST (mix->master), query);
985 gst_object_unref (mix);
/* Getcaps for the src pad: start from a template-caps copy (master's
 * template if a master exists, otherwise the src template) and pin
 * width/height/framerate in every structure to the already-decided
 * output values, when known. */
990 gst_videomixer_getcaps (GstPad * pad)
992 GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
994 GstStructure *structure;
999 gst_caps_copy (gst_pad_get_pad_template_caps (GST_PAD (mix->master)));
1001 caps = gst_caps_copy (gst_pad_get_pad_template_caps (mix->srcpad));
/* walk structures backwards and fix the known output fields */
1004 numCaps = gst_caps_get_size (caps) - 1;
1005 for (; numCaps >= 0; numCaps--) {
1006 structure = gst_caps_get_structure (caps, numCaps);
1007 if (mix->out_width != 0) {
1008 gst_structure_set (structure, "width", G_TYPE_INT, mix->out_width, NULL);
1010 if (mix->out_height != 0) {
1011 gst_structure_set (structure, "height", G_TYPE_INT, mix->out_height,
1014 if (mix->fps_d != 0) {
1015 gst_structure_set (structure,
1016 "framerate", GST_TYPE_FRACTION, mix->fps_n, mix->fps_d, NULL);
1020 gst_object_unref (mix);
/* Setcaps for the src pad: parse the negotiated video format and select
 * the matching blend/overlay/fill_checker/fill_color function pointers.
 * Formats with a real alpha channel (AYUV, ARGB, BGRA, ABGR, RGBA) get
 * a distinct overlay routine; all other formats reuse blend as overlay.
 * The function pointers are cleared first so a parse failure leaves the
 * mixer with no stale handlers. */
1026 gst_videomixer_setcaps (GstPad * pad, GstCaps * caps)
1028 GstVideoMixer *mixer = GST_VIDEO_MIXER (gst_pad_get_parent_element (pad));
1029 gboolean ret = FALSE;
1031 GST_INFO_OBJECT (mixer, "set src caps: %" GST_PTR_FORMAT, caps);
1033 mixer->blend = NULL;
1034 mixer->overlay = NULL;
1035 mixer->fill_checker = NULL;
1036 mixer->fill_color = NULL;
1038 if (!gst_video_format_parse_caps (caps, &mixer->fmt, NULL, NULL))
1041 switch (mixer->fmt) {
1042 case GST_VIDEO_FORMAT_AYUV:
1043 mixer->blend = gst_video_mixer_blend_ayuv;
1044 mixer->overlay = gst_video_mixer_overlay_ayuv;
1045 mixer->fill_checker = gst_video_mixer_fill_checker_ayuv;
1046 mixer->fill_color = gst_video_mixer_fill_color_ayuv;
1049 case GST_VIDEO_FORMAT_ARGB:
1050 mixer->blend = gst_video_mixer_blend_argb;
1051 mixer->overlay = gst_video_mixer_overlay_argb;
1052 mixer->fill_checker = gst_video_mixer_fill_checker_argb;
1053 mixer->fill_color = gst_video_mixer_fill_color_argb;
1056 case GST_VIDEO_FORMAT_BGRA:
1057 mixer->blend = gst_video_mixer_blend_bgra;
1058 mixer->overlay = gst_video_mixer_overlay_bgra;
1059 mixer->fill_checker = gst_video_mixer_fill_checker_bgra;
1060 mixer->fill_color = gst_video_mixer_fill_color_bgra;
1063 case GST_VIDEO_FORMAT_ABGR:
1064 mixer->blend = gst_video_mixer_blend_abgr;
1065 mixer->overlay = gst_video_mixer_overlay_abgr;
1066 mixer->fill_checker = gst_video_mixer_fill_checker_abgr;
1067 mixer->fill_color = gst_video_mixer_fill_color_abgr;
1070 case GST_VIDEO_FORMAT_RGBA:
1071 mixer->blend = gst_video_mixer_blend_rgba;
1072 mixer->overlay = gst_video_mixer_overlay_rgba;
1073 mixer->fill_checker = gst_video_mixer_fill_checker_rgba;
1074 mixer->fill_color = gst_video_mixer_fill_color_rgba;
/* formats without alpha below: overlay == blend */
1077 case GST_VIDEO_FORMAT_Y444:
1078 mixer->blend = gst_video_mixer_blend_y444;
1079 mixer->overlay = mixer->blend;
1080 mixer->fill_checker = gst_video_mixer_fill_checker_y444;
1081 mixer->fill_color = gst_video_mixer_fill_color_y444;
1084 case GST_VIDEO_FORMAT_Y42B:
1085 mixer->blend = gst_video_mixer_blend_y42b;
1086 mixer->overlay = mixer->blend;
1087 mixer->fill_checker = gst_video_mixer_fill_checker_y42b;
1088 mixer->fill_color = gst_video_mixer_fill_color_y42b;
1091 case GST_VIDEO_FORMAT_YUY2:
1092 mixer->blend = gst_video_mixer_blend_yuy2;
1093 mixer->overlay = mixer->blend;
1094 mixer->fill_checker = gst_video_mixer_fill_checker_yuy2;
1095 mixer->fill_color = gst_video_mixer_fill_color_yuy2;
1098 case GST_VIDEO_FORMAT_UYVY:
1099 mixer->blend = gst_video_mixer_blend_uyvy;
1100 mixer->overlay = mixer->blend;
1101 mixer->fill_checker = gst_video_mixer_fill_checker_uyvy;
1102 mixer->fill_color = gst_video_mixer_fill_color_uyvy;
1105 case GST_VIDEO_FORMAT_YVYU:
1106 mixer->blend = gst_video_mixer_blend_yvyu;
1107 mixer->overlay = mixer->blend;
1108 mixer->fill_checker = gst_video_mixer_fill_checker_yvyu;
1109 mixer->fill_color = gst_video_mixer_fill_color_yvyu;
1112 case GST_VIDEO_FORMAT_I420:
1113 mixer->blend = gst_video_mixer_blend_i420;
1114 mixer->overlay = mixer->blend;
1115 mixer->fill_checker = gst_video_mixer_fill_checker_i420;
1116 mixer->fill_color = gst_video_mixer_fill_color_i420;
1119 case GST_VIDEO_FORMAT_YV12:
1120 mixer->blend = gst_video_mixer_blend_yv12;
1121 mixer->overlay = mixer->blend;
1122 mixer->fill_checker = gst_video_mixer_fill_checker_yv12;
1123 mixer->fill_color = gst_video_mixer_fill_color_yv12;
1126 case GST_VIDEO_FORMAT_Y41B:
1127 mixer->blend = gst_video_mixer_blend_y41b;
1128 mixer->overlay = mixer->blend;
1129 mixer->fill_checker = gst_video_mixer_fill_checker_y41b;
1130 mixer->fill_color = gst_video_mixer_fill_color_y41b;
1133 case GST_VIDEO_FORMAT_RGB:
1134 mixer->blend = gst_video_mixer_blend_rgb;
1135 mixer->overlay = mixer->blend;
1136 mixer->fill_checker = gst_video_mixer_fill_checker_rgb;
1137 mixer->fill_color = gst_video_mixer_fill_color_rgb;
1140 case GST_VIDEO_FORMAT_BGR:
1141 mixer->blend = gst_video_mixer_blend_bgr;
1142 mixer->overlay = mixer->blend;
1143 mixer->fill_checker = gst_video_mixer_fill_checker_bgr;
1144 mixer->fill_color = gst_video_mixer_fill_color_bgr;
1147 case GST_VIDEO_FORMAT_xRGB:
1148 mixer->blend = gst_video_mixer_blend_xrgb;
1149 mixer->overlay = mixer->blend;
1150 mixer->fill_checker = gst_video_mixer_fill_checker_xrgb;
1151 mixer->fill_color = gst_video_mixer_fill_color_xrgb;
1154 case GST_VIDEO_FORMAT_xBGR:
1155 mixer->blend = gst_video_mixer_blend_xbgr;
1156 mixer->overlay = mixer->blend;
1157 mixer->fill_checker = gst_video_mixer_fill_checker_xbgr;
1158 mixer->fill_color = gst_video_mixer_fill_color_xbgr;
1161 case GST_VIDEO_FORMAT_RGBx:
1162 mixer->blend = gst_video_mixer_blend_rgbx;
1163 mixer->overlay = mixer->blend;
1164 mixer->fill_checker = gst_video_mixer_fill_checker_rgbx;
1165 mixer->fill_color = gst_video_mixer_fill_color_rgbx;
1168 case GST_VIDEO_FORMAT_BGRx:
1169 mixer->blend = gst_video_mixer_blend_bgrx;
1170 mixer->overlay = mixer->blend;
1171 mixer->fill_checker = gst_video_mixer_fill_checker_bgrx;
1172 mixer->fill_color = gst_video_mixer_fill_color_bgrx;
1180 gst_object_unref (mixer);
/* GstElement::request_new_pad implementation for the "sink_%d" template.
 * Creates a GstVideoMixerPad, registers it with the element's GstCollectPads,
 * links pad and collect-data to each other, and appends the pad to the
 * internal z-ordered sinkpad list.  Returns the new pad, or NULL on a
 * template/direction mismatch (failure paths are elided in this excerpt).
 * NOTE(review): several original lines are elided here; comments below only
 * describe what the visible code shows. */
1186 gst_videomixer_request_new_pad (GstElement * element,
1187 GstPadTemplate * templ, const gchar * req_name)
1189 GstVideoMixer *mix = NULL;
1190 GstVideoMixerPad *mixpad = NULL;
1191 GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
1193 g_return_val_if_fail (templ != NULL, NULL);
1195 if (G_UNLIKELY (templ->direction != GST_PAD_SINK)) {
1196 g_warning ("videomixer: request pad that is not a SINK pad");
1200 g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), NULL);
1202 mix = GST_VIDEO_MIXER (element);
1204 if (templ == gst_element_class_get_pad_template (klass, "sink_%d")) {
1207 GstVideoMixerCollect *mixcol = NULL;
1209 GST_VIDEO_MIXER_STATE_LOCK (mix);
1210 if (req_name == NULL || strlen (req_name) < 6
1211 || !g_str_has_prefix (req_name, "sink_")) {
1212 /* no name given when requesting the pad, use next available int */
1213 serial = mix->next_sinkpad++;
1215 /* parse serial number from requested padname */
1216 serial = atoi (&req_name[5]);
/* keep next_sinkpad ahead of any explicitly requested serial so auto-assigned
 * names never collide with it */
1217 if (serial >= mix->next_sinkpad)
1218 mix->next_sinkpad = serial + 1;
1220 /* create new pad with the name */
1221 name = g_strdup_printf ("sink_%d", serial);
1222 mixpad = g_object_new (GST_TYPE_VIDEO_MIXER_PAD, "name", name, "direction",
1223 templ->direction, "template", templ, NULL);
/* default per-pad mixing parameters; zorder defaults to creation order */
1226 mixpad->zorder = mix->numpads;
1227 mixpad->xpos = DEFAULT_PAD_XPOS;
1228 mixpad->ypos = DEFAULT_PAD_YPOS;
1229 mixpad->alpha = DEFAULT_PAD_ALPHA;
/* GstCollectPads allocates our per-pad collect data of the requested size */
1231 mixcol = (GstVideoMixerCollect *)
1232 gst_collect_pads_add_pad (mix->collect, GST_PAD (mixpad),
1233 sizeof (GstVideoMixerCollect));
1235 /* FIXME: hacked way to override/extend the event function of
1236 * GstCollectPads; because it sets its own event function giving the
1237 * element no access to events */
1238 mix->collect_event =
1239 (GstPadEventFunction) GST_PAD_EVENTFUNC (GST_PAD (mixpad));
1240 gst_pad_set_event_function (GST_PAD (mixpad),
1241 GST_DEBUG_FUNCPTR (gst_videomixer_sink_event));
1243 /* Keep track of each other */
1244 mixcol->mixpad = mixpad;
1245 mixpad->mixcol = mixcol;
1247 /* Keep an internal list of mixpads for zordering */
1248 mix->sinkpads = g_slist_append (mix->sinkpads, mixpad);
1250 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
1252 g_warning ("videomixer: this is not our template!");
1256 /* add the pad to the element */
1257 gst_element_add_pad (element, GST_PAD (mixpad));
/* notify GstChildProxy users (per-pad properties become visible) */
1258 gst_child_proxy_child_added (GST_OBJECT (mix), GST_OBJECT (mixpad));
1260 return GST_PAD (mixpad);
/* GstElement::release_pad implementation.
 * Undoes request_new_pad: removes the pad from the internal sinkpad list,
 * frees its collect data, detaches it from GstCollectPads, emits the
 * child-proxy removal notification, recomputes the master/geometry, and
 * finally removes the pad from the element.  Unknown pads are only warned
 * about (the early-exit path is partly elided in this excerpt). */
1264 gst_videomixer_release_pad (GstElement * element, GstPad * pad)
1266 GstVideoMixer *mix = NULL;
1267 GstVideoMixerPad *mixpad;
1269 mix = GST_VIDEO_MIXER (element);
1270 GST_VIDEO_MIXER_STATE_LOCK (mix);
1271 if (G_UNLIKELY (g_slist_find (mix->sinkpads, pad) == NULL)) {
1272 g_warning ("Unknown pad %s", GST_PAD_NAME (pad));
1276 mixpad = GST_VIDEO_MIXER_PAD (pad);
1278 mix->sinkpads = g_slist_remove (mix->sinkpads, pad);
1279 gst_videomixer_collect_free (mixpad->mixcol);
1280 gst_collect_pads_remove_pad (mix->collect, pad);
1281 gst_child_proxy_child_removed (GST_OBJECT (mix), GST_OBJECT (mixpad));
1282 /* determine possibly new geometry and master */
1283 gst_videomixer_set_master_geometry (mix);
1285 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
1287 gst_element_remove_pad (element, pad);
/* NOTE(review): this second UNLOCK appears to belong to the unknown-pad
 * error path whose label/braces are elided in this excerpt — confirm
 * against the full file. */
1290 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
/* GCompareFunc: orders two mixer pads by ascending zorder so the sinkpad
 * list can be kept sorted for blending.
 * NOTE(review): assumes zorder values are small enough that the subtraction
 * cannot overflow/wrap — TODO confirm zorder's type and range. */
1294 pad_zorder_compare (const GstVideoMixerPad * pad1,
1295 const GstVideoMixerPad * pad2)
1297 return pad1->zorder - pad2->zorder;
/* Re-sorts the internal sinkpad list by zorder (see pad_zorder_compare)
 * so that the blend loop walks pads in z order.  Locking contract is not
 * visible in this excerpt — verify callers hold the state lock. */
1301 gst_videomixer_sort_pads (GstVideoMixer * mix)
1303 mix->sinkpads = g_slist_sort (mix->sinkpads,
1304 (GCompareFunc) pad_zorder_compare);
1307 /* try to get a buffer on all pads. As long as the queued value is
1308 * negative, we skip buffers */
1310 gst_videomixer_fill_queues (GstVideoMixer * mix)
1312 GSList *walk = NULL;
1313 gboolean eos = TRUE;
1315 g_return_val_if_fail (GST_IS_VIDEO_MIXER (mix), FALSE);
1317 /* try to make sure we have a buffer from each usable pad first */
1318 walk = mix->collect->data;
1320 GstCollectData *data = (GstCollectData *) walk->data;
1321 GstVideoMixerCollect *mixcol = (GstVideoMixerCollect *) data;
1322 GstVideoMixerPad *mixpad = mixcol->mixpad;
1324 walk = g_slist_next (walk);
1326 if (mixcol->buffer == NULL) {
1327 GstBuffer *buf = NULL;
1329 GST_LOG_OBJECT (mix, "we need a new buffer");
1331 buf = gst_collect_pads_peek (mix->collect, data);
1336 mixcol->buffer = buf;
1337 duration = GST_BUFFER_DURATION (mixcol->buffer);
1339 GST_LOG_OBJECT (mix, "we have a buffer with duration %" GST_TIME_FORMAT
1340 ", queued %" GST_TIME_FORMAT, GST_TIME_ARGS (duration),
1341 GST_TIME_ARGS (mixpad->queued));
1343 /* no duration on the buffer, use the framerate */
1344 if (!GST_CLOCK_TIME_IS_VALID (duration)) {
1345 if (mixpad->fps_n == 0) {
1346 duration = GST_CLOCK_TIME_NONE;
1349 gst_util_uint64_scale_int (GST_SECOND, mixpad->fps_d,
1353 if (GST_CLOCK_TIME_IS_VALID (duration))
1354 mixpad->queued += duration;
1355 else if (!mixpad->queued)
1356 mixpad->queued = GST_CLOCK_TIME_NONE;
1358 GST_LOG_OBJECT (mix, "now queued: %" GST_TIME_FORMAT,
1359 GST_TIME_ARGS (mixpad->queued));
1361 GST_LOG_OBJECT (mix, "pop returned a NULL buffer");
1364 if (mix->sendseg && (mixpad == mix->master)) {
1367 GstSegment *segment = &data->segment;
1369 /* FIXME, use rate/applied_rate as set on all sinkpads.
1370 * - currently we just set rate as received from last seek-event
1371 * We could potentially figure out the duration as well using
1372 * the current segment positions and the stated stop positions.
1373 * Also we just start from stream time 0 which is rather
1374 * weird. For non-synchronized mixing, the time should be
1375 * the min of the stream times of all received segments,
1376 * rationale being that the duration is at least going to
1377 * be as long as the earliest stream we start mixing. This
1378 * would also be correct for synchronized mixing but then
1379 * the later streams would be delayed until the stream times
1382 GST_INFO_OBJECT (mix, "_sending play segment");
1384 start = segment->accum;
1386 /* get the duration of the segment if we can and add it to the accumulated
1387 * time on the segment. */
1388 if (segment->stop != -1 && segment->start != -1)
1389 stop = start + (segment->stop - segment->start);
1393 gst_segment_set_newsegment (&mix->segment, FALSE, segment->rate,
1394 segment->format, start, stop, start + mix->segment_position);
1396 gst_event_new_new_segment_full (FALSE, segment->rate, 1.0,
1397 segment->format, start, stop, start + mix->segment_position);
1398 gst_pad_push_event (mix->srcpad, event);
1399 mix->sendseg = FALSE;
1402 if (mixcol->buffer != NULL && GST_CLOCK_TIME_IS_VALID (mixpad->queued)) {
1403 /* got a buffer somewhere so we're not eos */
1411 /* blend all buffers present on the pads */
/* Composites every queued sink buffer into outbuf, walking the z-ordered
 * sinkpad list.  Uses the non-premultiplying `overlay` variant when the
 * background is transparent, otherwise the regular `blend` (the else branch
 * is elided in this excerpt).  Controller-bound pad properties are synced
 * to the buffer's stream time before each blend. */
1413 gst_videomixer_blend_buffers (GstVideoMixer * mix, GstBuffer * outbuf)
1416 BlendFunction blend;
1417 if (mix->background == VIDEO_MIXER_BACKGROUND_TRANSPARENT) {
1418 blend = mix->overlay;
1424 walk = mix->sinkpads;
1425 while (walk) { /* We walk with this list because it's ordered */
1426 GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data);
1427 GstVideoMixerCollect *mixcol = pad->mixcol;
1429 walk = g_slist_next (walk);
1431 if (mixcol->buffer != NULL) {
1432 GstClockTime timestamp;
1436 seg = &mixcol->collect.segment;
1438 timestamp = GST_BUFFER_TIMESTAMP (mixcol->buffer);
1441 gst_segment_to_stream_time (seg, GST_FORMAT_TIME, timestamp);
1443 /* sync object properties on stream time */
1444 if (GST_CLOCK_TIME_IS_VALID (stream_time))
1445 gst_object_sync_values (G_OBJECT (pad), stream_time);
/* blend this pad's frame at its configured position/alpha into outbuf */
1447 blend (GST_BUFFER_DATA (mixcol->buffer),
1448 pad->xpos, pad->ypos, pad->in_width, pad->in_height, pad->alpha,
1449 GST_BUFFER_DATA (outbuf), mix->out_width, mix->out_height);
1454 /* remove buffers from the queue that were expired in the
1455 * interval of the master, we also prepare the queued value
1456 * in the pad so that we can skip and fill buffers later on */
1454 /* remove buffers from the queue that were expired in the
1455 * interval of the master, we also prepare the queued value
1456 * in the pad so that we can skip and fill buffers later on */
/* One master-interval has been consumed: subtract it from every pad's
 * queued time and pop/unref buffers whose queued time is exhausted.
 * The interval is the master's queued time, or one frame duration derived
 * from the output framerate when that is unusable (G_MAXINT64 when there
 * is no framerate either, i.e. hold buffers indefinitely). */
1458 gst_videomixer_update_queues (GstVideoMixer * mix)
1463 interval = mix->master->queued;
1464 if (interval <= 0) {
1465 if (mix->fps_n == 0) {
1466 interval = G_MAXINT64;
1468 interval = gst_util_uint64_scale_int (GST_SECOND, mix->fps_d, mix->fps_n);
1470 GST_LOG_OBJECT (mix, "set interval to %" G_GINT64_FORMAT " nanoseconds",
1474 walk = mix->sinkpads;
1476 GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data);
1477 GstVideoMixerCollect *mixcol = pad->mixcol;
1479 walk = g_slist_next (walk);
1481 if (mixcol->buffer != NULL) {
1482 pad->queued -= interval;
1483 GST_LOG_OBJECT (pad, "queued now %" G_GINT64_FORMAT, pad->queued);
1484 if (pad->queued <= 0) {
/* fully consumed: actually pop it from collectpads (fill_queues only
 * peeked) and drop our reference */
1486 gst_collect_pads_pop (mix->collect, &mixcol->collect);
1488 GST_LOG_OBJECT (pad, "unreffing buffer");
1490 gst_buffer_unref (buffer);
1492 GST_WARNING_OBJECT (pad,
1493 "Buffer was removed by GstCollectPads in the meantime");
1495 gst_buffer_unref (mixcol->buffer);
1496 mixcol->buffer = NULL;
1502 static GstFlowReturn
/* GstCollectPads callback: runs once all sink pads have data (or are EOS).
 * Sequence: handle a pending flush-stop, fill per-pad queues (pushing EOS
 * downstream if everything is EOS), renegotiate src caps when input geometry
 * changed, derive output timestamp/duration from the master pad, apply QoS
 * (skipping the frame entirely when late), allocate and fill the output
 * buffer with the configured background, blend all pads into it, and push
 * it on the src pad.  Several error/exit paths are elided in this excerpt. */
1503 gst_videomixer_collected (GstCollectPads * pads, GstVideoMixer * mix)
1505 GstFlowReturn ret = GST_FLOW_OK;
1506 GstBuffer *outbuf = NULL;
1508 gboolean eos = FALSE;
1509 GstClockTime timestamp = GST_CLOCK_TIME_NONE;
1510 GstClockTime duration = GST_CLOCK_TIME_NONE;
1512 g_return_val_if_fail (GST_IS_VIDEO_MIXER (mix), GST_FLOW_ERROR);
1514 /* This must be set, otherwise we have no caps */
1515 if (G_UNLIKELY (mix->in_width == 0))
1516 return GST_FLOW_NOT_NEGOTIATED;
/* a flushing seek completed while we were not running: send the deferred
 * FLUSH_STOP downstream exactly once */
1518 if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending, TRUE, FALSE)) {
1519 GST_DEBUG_OBJECT (mix, "pending flush stop");
1520 gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
1523 GST_LOG_OBJECT (mix, "all pads are collected");
1524 GST_VIDEO_MIXER_STATE_LOCK (mix);
1526 eos = gst_videomixer_fill_queues (mix);
1529 /* Push EOS downstream */
1530 GST_LOG_OBJECT (mix, "all our sinkpads are EOS, pushing downstream");
1531 gst_pad_push_event (mix->srcpad, gst_event_new_eos ());
1532 ret = GST_FLOW_WRONG_STATE;
1536 /* If geometry has changed we need to set new caps on the buffer */
1537 if (mix->in_width != mix->out_width || mix->in_height != mix->out_height
1539 GstCaps *newcaps = NULL;
/* base the new caps on the master pad's negotiated caps, updating only
 * geometry and pixel-aspect-ratio */
1541 newcaps = gst_caps_make_writable
1542 (gst_pad_get_negotiated_caps (GST_PAD (mix->master)));
1543 gst_caps_set_simple (newcaps,
1544 "width", G_TYPE_INT, mix->in_width,
1545 "height", G_TYPE_INT, mix->in_height,
1546 "pixel-aspect-ratio", GST_TYPE_FRACTION, mix->par_n, mix->par_d, NULL);
1548 mix->out_width = mix->in_width;
1549 mix->out_height = mix->in_height;
1550 mix->setcaps = FALSE;
1552 /* Calculating out buffer size from input size */
1553 gst_pad_set_caps (mix->srcpad, newcaps);
1554 gst_caps_unref (newcaps);
1557 /* Get timestamp & duration */
1558 if (mix->master->mixcol->buffer != NULL) {
1561 GstVideoMixerCollect *mixcol = mix->master->mixcol;
1563 seg = &mixcol->collect.segment;
1564 in_ts = GST_BUFFER_TIMESTAMP (mixcol->buffer);
1566 timestamp = gst_segment_to_running_time (seg, GST_FORMAT_TIME, in_ts);
1567 duration = GST_BUFFER_DURATION (mixcol->buffer);
/* remember so we can extrapolate when the master has no buffer */
1569 mix->last_ts = timestamp;
1570 mix->last_duration = duration;
1572 timestamp = mix->last_ts;
1573 duration = mix->last_duration;
1576 if (GST_CLOCK_TIME_IS_VALID (duration))
1577 mix->last_ts += duration;
/* QoS says this frame would arrive too late: consume the input buffers
 * without producing output */
1579 if (!gst_videomixer_do_qos (mix, timestamp)) {
1580 gst_videomixer_update_queues (mix);
1581 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
1586 /* allocate an output buffer */
1588 gst_video_format_get_size (mix->fmt, mix->out_width, mix->out_height);
1590 gst_pad_alloc_buffer_and_set_caps (mix->srcpad, GST_BUFFER_OFFSET_NONE,
1591 outsize, GST_PAD_CAPS (mix->srcpad), &outbuf);
1593 /* This must be set at this point, otherwise we have no src caps */
1594 g_assert (mix->blend != NULL);
1596 if (ret != GST_FLOW_OK) {
1600 GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
1601 GST_BUFFER_DURATION (outbuf) = duration;
/* paint the configured background before blending the pads on top */
1603 switch (mix->background) {
1604 case VIDEO_MIXER_BACKGROUND_CHECKER:
1605 mix->fill_checker (GST_BUFFER_DATA (outbuf), mix->out_width,
1608 case VIDEO_MIXER_BACKGROUND_BLACK:
/* Y=16 U=V=128 is black in video-range YUV; the RGB fill functions
 * presumably interpret these as component values — confirm in the
 * blend implementations */
1609 mix->fill_color (GST_BUFFER_DATA (outbuf), mix->out_width,
1610 mix->out_height, 16, 128, 128);
1612 case VIDEO_MIXER_BACKGROUND_WHITE:
1613 mix->fill_color (GST_BUFFER_DATA (outbuf), mix->out_width,
1614 mix->out_height, 240, 128, 128);
1616 case VIDEO_MIXER_BACKGROUND_TRANSPARENT:
/* zero the whole frame (alpha 0 = fully transparent) */
1617 orc_memset (GST_BUFFER_DATA (outbuf), 0,
1618 gst_video_format_get_row_stride (mix->fmt, 0,
1619 mix->out_width) * mix->out_height);
1623 gst_videomixer_blend_buffers (mix, outbuf);
1625 gst_videomixer_update_queues (mix);
1626 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
1628 ret = gst_pad_push (mix->srcpad, outbuf);
1637 gst_buffer_unref (outbuf);
1639 GST_VIDEO_MIXER_STATE_UNLOCK (mix);
/* GstIteratorFoldFunction used by forward_event(): pushes a ref of the
 * event to one sink pad, clearing the boolean accumulator on failure.
 * Unrefs the pad, which the iterator reffed for us. */
1645 forward_event_func (GstPad * pad, GValue * ret, GstEvent * event)
1647 gst_event_ref (event);
1648 GST_LOG_OBJECT (pad, "About to send event %s", GST_EVENT_TYPE_NAME (event));
1649 if (!gst_pad_push_event (pad, event)) {
1650 g_value_set_boolean (ret, FALSE);
1651 GST_WARNING_OBJECT (pad, "Sending event %p (%s) failed.",
1652 event, GST_EVENT_TYPE_NAME (event));
1654 GST_LOG_OBJECT (pad, "Sent event %p (%s).",
1655 event, GST_EVENT_TYPE_NAME (event));
1657 gst_object_unref (pad);
1661 /* forwards the event to all sinkpads, takes ownership of the
1664 * Returns: TRUE if the event could be forwarded on all
1661 /* forwards the event to all sinkpads, takes ownership of the
1664 * Returns: TRUE if the event could be forwarded on all
/* Folds forward_event_func over all sink pads; the accumulator starts TRUE
 * and any failed push flips it to FALSE.  Consumes the caller's event ref
 * (each pad push gets its own ref inside the fold function). */
1668 forward_event (GstVideoMixer * mix, GstEvent * event)
1671 GValue vret = { 0 };
1673 GST_LOG_OBJECT (mix, "Forwarding event %p (%s)", event,
1674 GST_EVENT_TYPE_NAME (event));
1676 g_value_init (&vret, G_TYPE_BOOLEAN);
1677 g_value_set_boolean (&vret, TRUE);
1678 it = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mix));
1679 gst_iterator_fold (it, (GstIteratorFoldFunction) forward_event_func, &vret,
1681 gst_iterator_free (it);
1682 gst_event_unref (event);
1684 return g_value_get_boolean (&vret);
/* Event handler for the src pad.
 * QOS: updates the mixer's QoS state and drops the event (no upstream
 * transform yet — see TODO).  SEEK: performs the flush dance around
 * GstCollectPads, records the new segment position, defers FLUSH_STOP via
 * flush_stop_pending, and forwards the seek to all sink pads.  NAVIGATION
 * is dropped; everything else is forwarded.  Some branch/exit lines are
 * elided in this excerpt. */
1688 gst_videomixer_src_event (GstPad * pad, GstEvent * event)
1690 GstVideoMixer *mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
1693 switch (GST_EVENT_TYPE (event)) {
1694 case GST_EVENT_QOS:{
1695 GstClockTimeDiff diff;
1696 GstClockTime timestamp;
1699 gst_event_parse_qos (event, &proportion, &diff, &timestamp);
1701 gst_videomixer_update_qos (mix, proportion, diff, timestamp);
1702 gst_event_unref (event);
1704 /* TODO: The QoS event should be transformed and send upstream */
1708 case GST_EVENT_SEEK:
1711 GstSeekType curtype;
1714 /* parse the seek parameters */
1715 gst_event_parse_seek (event, NULL, NULL, &flags, &curtype,
1718 /* check if we are flushing */
1719 if (flags & GST_SEEK_FLAG_FLUSH) {
1720 /* make sure we accept nothing anymore and return WRONG_STATE */
1721 gst_collect_pads_set_flushing (mix->collect, TRUE);
1723 /* flushing seek, start flush downstream, the flush will be done
1724 * when all pads received a FLUSH_STOP. */
1725 gst_pad_push_event (mix->srcpad, gst_event_new_flush_start ());
1728 /* now wait for the collected to be finished and mark a new
1730 GST_OBJECT_LOCK (mix->collect);
1731 if (curtype == GST_SEEK_TYPE_SET)
1732 mix->segment_position = cur;
1734 mix->segment_position = 0;
1735 mix->sendseg = TRUE;
1737 if (flags & GST_SEEK_FLAG_FLUSH) {
/* re-enable data flow before forwarding the seek upstream */
1738 gst_collect_pads_set_flushing (mix->collect, FALSE);
1740 /* we can't send FLUSH_STOP here since upstream could start pushing data
1741 * after we unlock mix->collect.
1742 * We set flush_stop_pending to TRUE instead and send FLUSH_STOP after
1743 * forwarding the seek upstream or from gst_videomixer_collected,
1744 * whichever happens first.
1746 mix->flush_stop_pending = TRUE;
1749 GST_OBJECT_UNLOCK (mix->collect);
1750 gst_videomixer_reset_qos (mix);
1752 result = forward_event (mix, event);
/* if nobody sent the deferred FLUSH_STOP yet, do it now (atomically, so
 * it races correctly with gst_videomixer_collected) */
1754 if (g_atomic_int_compare_and_exchange (&mix->flush_stop_pending,
1756 GST_DEBUG_OBJECT (mix, "pending flush stop");
1757 gst_pad_push_event (mix->srcpad, gst_event_new_flush_stop ());
1762 case GST_EVENT_NAVIGATION:
1763 /* navigation is rather pointless. */
1767 /* just forward the rest for now */
1768 result = forward_event (mix, event);
1771 gst_object_unref (mix);
/* Event handler installed on every sink pad (wrapping the GstCollectPads
 * event function saved in mix->collect_event — see request_new_pad).
 * FLUSH_STOP: schedule a new segment, clear pending flush, reset QoS and
 * drop the pad's queued buffer.  NEWSEGMENT: schedule a new downstream
 * segment when it comes from the master pad (or before a master exists).
 * All events are then handed to the original collectpads handler. */
1777 gst_videomixer_sink_event (GstPad * pad, GstEvent * event)
1779 GstVideoMixerPad *vpad = GST_VIDEO_MIXER_PAD (pad);
1780 GstVideoMixer *videomixer = GST_VIDEO_MIXER (gst_pad_get_parent (pad));
1783 GST_DEBUG_OBJECT (pad, "Got %s event on pad %s:%s",
1784 GST_EVENT_TYPE_NAME (event), GST_DEBUG_PAD_NAME (pad));
1786 switch (GST_EVENT_TYPE (event)) {
1787 case GST_EVENT_FLUSH_STOP:
1788 /* mark a pending new segment. This event is synchronized
1789 * with the streaming thread so we can safely update the
1790 * variable without races. It's somewhat weird because we
1791 * assume the collectpads forwarded the FLUSH_STOP past us
1792 * and downstream (using our source pad, the bastard!).
1794 videomixer->sendseg = TRUE;
1795 videomixer->flush_stop_pending = FALSE;
1796 gst_videomixer_reset_qos (videomixer);
1798 /* Reset pad state after FLUSH_STOP */
1799 if (vpad->mixcol->buffer)
1800 gst_buffer_unref (vpad->mixcol->buffer);
1801 vpad->mixcol->buffer = NULL;
1804 case GST_EVENT_NEWSEGMENT:
1805 if (!videomixer->master || vpad == videomixer->master) {
1806 videomixer->sendseg = TRUE;
1807 gst_videomixer_reset_qos (videomixer);
1814 /* now GstCollectPads can take care of the rest, e.g. EOS */
1815 ret = videomixer->collect_event (pad, event);
1817 gst_object_unref (videomixer);
/* GObject::get_property — only PROP_BACKGROUND (the background enum) is
 * readable on the element itself; per-pad properties live on the pads. */
1823 gst_videomixer_get_property (GObject * object,
1824 guint prop_id, GValue * value, GParamSpec * pspec)
1826 GstVideoMixer *mix = GST_VIDEO_MIXER (object);
1829 case PROP_BACKGROUND:
1830 g_value_set_enum (value, mix->background);
1833 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* GObject::set_property — mirror of get_property; writes the background
 * enum (takes effect on the next collected frame, no locking visible here). */
1839 gst_videomixer_set_property (GObject * object,
1840 guint prop_id, const GValue * value, GParamSpec * pspec)
1842 GstVideoMixer *mix = GST_VIDEO_MIXER (object);
1845 case PROP_BACKGROUND:
1846 mix->background = g_value_get_enum (value);
1849 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
1854 static GstStateChangeReturn
/* GstElement::change_state — starts/stops the GstCollectPads around the
 * READY<->PAUSED transitions (before chaining up) and resets the mixer's
 * internal state after PAUSED->READY (after chaining up, per the usual
 * GStreamer downward-transition ordering). */
1855 gst_videomixer_change_state (GstElement * element, GstStateChange transition)
1858 GstStateChangeReturn ret;
1860 g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), GST_STATE_CHANGE_FAILURE);
1862 mix = GST_VIDEO_MIXER (element);
1864 switch (transition) {
1865 case GST_STATE_CHANGE_READY_TO_PAUSED:
1866 GST_LOG_OBJECT (mix, "starting collectpads");
1867 gst_collect_pads_start (mix->collect);
1869 case GST_STATE_CHANGE_PAUSED_TO_READY:
1870 GST_LOG_OBJECT (mix, "stopping collectpads");
/* must stop collectpads before chaining up so pad deactivation does not
 * deadlock against the collect function */
1871 gst_collect_pads_stop (mix->collect);
1877 ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
1879 switch (transition) {
1880 case GST_STATE_CHANGE_PAUSED_TO_READY:
1881 gst_videomixer_reset (mix);
/* Plugin entry point: initializes the debug category and the blend
 * function tables, then registers both the classic "videomixer" element
 * and the videomixer2 element.  Returns FALSE if either registration
 * fails. */
1891 plugin_init (GstPlugin * plugin)
1893 GST_DEBUG_CATEGORY_INIT (gst_videomixer_debug, "videomixer", 0,
1896 gst_video_mixer_init_blend ();
1898 return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY,
1899 GST_TYPE_VIDEO_MIXER) && gst_videomixer2_register (plugin);
1902 GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
1905 "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME,