1 /* Copyright (C) <2018> Philippe Normand <philn@igalia.com>
2 * Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
21 * SECTION:element-wpevideosrc
24 * The wpevideosrc element is used to produce a video texture representing a web page
25 * rendered off-screen by WPE.
27 * Starting from WPEBackend-FDO 1.6.x, software rendering support is available. This
28 * feature allows wpevideosrc to be used on machines without a GPU, and/or for testing
29 * purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true` environment
30 * variable and make sure `video/x-raw, format=BGRA` caps are negotiated by the
31 * wpevideosrc element.
33 * As the webview loading is usually not instantaneous, the wpevideosrc element emits
34 * messages indicating the load progress, in percent. The value is an estimate
35 * based on the total number of bytes expected to be received for a document,
36 * including all its possible subresources and child documents. The application
37 * can handle these `element` messages synchronously for instance, in order to
38 * display a progress bar or other visual load indicator. The load percent value
39 * is stored in the message structure as a double value named
40 * `estimated-load-progress` and the structure name is `wpe-stats`.
42 * ## Example launch lines
45 * gst-launch-1.0 -v wpevideosrc location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
47 * Shows the GStreamer website homepage
50 * LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpevideosrc num-buffers=50 location="https://gstreamer.freedesktop.org" \
51 * ! videoconvert ! pngenc ! multifilesink location=/tmp/snapshot-%05d.png
53 * Saves the first 50 video frames generated for the GStreamer website as PNG files in /tmp.
56 * gst-play-1.0 --videosink gtkglsink wpe://https://gstreamer.freedesktop.org
58 * Shows the GStreamer website homepage as played with GstPlayer in a GTK+ window.
61 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 ! glimagesink wpevideosrc location="file:///tmp/asset.html" draw-background=0 \
62 * ! m. videotestsrc ! queue ! glupload ! glcolorconvert ! m.
64 * Composite WPE with a video stream in a single OpenGL scene.
67 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 sink_0::height=818 sink_0::width=1920 ! gtkglsink \
68 * wpevideosrc location="file:///tmp/asset.html" draw-background=0 ! m.
69 * uridecodebin uri="http://example.com/Sintel.2010.1080p.mkv" name=d d. ! queue ! glupload ! glcolorconvert ! m.
71 * Composite WPE with a video stream, sink_0 pad properties have to match the video dimensions.
78 * - Audio support (requires an AudioSession implementation in WebKit and a WPEBackend-fdo API for it)
79 * - DMABuf support (requires changes in WPEBackend-fdo to expose DMABuf planes and fds)
80 * - Custom EGLMemory allocator
81 * - Better navigation events handling (would require a new GstNavigation API)
88 #include "gstwpevideosrc.h"
89 #include "gstwpe-private.h"
90 #include <gst/gl/gl.h>
91 #include <gst/gl/egl/gstglmemoryegl.h>
92 #include <gst/gl/wayland/gstgldisplay_wayland.h>
93 #include <gst/video/video.h>
94 #include <xkbcommon/xkbcommon.h>
96 #include "WPEThreadedView.h"
98 #define DEFAULT_WIDTH 1920
99 #define DEFAULT_HEIGHT 1080
100 #define DEFAULT_FPS_N 30
101 #define DEFAULT_FPS_D 1
112 SIGNAL_CONFIGURE_WEB_VIEW,
116 static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };
/* Instance structure of the wpevideosrc element.
 * NOTE(review): only a subset of the fields is visible in this chunk; the
 * comments below only cover what the visible code demonstrates. */
118 struct _GstWpeVideoSrc
/* Whether the WebView paints its own background (defaults to TRUE, set in
 * gst_wpe_video_src_init, exposed as the "draw-background" property). */
124 gboolean draw_background;
129 gint64 n_frames; /* total frames sent */
/* Audio receiver registered before the view exists; handed over to the view
 * in gst_wpe_video_src_start() and cleared afterwards. */
132 const struct wpe_audio_receiver *audio_receiver;
133 gpointer audio_receiver_data;
138 #define WPE_LOCK(o) g_mutex_lock(&(o)->lock)
139 #define WPE_UNLOCK(o) g_mutex_unlock(&(o)->lock)
141 #define gst_wpe_video_src_parent_class parent_class
142 G_DEFINE_TYPE(GstWpeVideoSrc, gst_wpe_video_src, GST_TYPE_GL_BASE_SRC);
144 #if ENABLE_SHM_BUFFER_SUPPORT
145 #define WPE_RAW_CAPS "; video/x-raw, " \
146 "format = (string) BGRA, " \
147 "width = " GST_VIDEO_SIZE_RANGE ", " \
148 "height = " GST_VIDEO_SIZE_RANGE ", " \
149 "framerate = " GST_VIDEO_FPS_RANGE ", " \
150 "pixel-aspect-ratio = (fraction)1/1"
152 #define WPE_RAW_CAPS ""
155 #define WPE_BASIC_CAPS "video/x-raw(memory:GLMemory), " \
156 "format = (string) RGBA, " \
157 "width = " GST_VIDEO_SIZE_RANGE ", " \
158 "height = " GST_VIDEO_SIZE_RANGE ", " \
159 "framerate = " GST_VIDEO_FPS_RANGE ", " \
160 "pixel-aspect-ratio = (fraction)1/1, texture-target = (string)2D"
162 #define WPE_VIDEO_SRC_CAPS WPE_BASIC_CAPS WPE_RAW_CAPS
163 #define WPE_VIDEO_SRC_DOC_CAPS WPE_BASIC_CAPS "; video/x-raw, format = (string) BGRA"
165 static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
168 GST_STATIC_CAPS (WPE_VIDEO_SRC_CAPS));
/* GstBaseSrc::create vmethod.
 * In GL mode, defers entirely to the parent (GstGLBaseSrc) implementation.
 * In SHM/raw mode, deep-copies the buffer rendered by the WPE view and
 * timestamps it the same way GLBaseSrc::fill would. */
171 gst_wpe_video_src_create (GstBaseSrc * bsrc, guint64 offset, guint length, GstBuffer ** buf)
173 GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (bsrc);
174 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
175 GstFlowReturn ret = GST_FLOW_ERROR;
176 GstBuffer *locked_buffer;
177 GstClockTime next_time;
178 gint64 ts_offset = 0;
/* GL path: the parent class fills GLMemories via fill_gl_memory below. */
181 if (src->gl_enabled) {
183 return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, create, (bsrc, offset, length, buf), ret);
/* SHM path: fetch the last buffer rendered by the WPE view. */
186 locked_buffer = src->view->buffer ();
187 if (locked_buffer == NULL) {
189 GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
190 ("WPE View did not render a buffer"), (NULL));
/* Deep copy so the view can keep recycling its own buffer. */
193 *buf = gst_buffer_copy_deep (locked_buffer);
195 g_object_get(gl_src, "timestamp-offset", &ts_offset, NULL);
197 /* The following code mimics the behaviour of GLBaseSrc::fill */
198 GST_BUFFER_TIMESTAMP (*buf) = ts_offset + gl_src->running_time;
199 GST_BUFFER_OFFSET (*buf) = src->n_frames;
201 GST_BUFFER_OFFSET_END (*buf) = src->n_frames;
/* Duration is derived from the negotiated framerate when one is set;
 * otherwise the buffer gets no duration. */
202 if (gl_src->out_info.fps_n) {
203 next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
204 gl_src->out_info.fps_d, gl_src->out_info.fps_n);
205 GST_BUFFER_DURATION (*buf) = next_time - gl_src->running_time;
207 next_time = ts_offset;
208 GST_BUFFER_DURATION (*buf) = GST_CLOCK_TIME_NONE;
211 GST_LOG_OBJECT (src, "Created buffer from SHM %" GST_PTR_FORMAT, *buf);
213 gl_src->running_time = next_time;
/* GstGLBaseSrc::fill_gl_memory vmethod.
 * Binds the EGLImage produced by the WPE view to the GLMemory's texture.
 * Requires the EGL_KHR_image_base extension on the GL context. */
221 gst_wpe_video_src_fill_memory (GstGLBaseSrc * bsrc, GstGLMemory * memory)
223 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
224 const GstGLFuncs *gl;
226 GstEGLImage *locked_image;
228 if (!gst_gl_context_check_feature (GST_GL_CONTEXT (bsrc->context),
229 "EGL_KHR_image_base")) {
230 GST_ERROR_OBJECT (src, "EGL_KHR_image_base is not supported");
236 gl = bsrc->context->gl_vtable;
237 tex_id = gst_gl_memory_get_texture_id (memory);
/* Latest EGLImage rendered by the threaded WPE view. */
238 locked_image = src->view->image ();
/* Attach the EGLImage as the backing store of the output texture. */
245 gl->ActiveTexture (GL_TEXTURE0 + memory->plane);
246 gl->BindTexture (GL_TEXTURE_2D, tex_id);
247 gl->EGLImageTargetTexture2D (GL_TEXTURE_2D,
248 gst_egl_image_get_image (locked_image));
/* Common start path, called either from gl_start (GL mode) or directly from
 * decide_allocation (SHM mode). Creates the threaded WPE view, flushes any
 * pending "load-bytes" data, and hands over a pending audio receiver. */
255 gst_wpe_video_src_start (GstWpeVideoSrc * src)
257 GstGLContext *context = NULL;
258 GstGLDisplay *display = NULL;
259 GstGLBaseSrc *base_src = GST_GL_BASE_SRC (src);
260 gboolean created_view = FALSE;
263 GST_INFO_OBJECT (src, "Starting up");
/* In GL mode, reuse the context/display negotiated by GstGLBaseSrc;
 * in SHM mode both stay NULL. */
266 if (src->gl_enabled) {
267 context = base_src->context;
268 display = base_src->display;
271 GST_DEBUG_OBJECT (src, "Will %sfill GLMemories", src->gl_enabled ? "" : "NOT ");
273 auto & thread = WPEContextThread::singleton ();
276 src->view = thread.createWPEView (src, context, display,
277 GST_VIDEO_INFO_WIDTH (&base_src->out_info),
278 GST_VIDEO_INFO_HEIGHT (&base_src->out_info));
280 GST_DEBUG_OBJECT (src, "created view %p", src->view);
285 GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
286 ("WPEBackend-FDO EGL display initialisation failed"), (NULL));
290 GST_OBJECT_LOCK (src);
293 GST_OBJECT_UNLOCK (src);
/* Data queued through the "load-bytes" signal before the view existed. */
296 src->view->loadData (bytes);
297 g_bytes_unref (bytes);
/* Hand a pending audio receiver over to the view, then clear the pending
 * state so it is registered only once. */
302 if (src->audio_receiver) {
303 src->view->registerAudioReceiver(src->audio_receiver, src->audio_receiver_data);
304 src->audio_receiver = NULL;
305 src->audio_receiver_data = NULL;
/* GstBaseSrc::decide_allocation vmethod.
 * Decides between GL and SHM operation from the negotiated caps features:
 * GLMemory caps -> GL mode, chain up so GstGLBaseSrc sets up the GL context
 * (which in turn calls gl_start); raw caps -> SHM mode, start directly. */
313 gst_wpe_video_src_decide_allocation (GstBaseSrc * base_src, GstQuery * query)
315 GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (base_src);
316 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
317 GstCapsFeatures *caps_features;
320 caps_features = gst_caps_get_features (gl_src->out_caps, 0);
321 if (caps_features != NULL && gst_caps_features_contains (caps_features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
322 src->gl_enabled = TRUE;
324 src->gl_enabled = FALSE;
327 if (src->gl_enabled) {
329 return GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SRC_CLASS, decide_allocation, (base_src, query), FALSE);
332 return gst_wpe_video_src_start (src);
/* GstGLBaseSrc::gl_start vmethod (GL mode only): delegate to the common
 * start path once the GL context is ready. */
336 gst_wpe_video_src_gl_start (GstGLBaseSrc * base_src)
338 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
339 return gst_wpe_video_src_start (src);
/* Tears down the WPE view. Callers hold the element lock (hence "unlocked"
 * refers to this function not taking it itself).
 * NOTE(review): most of this function's body is not visible in this chunk. */
343 gst_wpe_video_src_stop_unlocked (GstWpeVideoSrc * src)
346 GST_DEBUG_OBJECT (src, "deleting view %p", src->view);
/* GstGLBaseSrc::gl_stop vmethod (GL mode only): release the WPE view while
 * the GL context is being torn down. */
355 gst_wpe_video_src_gl_stop (GstGLBaseSrc * base_src)
357 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
358 gst_wpe_video_src_stop_unlocked (src);
/* GstBaseSrc::stop vmethod: chain up first, then release the view ourselves
 * only in SHM mode (in GL mode gl_stop already did it via the base class). */
363 gst_wpe_video_src_stop (GstBaseSrc * base_src)
365 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
367 /* we can call this always, GstGLBaseSrc is smart enough to not crash if
368 * gst_gl_base_src_gl_start() has not been called from chaining up
369 * gst_wpe_video_src_decide_allocation() */
370 if (!GST_CALL_PARENT_WITH_DEFAULT(GST_BASE_SRC_CLASS, stop, (base_src), FALSE))
375 /* if gl-enabled, gst_wpe_video_src_stop_unlocked() would have already been called
376 * inside gst_wpe_video_src_gl_stop() from the base class stopping the OpenGL
378 if (!src->gl_enabled)
379 gst_wpe_video_src_stop_unlocked (src);
/* GstBaseSrc::fixate vmethod.
 * Nails down unfixed caps fields to the element defaults (1920x1080@30/1),
 * chains up, then resizes the WPE view to the fixated dimensions. */
386 gst_wpe_video_src_fixate (GstBaseSrc * base_src, GstCaps * caps)
388 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
389 GstStructure *structure;
392 caps = gst_caps_make_writable (caps);
393 structure = gst_caps_get_structure (caps, 0);
395 gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
396 gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);
/* Fixate an existing framerate field toward the default; otherwise force
 * the default framerate outright. */
398 if (gst_structure_has_field (structure, "framerate"))
399 gst_structure_fixate_field_nearest_fraction (structure, "framerate",
400 DEFAULT_FPS_N, DEFAULT_FPS_D);
402 gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, DEFAULT_FPS_N,
403 DEFAULT_FPS_D, NULL);
405 caps = GST_BASE_SRC_CLASS (parent_class)->fixate (base_src, caps);
406 GST_INFO_OBJECT (base_src, "Fixated caps to %" GST_PTR_FORMAT, caps);
/* Keep the off-screen web view in sync with the negotiated frame size. */
409 gst_structure_get (structure, "width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, NULL);
410 src->view->resize (width, height);
/* Emits the "configure-web-view" signal so the application can tweak the
 * WebKitWebView settings. Called by the WPE view once the webview exists. */
416 gst_wpe_video_src_configure_web_view (GstWpeVideoSrc * src, WebKitWebView * webview)
418 GValue args[2] = { {0}, {0} };
/* args[0] = the emitting element, args[1] = the webview (signal argument). */
420 g_value_init (&args[0], GST_TYPE_ELEMENT);
421 g_value_set_object (&args[0], src);
422 g_value_init (&args[1], G_TYPE_OBJECT);
423 g_value_set_object (&args[1], webview);
425 g_signal_emitv (args, gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW], 0,
428 g_value_unset (&args[0]);
429 g_value_unset (&args[1]);
/* Class handler of the "load-bytes" action signal.
 * If the view already exists (element out of NULL state), load immediately;
 * otherwise stash a ref to the bytes for gst_wpe_video_src_start(). */
433 gst_wpe_video_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
435 if (src->view && GST_STATE (GST_ELEMENT_CAST (src)) > GST_STATE_NULL) {
436 src->view->loadData (bytes);
438 GST_OBJECT_LOCK (src);
/* Drop any previously queued data before keeping the new bytes. */
440 g_bytes_unref (src->bytes);
441 src->bytes = g_bytes_ref (bytes);
442 GST_OBJECT_UNLOCK (src);
/* Stores the "location" property and, when the view already exists, loads
 * the URI right away. */
447 gst_wpe_video_src_set_location (GstWpeVideoSrc * src, const gchar * location,
450 GST_OBJECT_LOCK (src);
451 g_free (src->location);
452 src->location = g_strdup (location);
453 GST_OBJECT_UNLOCK (src);
456 src->view->loadUri (location);
/* Stores the "draw-background" property and forwards it to the view when one
 * exists. */
462 gst_wpe_video_src_set_draw_background (GstWpeVideoSrc * src, gboolean draw_background)
464 GST_OBJECT_LOCK (src);
465 src->draw_background = draw_background;
466 GST_OBJECT_UNLOCK (src);
469 src->view->setDrawBackground (draw_background);
/* GObject::set_property vmethod for "location" and "draw-background". */
473 gst_wpe_video_src_set_property (GObject * object, guint prop_id, const GValue * value,
476 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);
481 const gchar *location;
483 location = g_value_get_string (value);
/* Reject NULL and malformed locations with a warning, keeping the old
 * value. */
484 if (location == NULL) {
485 GST_WARNING_OBJECT (src, "location property cannot be NULL");
489 if (!gst_wpe_video_src_set_location (src, location, NULL)) {
490 GST_WARNING_OBJECT (src, "badly formatted location");
495 case PROP_DRAW_BACKGROUND:
496 gst_wpe_video_src_set_draw_background (src, g_value_get_boolean (value));
/* GObject::get_property vmethod; reads are guarded by the object lock since
 * the setters mutate these fields under the same lock. */
504 gst_wpe_video_src_get_property (GObject * object, guint prop_id, GValue * value,
507 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);
511 GST_OBJECT_LOCK (src);
512 g_value_set_string (value, src->location);
513 GST_OBJECT_UNLOCK (src);
515 case PROP_DRAW_BACKGROUND:
516 GST_OBJECT_LOCK (src);
517 g_value_set_boolean (value, src->draw_background);
518 GST_OBJECT_UNLOCK (src);
521 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
/* Src pad event function: translates upstream GstNavigation events (key,
 * mouse button, mouse move, scroll) into WPE input events dispatched to the
 * web view. Non-navigation events fall through to the default pad handler. */
527 gst_wpe_video_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
529 gboolean ret = FALSE;
530 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (parent);
532 if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
535 gdouble x, y, delta_x, delta_y;
537 GST_DEBUG_OBJECT (src, "Processing event %" GST_PTR_FORMAT, event);
541 switch (gst_navigation_event_get_type (event)) {
/* Keyboard: map the keysym name back to an xkb keysym code. */
542 case GST_NAVIGATION_EVENT_KEY_PRESS:
543 case GST_NAVIGATION_EVENT_KEY_RELEASE:
544 if (gst_navigation_event_parse_key_event (event, &key)) {
545 /* FIXME: This is wrong... The GstNavigation API should pass
546 hardware-level information, not high-level keysym strings */
548 (uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
549 struct wpe_input_keyboard_event wpe_event;
550 wpe_event.key_code = keysym;
552 gst_navigation_event_get_type (event) ==
553 GST_NAVIGATION_EVENT_KEY_PRESS;
554 src->view->dispatchKeyboardEvent (wpe_event);
/* Mouse buttons: buttons 1-5 map to the corresponding WPE pointer
 * modifier; press/release is encoded in the event state. */
558 case GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS:
559 case GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE:
560 if (gst_navigation_event_parse_mouse_button_event (event, &button, &x,
562 struct wpe_input_pointer_event wpe_event;
563 wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
564 wpe_event.type = wpe_input_pointer_event_type_button;
565 wpe_event.x = (int) x;
566 wpe_event.y = (int) y;
568 wpe_event.modifiers = wpe_input_pointer_modifier_button1;
569 } else if (button == 2) {
570 wpe_event.modifiers = wpe_input_pointer_modifier_button2;
571 } else if (button == 3) {
572 wpe_event.modifiers = wpe_input_pointer_modifier_button3;
573 } else if (button == 4) {
574 wpe_event.modifiers = wpe_input_pointer_modifier_button4;
575 } else if (button == 5) {
576 wpe_event.modifiers = wpe_input_pointer_modifier_button5;
578 wpe_event.button = button;
580 gst_navigation_event_get_type (event) ==
581 GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS;
582 src->view->dispatchPointerEvent (wpe_event);
/* Pointer motion. */
586 case GST_NAVIGATION_EVENT_MOUSE_MOVE:
587 if (gst_navigation_event_parse_mouse_move_event (event, &x, &y)) {
588 struct wpe_input_pointer_event wpe_event;
589 wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
590 wpe_event.type = wpe_input_pointer_event_type_motion;
591 wpe_event.x = (int) x;
592 wpe_event.y = (int) y;
593 src->view->dispatchPointerEvent (wpe_event);
/* Scroll: 2D smooth-axis event on WPE >= 1.6, legacy single-axis event
 * otherwise. */
597 case GST_NAVIGATION_EVENT_MOUSE_SCROLL:
598 if (gst_navigation_event_parse_mouse_scroll_event (event, &x, &y,
599 &delta_x, &delta_y)) {
600 #if WPE_CHECK_VERSION(1, 6, 0)
601 struct wpe_input_axis_2d_event wpe_event;
603 wpe_event.x_axis = delta_x;
605 wpe_event.y_axis = delta_y;
607 wpe_event.base.time =
608 GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
609 wpe_event.base.type =
610 static_cast < wpe_input_axis_event_type >
611 (wpe_input_axis_event_type_mask_2d |
612 wpe_input_axis_event_type_motion_smooth);
613 wpe_event.base.x = (int) x;
614 wpe_event.base.y = (int) y;
615 src->view->dispatchAxisEvent (wpe_event.base);
617 struct wpe_input_axis_event wpe_event;
620 wpe_event.value = delta_x;
623 wpe_event.value = delta_y;
625 wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
626 wpe_event.type = wpe_input_axis_event_type_motion;
627 wpe_event.x = (int) x;
628 wpe_event.y = (int) y;
629 src->view->dispatchAxisEvent (wpe_event);
637 /* FIXME: No touch events handling support in GstNavigation */
/* Anything not consumed above goes to the default pad event handler. */
641 ret = gst_pad_event_default (pad, parent, event);
643 gst_event_unref (event);
/* Instance init: install the navigation event handler on the src pad, set
 * property defaults, mark the source live, and init the element mutex. */
649 gst_wpe_video_src_init (GstWpeVideoSrc * src)
651 GstPad *pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src), "src");
653 gst_pad_set_event_function (pad, gst_wpe_video_src_event);
654 gst_object_unref (pad);
656 src->draw_background = TRUE;
/* Live source: buffers are produced in real time by the web view. */
658 gst_base_src_set_live (GST_BASE_SRC_CAST (src), TRUE);
660 g_mutex_init (&src->lock);
/* GObject::finalize: release the location string, any still-queued
 * "load-bytes" data, and the element mutex, then chain up. */
664 gst_wpe_video_src_finalize (GObject * object)
666 GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);
668 g_free (src->location);
669 g_clear_pointer (&src->bytes, g_bytes_unref);
670 g_mutex_clear (&src->lock);
672 G_OBJECT_CLASS (parent_class)->finalize (object);
/* Class init: wires up properties, pad template, element metadata, the
 * BaseSrc/GLBaseSrc vmethods and the two element signals. */
676 gst_wpe_video_src_class_init (GstWpeVideoSrcClass * klass)
678 GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
679 GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
680 GstGLBaseSrcClass *gl_base_src_class = GST_GL_BASE_SRC_CLASS (klass);
681 GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
682 GstPadTemplate *tmpl;
685 gobject_class->set_property = gst_wpe_video_src_set_property;
686 gobject_class->get_property = gst_wpe_video_src_get_property;
687 gobject_class->finalize = gst_wpe_video_src_finalize;
689 g_object_class_install_property (gobject_class, PROP_LOCATION,
690 g_param_spec_string ("location", "location",
691 "The URL to display",
692 "", (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
693 g_object_class_install_property (gobject_class, PROP_DRAW_BACKGROUND,
694 g_param_spec_boolean ("draw-background", "Draws the background",
695 "Whether to draw the WebView background", TRUE,
696 (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
698 gst_element_class_set_static_metadata (gstelement_class,
699 "WPE source", "Source/Video",
700 "Creates a video stream from a WPE browser",
701 "Philippe Normand <philn@igalia.com>, Žan Doberšek <zdobersek@igalia.com>");
703 tmpl = gst_static_pad_template_get (&src_factory);
704 gst_element_class_add_pad_template (gstelement_class, tmpl);
/* BaseSrc vmethods: caps fixation, SHM-mode buffer creation, GL/SHM mode
 * selection, and teardown. */
706 base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_wpe_video_src_fixate);
707 base_src_class->create = GST_DEBUG_FUNCPTR (gst_wpe_video_src_create);
708 base_src_class->decide_allocation = GST_DEBUG_FUNCPTR (gst_wpe_video_src_decide_allocation);
709 base_src_class->stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_stop);
711 gl_base_src_class->supported_gl_api =
712 static_cast < GstGLAPI >
713 (GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2);
714 gl_base_src_class->gl_start = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_start);
715 gl_base_src_class->gl_stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_stop);
716 gl_base_src_class->fill_gl_memory =
717 GST_DEBUG_FUNCPTR (gst_wpe_video_src_fill_memory);
/* Documentation caps advertise raw BGRA in addition to GLMemory RGBA. */
719 doc_caps = gst_caps_from_string (WPE_VIDEO_SRC_DOC_CAPS);
720 gst_pad_template_set_documentation_caps (tmpl, doc_caps);
721 gst_clear_caps (&doc_caps);
724 * GstWpeVideoSrc::configure-web-view:
725 * @src: the object which received the signal
726 * @webview: the webView
728 * Allow application to configure the webView settings.
730 gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW] =
731 g_signal_new ("configure-web-view", G_TYPE_FROM_CLASS (klass),
732 G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_OBJECT);
735 * GstWpeVideoSrc::load-bytes:
736 * @src: the object which received the signal
737 * @bytes: the GBytes data to load
739 * Load the specified bytes into the internal webView.
741 gst_wpe_video_src_signals[SIGNAL_LOAD_BYTES] =
742 g_signal_new_class_handler ("load-bytes", G_TYPE_FROM_CLASS (klass),
743 static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
744 G_CALLBACK (gst_wpe_video_src_load_bytes), NULL, NULL, NULL,
745 G_TYPE_NONE, 1, G_TYPE_BYTES);
/* Private API (gstwpe-private.h): registers an audio receiver on the element.
 * If the view already exists, register immediately; otherwise the receiver is
 * stashed and handed over in gst_wpe_video_src_start().
 * Consistency fix: use GST_WPE_VIDEO_SRC like every other cast in this file
 * (was GST_WPE_VIDEO_SOURCE). */
749 gst_wpe_video_src_register_audio_receiver(GstElement* video_src, const struct wpe_audio_receiver* receiver, gpointer user_data)
751 GstWpeVideoSrc* src = GST_WPE_VIDEO_SRC(video_src);
754 src->audio_receiver = receiver;
755 src->audio_receiver_data = user_data;
758 src->view->registerAudioReceiver(receiver, user_data);