/* Copyright (C) <2018> Philippe Normand <philn@igalia.com>
 * Copyright (C) <2018> Žan Doberšek <zdobersek@igalia.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */
/**
 * SECTION:element-wpevideosrc
 *
 * The wpevideosrc element is used to produce a video texture representing a web page
 * rendered off-screen by WPE.
 *
 * Starting from WPEBackend-FDO 1.6.x, software rendering support is available.
 * This feature allows wpevideosrc to be used on machines without a GPU, and/or
 * for testing purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true`
 * environment variable and make sure `video/x-raw, format=BGRA` caps are
 * negotiated by the wpevideosrc element.
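 *
 * A minimal software-rendering sketch along those lines (the capsfilter and the
 * autovideosink are only illustrative choices, not requirements):
 *
 * ```
 * LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpevideosrc location="https://gstreamer.freedesktop.org" ! \
 *   "video/x-raw,format=BGRA" ! videoconvert ! autovideosink
 * ```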
 *
 * As the webview loading is usually not instantaneous, the wpevideosrc element
 * emits messages indicating the load progress, in percent. The value is an
 * estimate based on the total number of bytes expected to be received for a
 * document, including all its possible subresources and child documents. The
 * application can handle these `element` messages synchronously, for instance
 * in order to display a progress bar or another visual load indicator. The load
 * percent value is stored in the message structure as a double value named
 * `estimated-load-progress` and the structure name is `wpe-stats`.
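 *
 * A minimal sketch of a synchronous bus handler reading that value (the
 * callback name is only illustrative, not part of the element API):
 *
 * ```
 * static GstBusSyncReply
 * bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
 * {
 *   if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
 *     const GstStructure *s = gst_message_get_structure (message);
 *     if (s && gst_structure_has_name (s, "wpe-stats")) {
 *       gdouble progress;
 *       if (gst_structure_get_double (s, "estimated-load-progress", &progress))
 *         g_print ("Load progress: %.0f%%\n", progress);
 *     }
 *   }
 *   return GST_BUS_PASS;
 * }
 * ```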
 *
 * ## Example launch lines
 *
 * ```
 * gst-launch-1.0 -v wpevideosrc location="https://gstreamer.freedesktop.org" ! queue ! glimagesink
 * ```
 * Shows the GStreamer website homepage
 *
 * ```
 * LIBGL_ALWAYS_SOFTWARE=true gst-launch-1.0 -v wpevideosrc num-buffers=50 location="https://gstreamer.freedesktop.org" \
 *   ! videoconvert ! pngenc ! multifilesink location=/tmp/snapshot-%05d.png
 * ```
 * Saves the first 50 video frames generated for the GStreamer website as PNG files in /tmp.
 *
 * ```
 * gst-play-1.0 --videosink gtkglsink wpe://https://gstreamer.freedesktop.org
 * ```
 * Shows the GStreamer website homepage as played with GstPlayer in a GTK+ window.
 *
 * ```
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 ! glimagesink wpevideosrc location="file:///tmp/asset.html" draw-background=0 \
 *   ! m. videotestsrc ! queue ! glupload ! glcolorconvert ! m.
 * ```
 * Composite WPE with a video stream in a single OpenGL scene.
 *
 * ```
 * gst-launch-1.0 glvideomixer name=m sink_1::zorder=0 sink_0::height=818 sink_0::width=1920 ! gtkglsink \
 *   wpevideosrc location="file:///tmp/asset.html" draw-background=0 ! m. \
 *   uridecodebin uri="http://example.com/Sintel.2010.1080p.mkv" name=d d. ! queue ! glupload ! glcolorconvert ! m.
 * ```
 * Composite WPE with a video stream; the sink_0 pad properties have to match the video dimensions.
 *
 * - Audio support (requires an AudioSession implementation in WebKit and a WPEBackend-fdo API for it)
 * - DMABuf support (requires changes in WPEBackend-fdo to expose DMABuf planes and fds)
 * - Custom EGLMemory allocator
 * - Better navigation events handling (would require a new GstNavigation API)
 */
#include "gstwpevideosrc.h"
#include <gst/gl/gl.h>
#include <gst/gl/egl/gstglmemoryegl.h>
#include <gst/gl/wayland/gstgldisplay_wayland.h>
#include <gst/video/video.h>
#include <xkbcommon/xkbcommon.h>

#include "WPEThreadedView.h"

#define DEFAULT_WIDTH 1920
#define DEFAULT_HEIGHT 1080
#define DEFAULT_FPS_N 30
#define DEFAULT_FPS_D 1
  SIGNAL_CONFIGURE_WEB_VIEW,

static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };

struct _GstWpeVideoSrc
  gboolean draw_background;
  gint64 n_frames;              /* total frames sent */

#define WPE_LOCK(o) g_mutex_lock(&(o)->lock)
#define WPE_UNLOCK(o) g_mutex_unlock(&(o)->lock)

#define gst_wpe_video_src_parent_class parent_class
G_DEFINE_TYPE (GstWpeVideoSrc, gst_wpe_video_src, GST_TYPE_GL_BASE_SRC);
#if ENABLE_SHM_BUFFER_SUPPORT
#define WPE_RAW_CAPS "; video/x-raw, " \
    "format = (string) BGRA, " \
    "width = " GST_VIDEO_SIZE_RANGE ", " \
    "height = " GST_VIDEO_SIZE_RANGE ", " \
    "framerate = " GST_VIDEO_FPS_RANGE ", " \
    "pixel-aspect-ratio = (fraction)1/1"
#else
#define WPE_RAW_CAPS ""
#endif
#define WPE_BASIC_CAPS "video/x-raw(memory:GLMemory), " \
    "format = (string) RGBA, " \
    "width = " GST_VIDEO_SIZE_RANGE ", " \
    "height = " GST_VIDEO_SIZE_RANGE ", " \
    "framerate = " GST_VIDEO_FPS_RANGE ", " \
    "pixel-aspect-ratio = (fraction)1/1, texture-target = (string)2D"

#define WPE_VIDEO_SRC_CAPS WPE_BASIC_CAPS WPE_RAW_CAPS
#define WPE_VIDEO_SRC_DOC_CAPS WPE_BASIC_CAPS "; video/x-raw, format = (string) BGRA"

static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
    GST_STATIC_CAPS (WPE_VIDEO_SRC_CAPS));
gst_wpe_video_src_create (GstBaseSrc * bsrc, guint64 offset, guint length, GstBuffer ** buf)
  GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (bsrc);
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstBuffer *locked_buffer;
  GstClockTime next_time;
  gint64 ts_offset = 0;

  if (src->gl_enabled) {
    return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, create, (bsrc, offset, length, buf), ret);
  }

  locked_buffer = src->view->buffer ();
  if (locked_buffer == NULL) {
    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("WPE View did not render a buffer"), (NULL));

  *buf = gst_buffer_copy_deep (locked_buffer);

  g_object_get (gl_src, "timestamp-offset", &ts_offset, NULL);

  /* The following code mimics the behaviour of GLBaseSrc::fill */
  GST_BUFFER_TIMESTAMP (*buf) = ts_offset + gl_src->running_time;
  GST_BUFFER_OFFSET (*buf) = src->n_frames;
  GST_BUFFER_OFFSET_END (*buf) = src->n_frames;
  if (gl_src->out_info.fps_n) {
    next_time = gst_util_uint64_scale_int (src->n_frames * GST_SECOND,
        gl_src->out_info.fps_d, gl_src->out_info.fps_n);
    GST_BUFFER_DURATION (*buf) = next_time - gl_src->running_time;
  } else {
    next_time = ts_offset;
    GST_BUFFER_DURATION (*buf) = GST_CLOCK_TIME_NONE;
  }

  GST_LOG_OBJECT (src, "Created buffer from SHM %" GST_PTR_FORMAT, *buf);

  gl_src->running_time = next_time;
gst_wpe_video_src_fill_memory (GstGLBaseSrc * bsrc, GstGLMemory * memory)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (bsrc);
  const GstGLFuncs *gl;
  GstEGLImage *locked_image;

  if (!gst_gl_context_check_feature (GST_GL_CONTEXT (bsrc->context),
          "EGL_KHR_image_base")) {
    GST_ERROR_OBJECT (src, "EGL_KHR_image_base is not supported");

  gl = bsrc->context->gl_vtable;
  tex_id = gst_gl_memory_get_texture_id (memory);
  locked_image = src->view->image ();

  gl->ActiveTexture (GL_TEXTURE0 + memory->plane);
  gl->BindTexture (GL_TEXTURE_2D, tex_id);
  gl->EGLImageTargetTexture2D (GL_TEXTURE_2D,
      gst_egl_image_get_image (locked_image));
gst_wpe_video_src_start (GstWpeVideoSrc * src)
  GstGLContext *context = NULL;
  GstGLDisplay *display = NULL;
  GstGLBaseSrc *base_src = GST_GL_BASE_SRC (src);
  gboolean created_view = FALSE;

  GST_INFO_OBJECT (src, "Starting up");

  if (src->gl_enabled) {
    context = base_src->context;
    display = base_src->display;

  GST_DEBUG_OBJECT (src, "Will %sfill GLMemories", src->gl_enabled ? "" : "NOT ");

  auto & thread = WPEContextThread::singleton ();

  src->view = thread.createWPEView (src, context, display,
      GST_VIDEO_INFO_WIDTH (&base_src->out_info),
      GST_VIDEO_INFO_HEIGHT (&base_src->out_info));

  GST_DEBUG_OBJECT (src, "created view %p", src->view);

    GST_ELEMENT_ERROR (src, RESOURCE, FAILED,
        ("WPEBackend-FDO EGL display initialisation failed"), (NULL));

  GST_OBJECT_LOCK (src);
  GST_OBJECT_UNLOCK (src);

    src->view->loadData (bytes);
    g_bytes_unref (bytes);
gst_wpe_video_src_decide_allocation (GstBaseSrc * base_src, GstQuery * query)
  GstGLBaseSrc *gl_src = GST_GL_BASE_SRC (base_src);
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  GstCapsFeatures *caps_features;

  caps_features = gst_caps_get_features (gl_src->out_caps, 0);
  if (caps_features != NULL && gst_caps_features_contains (caps_features, GST_CAPS_FEATURE_MEMORY_GL_MEMORY)) {
    src->gl_enabled = TRUE;
  } else {
    src->gl_enabled = FALSE;
  }

  if (src->gl_enabled) {
    return GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, decide_allocation, (base_src, query), FALSE);
  }

  return gst_wpe_video_src_start (src);
gst_wpe_video_src_gl_start (GstGLBaseSrc * base_src)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  return gst_wpe_video_src_start (src);

gst_wpe_video_src_stop_unlocked (GstWpeVideoSrc * src)
  GST_DEBUG_OBJECT (src, "deleting view %p", src->view);

gst_wpe_video_src_gl_stop (GstGLBaseSrc * base_src)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);

  gst_wpe_video_src_stop_unlocked (src);
gst_wpe_video_src_stop (GstBaseSrc * base_src)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);

  /* We can always call this; GstGLBaseSrc is smart enough not to crash if
   * gst_gl_base_src_gl_start() has not been called from chaining up
   * gst_wpe_video_src_decide_allocation(). */
  if (!GST_CALL_PARENT_WITH_DEFAULT (GST_BASE_SRC_CLASS, stop, (base_src), FALSE))

  /* If gl-enabled, gst_wpe_video_src_stop_unlocked() has already been called
   * inside gst_wpe_video_src_gl_stop() when the base class stopped the OpenGL
   * context. */
  if (!src->gl_enabled)
    gst_wpe_video_src_stop_unlocked (src);
gst_wpe_video_src_fixate (GstBaseSrc * base_src, GstCaps * caps)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (base_src);
  GstStructure *structure;

  caps = gst_caps_make_writable (caps);
  structure = gst_caps_get_structure (caps, 0);

  gst_structure_fixate_field_nearest_int (structure, "width", DEFAULT_WIDTH);
  gst_structure_fixate_field_nearest_int (structure, "height", DEFAULT_HEIGHT);

  if (gst_structure_has_field (structure, "framerate"))
    gst_structure_fixate_field_nearest_fraction (structure, "framerate",
        DEFAULT_FPS_N, DEFAULT_FPS_D);
  else
    gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, DEFAULT_FPS_N,
        DEFAULT_FPS_D, NULL);

  caps = GST_BASE_SRC_CLASS (parent_class)->fixate (base_src, caps);
  GST_INFO_OBJECT (base_src, "Fixated caps to %" GST_PTR_FORMAT, caps);

    gst_structure_get (structure, "width", G_TYPE_INT, &width, "height", G_TYPE_INT, &height, NULL);
    src->view->resize (width, height);
gst_wpe_video_src_configure_web_view (GstWpeVideoSrc * src, WebKitWebView * webview)
  GValue args[2] = { {0}, {0} };

  g_value_init (&args[0], GST_TYPE_ELEMENT);
  g_value_set_object (&args[0], src);
  g_value_init (&args[1], G_TYPE_OBJECT);
  g_value_set_object (&args[1], webview);

  g_signal_emitv (args, gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW], 0,
      NULL);

  g_value_unset (&args[0]);
  g_value_unset (&args[1]);
gst_wpe_video_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes)
  if (src->view && GST_STATE (GST_ELEMENT_CAST (src)) > GST_STATE_NULL) {
    src->view->loadData (bytes);

    GST_OBJECT_LOCK (src);
    g_bytes_unref (src->bytes);
    src->bytes = g_bytes_ref (bytes);
    GST_OBJECT_UNLOCK (src);

gst_wpe_video_src_set_location (GstWpeVideoSrc * src, const gchar * location,
    GError ** error)
  GST_OBJECT_LOCK (src);
  g_free (src->location);
  src->location = g_strdup (location);
  GST_OBJECT_UNLOCK (src);

    src->view->loadUri (location);

gst_wpe_video_src_set_draw_background (GstWpeVideoSrc * src, gboolean draw_background)
  GST_OBJECT_LOCK (src);
  src->draw_background = draw_background;
  GST_OBJECT_UNLOCK (src);

    src->view->setDrawBackground (draw_background);
gst_wpe_video_src_set_property (GObject * object, guint prop_id, const GValue * value,
    GParamSpec * pspec)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

      const gchar *location;

      location = g_value_get_string (value);
      if (location == NULL) {
        GST_WARNING_OBJECT (src, "location property cannot be NULL");

      if (!gst_wpe_video_src_set_location (src, location, NULL)) {
        GST_WARNING_OBJECT (src, "badly formatted location");

    case PROP_DRAW_BACKGROUND:
      gst_wpe_video_src_set_draw_background (src, g_value_get_boolean (value));

gst_wpe_video_src_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

      GST_OBJECT_LOCK (src);
      g_value_set_string (value, src->location);
      GST_OBJECT_UNLOCK (src);
    case PROP_DRAW_BACKGROUND:
      GST_OBJECT_LOCK (src);
      g_value_set_boolean (value, src->draw_background);
      GST_OBJECT_UNLOCK (src);
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
gst_wpe_video_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
  gboolean ret = FALSE;
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (parent);

  if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION) {
    gdouble x, y, delta_x, delta_y;

    GST_DEBUG_OBJECT (src, "Processing event %" GST_PTR_FORMAT, event);

    /* Translate the GstNavigation event into the corresponding WPE input
     * event and dispatch it to the web view. */
    switch (gst_navigation_event_get_type (event)) {
      case GST_NAVIGATION_EVENT_KEY_PRESS:
      case GST_NAVIGATION_EVENT_KEY_RELEASE:
        if (gst_navigation_event_parse_key_event (event, &key)) {
          /* FIXME: This is wrong... The GstNavigation API should pass
             hardware-level information, not high-level keysym strings */
          uint32_t keysym =
              (uint32_t) xkb_keysym_from_name (key, XKB_KEYSYM_NO_FLAGS);
          struct wpe_input_keyboard_event wpe_event;
          wpe_event.key_code = keysym;
          wpe_event.pressed =
              gst_navigation_event_get_type (event) ==
              GST_NAVIGATION_EVENT_KEY_PRESS;
          src->view->dispatchKeyboardEvent (wpe_event);
      case GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS:
      case GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE:
        if (gst_navigation_event_parse_mouse_button_event (event, &button, &x,
                &y)) {
          struct wpe_input_pointer_event wpe_event;
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_pointer_event_type_button;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          if (button == 1) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button1;
          } else if (button == 2) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button2;
          } else if (button == 3) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button3;
          } else if (button == 4) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button4;
          } else if (button == 5) {
            wpe_event.modifiers = wpe_input_pointer_modifier_button5;
          }
          wpe_event.button = button;
          wpe_event.state =
              gst_navigation_event_get_type (event) ==
              GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS;
          src->view->dispatchPointerEvent (wpe_event);
      case GST_NAVIGATION_EVENT_MOUSE_MOVE:
        if (gst_navigation_event_parse_mouse_move_event (event, &x, &y)) {
          struct wpe_input_pointer_event wpe_event;
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_pointer_event_type_motion;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          src->view->dispatchPointerEvent (wpe_event);
      case GST_NAVIGATION_EVENT_MOUSE_SCROLL:
        if (gst_navigation_event_parse_mouse_scroll_event (event, &x, &y,
                &delta_x, &delta_y)) {
#if WPE_CHECK_VERSION(1, 6, 0)
          struct wpe_input_axis_2d_event wpe_event;
          wpe_event.x_axis = delta_x;
          wpe_event.y_axis = delta_y;
          wpe_event.base.time =
              GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.base.type =
              static_cast < wpe_input_axis_event_type >
              (wpe_input_axis_event_type_mask_2d |
              wpe_input_axis_event_type_motion_smooth);
          wpe_event.base.x = (int) x;
          wpe_event.base.y = (int) y;
          src->view->dispatchAxisEvent (wpe_event.base);
#else
          struct wpe_input_axis_event wpe_event;
          wpe_event.value = delta_x;
          wpe_event.value = delta_y;
          wpe_event.time = GST_TIME_AS_MSECONDS (GST_EVENT_TIMESTAMP (event));
          wpe_event.type = wpe_input_axis_event_type_motion;
          wpe_event.x = (int) x;
          wpe_event.y = (int) y;
          src->view->dispatchAxisEvent (wpe_event);
#endif
      /* FIXME: No touch events handling support in GstNavigation */

  if (!ret)
    ret = gst_pad_event_default (pad, parent, event);
  else
    gst_event_unref (event);
gst_wpe_video_src_init (GstWpeVideoSrc * src)
  GstPad *pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src), "src");

  gst_pad_set_event_function (pad, gst_wpe_video_src_event);
  gst_object_unref (pad);

  src->draw_background = TRUE;

  gst_base_src_set_live (GST_BASE_SRC_CAST (src), TRUE);

  g_mutex_init (&src->lock);

gst_wpe_video_src_finalize (GObject * object)
  GstWpeVideoSrc *src = GST_WPE_VIDEO_SRC (object);

  g_free (src->location);
  g_clear_pointer (&src->bytes, g_bytes_unref);
  g_mutex_clear (&src->lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
gst_wpe_video_src_class_init (GstWpeVideoSrcClass * klass)
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
  GstGLBaseSrcClass *gl_base_src_class = GST_GL_BASE_SRC_CLASS (klass);
  GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);
  GstPadTemplate *tmpl;

  gobject_class->set_property = gst_wpe_video_src_set_property;
  gobject_class->get_property = gst_wpe_video_src_get_property;
  gobject_class->finalize = gst_wpe_video_src_finalize;

  g_object_class_install_property (gobject_class, PROP_LOCATION,
      g_param_spec_string ("location", "location",
          "The URL to display",
          "", (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_DRAW_BACKGROUND,
      g_param_spec_boolean ("draw-background", "Draws the background",
          "Whether to draw the WebView background", TRUE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_element_class_set_static_metadata (gstelement_class,
      "WPE source", "Source/Video",
      "Creates a video stream from a WPE browser",
      "Philippe Normand <philn@igalia.com>, Žan Doberšek <zdobersek@igalia.com>");

  tmpl = gst_static_pad_template_get (&src_factory);
  gst_element_class_add_pad_template (gstelement_class, tmpl);

  base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_wpe_video_src_fixate);
  base_src_class->create = GST_DEBUG_FUNCPTR (gst_wpe_video_src_create);
  base_src_class->decide_allocation = GST_DEBUG_FUNCPTR (gst_wpe_video_src_decide_allocation);
  base_src_class->stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_stop);

  gl_base_src_class->supported_gl_api =
      static_cast < GstGLAPI >
      (GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2);
  gl_base_src_class->gl_start = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_start);
  gl_base_src_class->gl_stop = GST_DEBUG_FUNCPTR (gst_wpe_video_src_gl_stop);
  gl_base_src_class->fill_gl_memory =
      GST_DEBUG_FUNCPTR (gst_wpe_video_src_fill_memory);

  doc_caps = gst_caps_from_string (WPE_VIDEO_SRC_DOC_CAPS);
  gst_pad_template_set_documentation_caps (tmpl, doc_caps);
  gst_clear_caps (&doc_caps);
  /**
   * GstWpeVideoSrc::configure-web-view:
   * @src: the object which received the signal
   * @webview: the webView
   *
   * Allow the application to configure the webView settings.
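   *
   * A minimal usage sketch; the callback name, the settings call and the
   * `wpesrc` element variable are only illustrative, any WebKitWebView API
   * can be used from the handler:
   *
   * ```
   * static void
   * on_configure_web_view (GstElement * src, GObject * webview, gpointer user_data)
   * {
   *   WebKitSettings *settings =
   *       webkit_web_view_get_settings (WEBKIT_WEB_VIEW (webview));
   *   webkit_settings_set_enable_webaudio (settings, TRUE);
   * }
   *
   * g_signal_connect (wpesrc, "configure-web-view",
   *     G_CALLBACK (on_configure_web_view), NULL);
   * ```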
   */
  gst_wpe_video_src_signals[SIGNAL_CONFIGURE_WEB_VIEW] =
      g_signal_new ("configure-web-view", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL, G_TYPE_NONE, 1, G_TYPE_OBJECT);
  /**
   * GstWpeVideoSrc::load-bytes:
   * @src: the object which received the signal
   * @bytes: the GBytes data to load
   *
   * Load the specified bytes into the internal webView.
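   *
   * A minimal usage sketch; `wpesrc` and the HTML snippet are only
   * illustrative:
   *
   * ```
   * const gchar *html = "<html><body><h1>Hello</h1></body></html>";
   * GBytes *bytes = g_bytes_new_static (html, strlen (html));
   * g_signal_emit_by_name (wpesrc, "load-bytes", bytes);
   * g_bytes_unref (bytes);
   * ```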
   */
  gst_wpe_video_src_signals[SIGNAL_LOAD_BYTES] =
      g_signal_new_class_handler ("load-bytes", G_TYPE_FROM_CLASS (klass),
      static_cast < GSignalFlags > (G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION),
      G_CALLBACK (gst_wpe_video_src_load_bytes), NULL, NULL, NULL,
      G_TYPE_NONE, 1, G_TYPE_BYTES);