X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=ext%2Fwpe%2Fgstwpesrcbin.cpp;h=2a39a811248283d9db9d04f49d3aa09a368ef34d;hb=f4bc5c6c65f17831302f4b99a56b9dc3b5424cf8;hp=58349eeef64eb70af786bddacb834720b7ac15c1;hpb=cb4f6c877e593d0e2aeea16f5c80b84e31c5927a;p=platform%2Fupstream%2Fgstreamer.git

diff --git a/ext/wpe/gstwpesrcbin.cpp b/ext/wpe/gstwpesrcbin.cpp
index 58349ee..2a39a81 100644
--- a/ext/wpe/gstwpesrcbin.cpp
+++ b/ext/wpe/gstwpesrcbin.cpp
@@ -1,5 +1,5 @@
-/* Copyright (C) <2018> Philippe Normand
- * Copyright (C) <2018> Žan Doberšek
+/* Copyright (C) <2018, 2019> Philippe Normand
+ * Copyright (C) <2018, 2019> Žan Doberšek
  *
  * This library is free software; you can redistribute it and/or
  * modify it under the terms of the GNU Library General Public
@@ -21,20 +21,65 @@
  * SECTION:element-wpesrc
  * @title: wpesrc
  *
- * FIXME The wpesrc element is used to produce a video texture representing a
- * web page rendered off-screen by WPE.
+ * The wpesrc element is used to produce a video texture representing a web page
+ * rendered off-screen by WPE.
  *
+ * Starting from WPEBackend-FDO 1.6.x, software rendering support is available.
+ * This feature allows wpesrc to be used on machines without a GPU, and/or for
+ * testing purposes. To enable it, set the `LIBGL_ALWAYS_SOFTWARE=true`
+ * environment variable and make sure `video/x-raw, format=BGRA` caps are
+ * negotiated by the wpesrc element.
+ *
+ * Additionally, each audio stream created by WPE is exposed as a "sometimes"
+ * audio source pad.
  */
 
 #include "gstwpesrcbin.h"
 #include "gstwpevideosrc.h"
+#include "gstwpe-private.h"
 #include "WPEThreadedView.h"
 
+#include <gst/allocators/allocators.h>
+#include <gst/base/gstflowcombiner.h>
+#include <wpe/extensions/audio.h>
+
+G_DEFINE_TYPE (GstWpeAudioPad, gst_wpe_audio_pad, GST_TYPE_GHOST_PAD);
+
+static void
+gst_wpe_audio_pad_class_init (GstWpeAudioPadClass * klass)
+{
+}
+
+static void
+gst_wpe_audio_pad_init (GstWpeAudioPad * pad)
+{
+  gst_audio_info_init (&pad->info);
+  pad->discont_pending = FALSE;
+  pad->buffer_time = GST_CLOCK_TIME_NONE;
+}
+
+static GstWpeAudioPad *
+gst_wpe_audio_pad_new (const gchar * name)
+{
+  GstWpeAudioPad *pad = GST_WPE_AUDIO_PAD (g_object_new (gst_wpe_audio_pad_get_type (),
+      "name", name, "direction", GST_PAD_SRC, NULL));
+
+  if (!gst_ghost_pad_construct (GST_GHOST_PAD (pad))) {
+    gst_object_unref (pad);
+    return NULL;
+  }
+
+  return pad;
+}
+
 struct _GstWpeSrc
 {
   GstBin parent;
 
+  GstAllocator *fd_allocator;
   GstElement *video_src;
+  GHashTable *audio_src_pads;
+  GstFlowCombiner *flow_combiner;
 };
 
 enum
@@ -54,12 +99,15 @@ static guint gst_wpe_video_src_signals[LAST_SIGNAL] = { 0 };
 
 static void gst_wpe_src_uri_handler_init (gpointer iface, gpointer data);
 
+GST_DEBUG_CATEGORY_EXTERN (wpe_src_debug);
+#define GST_CAT_DEFAULT wpe_src_debug
+
 #define gst_wpe_src_parent_class parent_class
 G_DEFINE_TYPE_WITH_CODE (GstWpeSrc, gst_wpe_src, GST_TYPE_BIN,
     G_IMPLEMENT_INTERFACE (GST_TYPE_URI_HANDLER, gst_wpe_src_uri_handler_init));
 
 static GstStaticPadTemplate video_src_factory =
-GST_STATIC_PAD_TEMPLATE ("video_src", GST_PAD_SRC, GST_PAD_ALWAYS,
+GST_STATIC_PAD_TEMPLATE ("video", GST_PAD_SRC, GST_PAD_SOMETIMES,
     GST_STATIC_CAPS ("video/x-raw(memory:GLMemory), "
         "format = (string) RGBA, "
        "width = " GST_VIDEO_SIZE_RANGE ", "
@@ -77,6 +125,166 @@ GST_STATIC_PAD_TEMPLATE ("video_src", GST_PAD_SRC, GST_PAD_ALWAYS,
 #endif
     ));
 
+static GstStaticPadTemplate audio_src_factory =
+GST_STATIC_PAD_TEMPLATE ("audio_%u", GST_PAD_SRC, GST_PAD_SOMETIMES,
+    GST_STATIC_CAPS ( \
+        GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F32)) ", layout=(string)interleaved; " \
+        
GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (F64)) ", layout=(string)interleaved; " \ + GST_AUDIO_CAPS_MAKE (GST_AUDIO_NE (S16)) ", layout=(string)interleaved" \ +)); + +static GstFlowReturn +gst_wpe_src_chain_buffer (GstPad * pad, GstObject * parent, GstBuffer * buffer) +{ + GstWpeSrc *src = GST_WPE_SRC (gst_object_get_parent (parent)); + GstFlowReturn result, chain_result; + + chain_result = gst_proxy_pad_chain_default (pad, GST_OBJECT_CAST (src), buffer); + result = gst_flow_combiner_update_pad_flow (src->flow_combiner, pad, chain_result); + gst_object_unref (src); + + if (result == GST_FLOW_FLUSHING) + return chain_result; + + return result; +} + +static void +on_audio_receiver_handle_start(void* data, uint32_t id, int32_t channels, const char* format, int32_t sampleRate) +{ + GstWpeSrc* src = GST_WPE_SRC (data); + GstWpeAudioPad *audio_pad; + GstPad *pad; + gchar *name; + GstEvent *stream_start; + GstSegment segment; + GstCaps *caps; + + GST_DEBUG_OBJECT (src, "Exposing audio pad for stream %u", id); + name = g_strdup_printf ("audio_%u", id); + audio_pad = gst_wpe_audio_pad_new (name); + pad = GST_PAD_CAST (audio_pad); + g_free (name); + + gst_pad_set_active (pad, TRUE); + gst_element_add_pad (GST_ELEMENT_CAST (src), pad); + gst_flow_combiner_add_pad (src->flow_combiner, pad); + + name = gst_pad_create_stream_id_printf(pad, GST_ELEMENT_CAST (src), "%03u", id); + stream_start = gst_event_new_stream_start (name); + gst_pad_push_event (pad, stream_start); + g_free (name); + + caps = gst_caps_new_simple ("audio/x-raw", "format", G_TYPE_STRING, format, + "rate", G_TYPE_INT, sampleRate, + "channels", G_TYPE_INT, channels, + "channel-mask", GST_TYPE_BITMASK, gst_audio_channel_get_fallback_mask (channels), + "layout", G_TYPE_STRING, "interleaved", NULL); + gst_audio_info_from_caps (&audio_pad->info, caps); + gst_pad_push_event (pad, gst_event_new_caps (caps)); + gst_caps_unref (caps); + + gst_segment_init (&segment, GST_FORMAT_TIME); + gst_pad_push_event (pad, gst_event_new_segment (&segment)); + + g_hash_table_insert (src->audio_src_pads, GUINT_TO_POINTER (id), audio_pad); +} + +static void +on_audio_receiver_handle_packet(void* data, struct wpe_audio_packet_export* packet_export, uint32_t id, int32_t fd, uint32_t size) +{ + GstWpeSrc* src = GST_WPE_SRC (data); + GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id))); + GstPad *pad = GST_PAD_CAST (audio_pad); + GstBuffer *buffer; + GstClock *clock; + + g_return_if_fail (GST_IS_PAD (pad)); + g_return_if_fail (fd >= 0); + + GST_TRACE_OBJECT (pad, "Handling incoming audio packet"); + buffer = gst_buffer_new (); + + GstMemory *mem = gst_fd_allocator_alloc (src->fd_allocator, dup (fd), size, GST_FD_MEMORY_FLAG_KEEP_MAPPED); + gst_buffer_append_memory (buffer, mem); + gst_buffer_add_audio_meta (buffer, &audio_pad->info, size, NULL); + + clock = gst_element_get_clock (GST_ELEMENT_CAST (src)); + if (clock) { + GstClockTime now; + GstClockTime base_time = gst_element_get_base_time (GST_ELEMENT_CAST (src)); + + now = gst_clock_get_time (clock); + if (now > base_time) + now -= base_time; + else + now = 0; + gst_object_unref (clock); + + audio_pad->buffer_time = now; + GST_BUFFER_DTS (buffer) = audio_pad->buffer_time; + } + + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT); + if (audio_pad->discont_pending) { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); + audio_pad->discont_pending = FALSE; + } + + gst_flow_combiner_update_pad_flow (src->flow_combiner, pad, gst_pad_push (pad, buffer)); 
+ wpe_audio_packet_export_release (packet_export); + close (fd); +} + +static void +on_audio_receiver_handle_stop(void* data, uint32_t id) +{ + GstWpeSrc* src = GST_WPE_SRC (data); + GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id))); + GstPad *pad = GST_PAD_CAST (audio_pad); + g_return_if_fail (GST_IS_PAD (pad)); + + GST_INFO_OBJECT(pad, "Stopping"); + gst_pad_push_event (pad, gst_event_new_eos ()); + gst_element_remove_pad (GST_ELEMENT_CAST (src), pad); + gst_flow_combiner_remove_pad (src->flow_combiner, pad); + g_hash_table_remove (src->audio_src_pads, GUINT_TO_POINTER (id)); +} + +static void +on_audio_receiver_handle_pause(void* data, uint32_t id) +{ + GstWpeSrc* src = GST_WPE_SRC (data); + GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id))); + GstPad *pad = GST_PAD_CAST (audio_pad); + g_return_if_fail (GST_IS_PAD (pad)); + + GST_INFO_OBJECT(pad, "Pausing"); + gst_pad_push_event (pad, gst_event_new_gap (audio_pad->buffer_time, GST_CLOCK_TIME_NONE)); + + audio_pad->discont_pending = TRUE; +} + +static void +on_audio_receiver_handle_resume(void* data, uint32_t id) +{ + GstWpeSrc* src = GST_WPE_SRC (data); + GstWpeAudioPad *audio_pad = GST_WPE_AUDIO_PAD (g_hash_table_lookup (src->audio_src_pads, GUINT_TO_POINTER (id))); + GstPad *pad = GST_PAD_CAST (audio_pad); + g_return_if_fail (GST_IS_PAD (pad)); + + GST_INFO_OBJECT(pad, "Resuming"); +} + + +static const struct wpe_audio_receiver audio_receiver = { + .handle_start = on_audio_receiver_handle_start, + .handle_packet = on_audio_receiver_handle_packet, + .handle_stop = on_audio_receiver_handle_stop, + .handle_pause = on_audio_receiver_handle_pause, + .handle_resume = on_audio_receiver_handle_resume +}; + static void gst_wpe_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes) { @@ -87,6 +295,37 @@ gst_wpe_src_load_bytes (GstWpeVideoSrc * src, GBytes * bytes) } static void +gst_wpe_src_set_location (GstWpeSrc * src, const gchar * location) +{ + GstPad *pad; + GstPad *ghost_pad; + GstProxyPad *proxy_pad; + + g_object_set (src->video_src, "location", location, NULL); + + ghost_pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src), "video"); + if (GST_IS_PAD (ghost_pad)) { + gst_object_unref (ghost_pad); + return; + } + + gst_bin_add (GST_BIN_CAST (src), src->video_src); + + pad = gst_element_get_static_pad (GST_ELEMENT_CAST (src->video_src), "src"); + ghost_pad = gst_ghost_pad_new_from_template ("video", pad, + gst_static_pad_template_get (&video_src_factory)); + proxy_pad = gst_proxy_pad_get_internal (GST_PROXY_PAD (ghost_pad)); + gst_pad_set_active (GST_PAD_CAST (proxy_pad), TRUE); + + gst_element_add_pad (GST_ELEMENT_CAST (src), GST_PAD_CAST (ghost_pad)); + gst_flow_combiner_add_pad (src->flow_combiner, GST_PAD_CAST (ghost_pad)); + gst_pad_set_chain_function (GST_PAD_CAST (proxy_pad), gst_wpe_src_chain_buffer); + + gst_object_unref (proxy_pad); + gst_object_unref (pad); +} + +static void gst_wpe_src_get_property (GObject * object, guint prop_id, GValue * value, GParamSpec * pspec) { @@ -102,8 +341,12 @@ gst_wpe_src_set_property (GObject * object, guint prop_id, { GstWpeSrc *self = GST_WPE_SRC (object); - if (self->video_src) - g_object_set_property (G_OBJECT (self->video_src), pspec->name, value); + if (self->video_src) { + if (prop_id == PROP_LOCATION) + gst_wpe_src_set_location (self, g_value_get_string (value)); + else + g_object_set_property (G_OBJECT (self->video_src), pspec->name, value); + } } static 
GstURIType
@@ -134,7 +377,7 @@ gst_wpe_src_set_uri (GstURIHandler * handler, const gchar * uri,
 {
   GstWpeSrc *src = GST_WPE_SRC (handler);
 
-  g_object_set (src->video_src, "location", uri + 6, NULL);
+  gst_wpe_src_set_location(src, uri + 6);
   return TRUE;
 }
 
@@ -152,23 +395,49 @@ gst_wpe_src_uri_handler_init (gpointer iface_ptr, gpointer data)
 static void
 gst_wpe_src_init (GstWpeSrc * src)
 {
+  gst_bin_set_suppressed_flags (GST_BIN_CAST (src),
+      static_cast<GstElementFlags>(GST_ELEMENT_FLAG_SOURCE | GST_ELEMENT_FLAG_SINK));
+  GST_OBJECT_FLAG_SET (src, GST_ELEMENT_FLAG_SOURCE);
+
+  src->fd_allocator = gst_fd_allocator_new ();
+  src->audio_src_pads = g_hash_table_new (g_direct_hash, g_direct_equal);
+  src->flow_combiner = gst_flow_combiner_new ();
   src->video_src = gst_element_factory_make ("wpevideosrc", NULL);
-  gst_bin_add (GST_BIN_CAST (src), src->video_src);
+  gst_wpe_video_src_register_audio_receiver (src->video_src, &audio_receiver, src);
+}
 
-  GstPad *pad =
-      gst_element_get_static_pad (GST_ELEMENT_CAST (src->video_src), "src");
+static GstStateChangeReturn
+gst_wpe_src_change_state (GstElement * element, GstStateChange transition)
+{
+  GstStateChangeReturn result;
+  GstWpeSrc *src = GST_WPE_SRC (element);
+
+  GST_DEBUG_OBJECT (src, "%s", gst_state_change_get_name (transition));
+  result = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state, (element, transition), GST_STATE_CHANGE_FAILURE);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:{
+      gst_flow_combiner_reset (src->flow_combiner);
+      break;
+    }
+    default:
+      break;
+  }
+
+  return result;
+}
 
-  GstPad *ghost_pad = gst_ghost_pad_new_from_template ("video_src", pad,
-      gst_static_pad_template_get (&video_src_factory));
-  GstProxyPad *proxy_pad =
-      gst_proxy_pad_get_internal (GST_PROXY_PAD (ghost_pad));
-  gst_pad_set_active (GST_PAD_CAST (proxy_pad), TRUE);
-  gst_object_unref (proxy_pad);
+static void
+gst_wpe_src_dispose (GObject *object)
+{
+  GstWpeSrc *src = GST_WPE_SRC (object);
 
-  gst_element_add_pad (GST_ELEMENT_CAST (src), GST_PAD_CAST (ghost_pad));
+  g_hash_table_unref (src->audio_src_pads);
+  gst_flow_combiner_free (src->flow_combiner);
+  gst_object_unref (src->fd_allocator);
 
-  gst_object_unref (pad);
+  GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
 }
 
 static void
@@ -179,6 +448,7 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
 
   gobject_class->set_property = gst_wpe_src_set_property;
   gobject_class->get_property = gst_wpe_src_get_property;
+  gobject_class->dispose = gst_wpe_src_dispose;
 
   g_object_class_install_property (gobject_class, PROP_LOCATION,
       g_param_spec_string ("location", "location", "The URL to display", "",
@@ -189,7 +459,7 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
           (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
 
   gst_element_class_set_static_metadata (element_class, "WPE source",
-      "Source/Video", "Creates a video stream from a WPE browser",
+      "Source/Video/Audio", "Creates a video stream from a WPE browser",
       "Philippe Normand <philn@igalia.com>, Žan Doberšek "
       "<zdobersek@igalia.com>");
 
@@ -206,10 +476,8 @@ gst_wpe_src_class_init (GstWpeSrcClass * klass)
       G_CALLBACK (gst_wpe_src_load_bytes), NULL, NULL, NULL, G_TYPE_NONE, 1,
       G_TYPE_BYTES);
 
-  gst_element_class_set_static_metadata (element_class, "WPE source",
-      "Source/Video", "Creates a video stream from a WPE browser",
-      "Philippe Normand <philn@igalia.com>, Žan Doberšek "
-      "<zdobersek@igalia.com>");
+  element_class->change_state = GST_DEBUG_FUNCPTR (gst_wpe_src_change_state);
 
   gst_element_class_add_static_pad_template (element_class, &video_src_factory);
+  gst_element_class_add_static_pad_template
(element_class, &audio_src_factory); }
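
Usage sketch (not part of the patch): with this change both the "video" pad and the "audio_%u" pads of wpesrc are "sometimes" pads, so an application has to pick them up from a "pad-added" handler instead of linking statically. The consumer code below is a minimal, hypothetical example; the sink elements (glimagesink, audioconvert, autoaudiosink) and the URL are assumptions chosen for illustration, not anything provided by this patch.

/* Hypothetical consumer of wpesrc's "sometimes" pads. */
#include <gst/gst.h>

static void
on_pad_added (GstElement * src, GstPad * pad, gpointer user_data)
{
  GstElement *pipeline = GST_ELEMENT (user_data);
  gchar *name = gst_pad_get_name (pad);
  GstPad *sinkpad;

  if (g_str_has_prefix (name, "video")) {
    /* The video pad carries GLMemory RGBA by default; glimagesink accepts it. */
    GstElement *vsink = gst_element_factory_make ("glimagesink", NULL);
    gst_bin_add (GST_BIN (pipeline), vsink);
    gst_element_sync_state_with_parent (vsink);
    sinkpad = gst_element_get_static_pad (vsink, "sink");
  } else {
    /* Audio pads produce interleaved F32/F64/S16; convert before the sink. */
    GstElement *conv = gst_element_factory_make ("audioconvert", NULL);
    GstElement *asink = gst_element_factory_make ("autoaudiosink", NULL);
    gst_bin_add_many (GST_BIN (pipeline), conv, asink, NULL);
    gst_element_link (conv, asink);
    gst_element_sync_state_with_parent (conv);
    gst_element_sync_state_with_parent (asink);
    sinkpad = gst_element_get_static_pad (conv, "sink");
  }

  gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
  g_free (name);
}

int
main (int argc, char **argv)
{
  gst_init (&argc, &argv);

  GstElement *pipeline = gst_pipeline_new ("wpe-demo");
  GstElement *src = gst_element_factory_make ("wpesrc", NULL);

  g_object_set (src, "location", "https://gstreamer.freedesktop.org", NULL);
  gst_bin_add (GST_BIN (pipeline), src);
  g_signal_connect (src, "pad-added", G_CALLBACK (on_pad_added), pipeline);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (g_main_loop_new (NULL, FALSE));
  return 0;
}

For the software-rendering path described in the element documentation above, the same structure applies, except that `LIBGL_ALWAYS_SOFTWARE=true` is set in the environment and the video branch negotiates `video/x-raw, format=BGRA` (for instance through a capsfilter and videoconvert) instead of GLMemory RGBA.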