Add API for video source loopback rendering 08/261008/11
author: Sangchul Lee <sc11.lee@samsung.com>
Wed, 7 Jul 2021 10:51:19 +0000 (19:51 +0900)
committer: Sangchul Lee <sc11.lee@samsung.com>
Tue, 13 Jul 2021 04:22:35 +0000 (13:22 +0900)
webrtc_media_source_set_video_loopback() is added.

This will be used to render the video source to the particular
display surface before sending the data to the remote peer.

[Version] 0.2.40
[Issue Type] API

Change-Id: Ia6c63fd5da758c35dd337c2ab0a12347a06cd0fc
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
include/webrtc.h
include/webrtc_private.h
packaging/capi-media-webrtc.spec
src/webrtc.c
src/webrtc_display.c
src/webrtc_sink.c
src/webrtc_source.c
test/webrtc_test.c

index df670367ad92ee40160b796a2e4351e764321656..1a4d16bf66b5551a5973aaa0e917639e17a570e9 100644 (file)
@@ -1101,6 +1101,27 @@ int webrtc_set_encoded_video_frame_cb(webrtc_h webrtc, webrtc_encoded_frame_cb c
  */
 int webrtc_unset_encoded_video_frame_cb(webrtc_h webrtc);
 
+/**
+ * @brief Sets a video loopback to render the video frames of the media source.
+ * @details The following media source types are available for this function:\n
+ *          #WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST\n
+ *          #WEBRTC_MEDIA_SOURCE_TYPE_CAMERA\n
+ *          #WEBRTC_MEDIA_SOURCE_TYPE_SCREEN\n
+ *          #WEBRTC_MEDIA_SOURCE_TYPE_FILE
+ * @since_tizen 6.5
+ * @param[in] webrtc      WebRTC handle
+ * @param[in] source_id   The video source id
+ * @param[in] type        The display type
+ * @param[in] display     The display handle
+ * @return @c 0 on success,
+ *         otherwise a negative error value
+ * @retval #WEBRTC_ERROR_NONE    Successful
+ * @retval #WEBRTC_ERROR_INVALID_PARAMETER Invalid parameter
+ * @retval #WEBRTC_ERROR_INVALID_OPERATION Invalid operation
+ * @pre Add media source to @a webrtc to get @a source_id by calling webrtc_add_media_source().
+ */
+int webrtc_media_source_set_video_loopback(webrtc_h webrtc, unsigned source_id, webrtc_display_type_e type, webrtc_display_h display);
+
 /**
  * @}
  */
index 58f863e06f658058f1979af554a2fd340d98f67a..0e705a698f036d86f67872df24debee01c576679 100644 (file)
@@ -207,6 +207,8 @@ do { \
        g_free(dot_name); \
 } while (0)
 
+#define DEFAULT_ELEMENT_FAKESINK        "fakesink"
+#define DEFAULT_ELEMENT_VIDEOCONVERT    "videoconvert"
 #define DEFAULT_VIDEO_SINK_ELEMENT      "tizenwlsink"
 #define DEFAULT_AUDIO_SINK_ELEMENT      "pulsesink"
 
@@ -433,6 +435,14 @@ typedef struct _webrtc_gst_slot_s {
                gulong src_pad_probe_id;
                bool pause;
                unsigned int payload_id;
+               struct {
+                       bool need_decoding;
+                       GstPad *src_pad;
+                       gulong src_pad_probe_id;
+                       GstElement *pipeline;
+                       GstElement *appsrc;
+                       GstCaps *appsrc_caps;
+               } render;
        } av[AV_IDX_MAX];
        struct {
                int width;
@@ -553,6 +563,8 @@ int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad);
 int _add_forwarding_sink_bin(webrtc_s *webrtc, GstPad *src_pad, bool is_video);
 int _set_stream_info_to_sink(webrtc_s *webrtc, unsigned int track_id, sound_stream_info_h stream_info);
 int _set_display_to_sink(webrtc_s *webrtc, unsigned int track_id, unsigned int type, void *display);
+int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display);
+int _decodebin_autoplug_select_cb(GstElement *decodebin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, gpointer user_data);
 bool _is_owner_of_track_build_context(webrtc_s *webrtc, unsigned int track_id);
 void _track_build_context_destroy_cb(gpointer data);
 void _sink_slot_destroy_cb(gpointer data);
index 8ecc855f6587d4ad0be9c8a8b614d7bf13ee7b6a..f105a60e6994477bb1b92651496d71cf70c994cd 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.2.39
+Version:    0.2.40
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
index 22a91aed41e2aa65c276272ee95bbdb7efb3554e..0384723bbd4046d0338d635b92bba0a4496fe5d1 100644 (file)
@@ -733,6 +733,25 @@ int webrtc_unset_encoded_video_frame_cb(webrtc_h webrtc)
        return WEBRTC_ERROR_NONE;
 }
 
+/* Public API: enables loopback rendering of a video media source.
+ * Validates arguments, then delegates to _set_video_loopback() while holding
+ * the handle mutex. See webrtc.h for the documented contract. */
+int webrtc_media_source_set_video_loopback(webrtc_h webrtc, unsigned source_id, webrtc_display_type_e type, webrtc_display_h display)
+{
+	int ret = WEBRTC_ERROR_NONE;
+	webrtc_s *_webrtc = (webrtc_s*)webrtc;
+
+	RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+	/* WEBRTC_DISPLAY_TYPE_EVAS is treated as the last valid enum value here. */
+	RET_VAL_IF(type > WEBRTC_DISPLAY_TYPE_EVAS, WEBRTC_ERROR_INVALID_PARAMETER, "invalid display type(%d)", type);
+	RET_VAL_IF(display == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "display is NULL");
+
+	g_mutex_lock(&_webrtc->mutex);
+
+	/* NOTE(review): passes 'webrtc' (the opaque handle) rather than '_webrtc';
+	 * same pointer value, but '_webrtc' would match the callee's type. */
+	ret = _set_video_loopback(webrtc, source_id, (unsigned int)type, (void *)display);
+
+	g_mutex_unlock(&_webrtc->mutex);
+
+	return ret;
+}
+
 int webrtc_set_stun_server(webrtc_h webrtc, const char *stun_server)
 {
        webrtc_s *_webrtc = (webrtc_s*)webrtc;
index d5cb07f4ef817047abcbf0cc0006a52bd6ee96a2..6ce206151c954dbccf5c3390a046385d6dbddb46 100644 (file)
@@ -350,7 +350,7 @@ static bool __swcodec_set_bo(webrtc_display_s *display, video_decoded_data_info_
        RET_VAL_IF(display == NULL, false, "display is NULL");
        RET_VAL_IF(display->tbm == NULL, false, "display->tbm is NULL");
 
-       is_mapped = gst_memory_map(mem, &mapinfo, GST_MAP_READWRITE);
+       is_mapped = gst_memory_map(mem, &mapinfo, GST_MAP_READ);
        RET_VAL_IF(is_mapped == FALSE, false, "is_mapped is FALSE");
 
        if (mapinfo.data == NULL) {
index caed09bd93d91c366fe57cc0da96e6a47d843a8b..584dcb4c6e2dbdda18b09d6f1d5003ad607973bd 100644 (file)
 #define GST_KLASS_NAME_DEPAYLOADER_RTP "Codec/Depayloader/Network/RTP"
 
 #define DEFAULT_ELEMENT_CAPSFILTER    "capsfilter"
-#define DEFAULT_ELEMENT_VIDEOCONVERT  "videoconvert"
 #define DEFAULT_ELEMENT_AUDIOCONVERT  "audioconvert"
 #define DEFAULT_ELEMENT_AUDIORESAMPLE "audioresample"
-#define DEFAULT_ELEMENT_FAKESINK      "fakesink"
 
 bool _is_owner_of_track_build_context(webrtc_s *webrtc, unsigned int track_id)
 {
@@ -360,7 +358,7 @@ static bool __is_factory_name_for_hw(gchar *factory_name)
        return false;
 }
 
-static int __decodebin_autoplug_select_cb(GstElement *decodebin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, gpointer user_data)
+int _decodebin_autoplug_select_cb(GstElement *decodebin, GstPad *pad, GstCaps *caps, GstElementFactory *factory, gpointer user_data)
 {
        /* NOTE : Similar enum is defined with GstAutoplugSelectResult in gstplay-enum.h but not exposed */
        enum {
@@ -480,7 +478,7 @@ int _add_rendering_sink_bin(webrtc_s *webrtc, GstPad *src_pad)
        gst_bin_add(sink->bin, decodebin);
 
        g_signal_connect(decodebin, "pad-added", G_CALLBACK(__decodebin_pad_added_cb), webrtc);
-       g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(__decodebin_autoplug_select_cb), webrtc);
+       g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(_decodebin_autoplug_select_cb), webrtc);
        g_signal_connect(decodebin, "element-added", G_CALLBACK(__decodebin_element_added_cb), NULL);
 
        ret = _add_no_target_ghostpad_to_slot(sink, false, &sink_pad);
index e2c2ad6b6eddc38285b6ba558a6240b17946a707..0534de2774980f2fd379f4c16818585f3874ba03 100644 (file)
@@ -701,6 +701,81 @@ static void __return_payload_id(webrtc_s *webrtc, unsigned int payload_id)
        webrtc->payload_ids ^= bitmask;
 }
 
+/* Buffer pad probe on a source's pad: mirrors each video buffer into the
+ * loopback render pipeline by pushing it to that pipeline's appsrc.
+ * Audio mirroring is not implemented yet. The buffer always continues
+ * downstream unmodified (GST_PAD_PROBE_OK). */
+static GstPadProbeReturn __source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+	probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+	GstBuffer *buffer;
+	GstElement *appsrc;
+	GstFlowReturn gst_ret = GST_FLOW_OK;
+
+	/* On any inconsistent state, remove the probe instead of crashing. */
+	RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+	RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+	RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+	RET_VAL_IF(probe_data->source == NULL, GST_PAD_PROBE_REMOVE, "probe_data->source is NULL");
+
+	switch (probe_data->av_idx) {
+	case AV_IDX_AUDIO:
+		/* TODO: implementation */
+		break;
+	case AV_IDX_VIDEO:
+		/* No display set -> loopback was not requested for this source. */
+		if (!probe_data->source->display)
+			return GST_PAD_PROBE_OK;
+		appsrc = probe_data->source->av[probe_data->av_idx].render.appsrc;
+		if (appsrc) {
+			buffer = gst_pad_probe_info_get_buffer(info);
+			LOG_DEBUG("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
+			/* A push failure is only logged; the main pipeline keeps flowing. */
+			g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+			if (gst_ret != GST_FLOW_OK)
+				LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
+		}
+		break;
+	default:
+		break;
+	}
+
+	return GST_PAD_PROBE_OK;
+}
+
+/* Installs a BUFFER probe on 'pad' for the loopback render path and records
+ * both the pad and the probe id in source->av[idx].render so they can be torn
+ * down later by __remove_probe_from_pad_for_render(). The probe user data is
+ * freed by the probe's destroy notify (g_free); the pad reference passed in is
+ * kept in the slot and released on removal. */
+static void __add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+	probe_userdata_s *probe_userdata;
+
+	RET_IF(source == NULL, "source is NULL");
+	RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+	RET_IF(pad == NULL, "pad is NULL");
+	RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+	probe_userdata = g_new0(probe_userdata_s, 1);
+	probe_userdata->source = source;
+	probe_userdata->av_idx = idx;
+	source->av[idx].render.src_pad = pad;
+	source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+			probe_cb, probe_userdata, g_free);
+
+	LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
+		source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
+}
+
+/* Counterpart of __add_probe_to_pad_for_render(): removes the probe, drops the
+ * stored pad reference, and releases the cached appsrc caps. A no-op when no
+ * probe was installed (probe id 0), so it is safe to call unconditionally
+ * from the slot-destroy path. */
+static void __remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
+{
+	RET_IF(source == NULL, "source is NULL");
+	RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+
+	if (source->av[idx].render.src_pad_probe_id == 0)
+		return;
+
+	LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
+		source->id, idx, source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+	gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+	source->av[idx].render.src_pad_probe_id = 0;
+	gst_object_unref(source->av[idx].render.src_pad);
+	source->av[idx].render.src_pad = NULL;
+	if (source->av[idx].render.appsrc_caps) {
+		gst_caps_unref(source->av[idx].render.appsrc_caps);
+		source->av[idx].render.appsrc_caps = NULL;
+	}
+}
+
 static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list)
 {
        GstElement *capsfilter = NULL;
@@ -714,6 +789,7 @@ static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source
        gchar *media_type = NULL;
        gchar *encoder_name = NULL;
        unsigned int payload_id;
+       int idx;
 
        RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
        RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
@@ -727,6 +803,7 @@ static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source
                LOG_ERROR("not ready for this media_types[0x%x]", source->media_types);
                return WEBRTC_ERROR_INVALID_OPERATION;
        }
+       idx = (source->media_types == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
 
        if (need_capsfilter) {
                if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER)))
@@ -739,6 +816,10 @@ static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source
                                g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
                                gst_caps_unref(sink_caps);
                        }
+
+                       source->av[idx].render.need_decoding = true;
+                       __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+
                        goto skip_encoder;
                }
 
@@ -756,8 +837,10 @@ static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source
                                                        __make_default_raw_caps(source, &webrtc->ini),
                                                        __make_default_encoded_caps(source, &webrtc->ini, NULL),
                                                        encoder);
-       if (encoder == NULL)
+       if (encoder == NULL) {
+               __remove_probe_from_pad_for_render(source, idx);
                return WEBRTC_ERROR_INVALID_OPERATION;
+       }
        APPEND_ELEMENT(*element_list, encoder);
 
        encoder_name = gst_element_get_name(encoder);
@@ -773,35 +856,30 @@ static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source
        }
        g_free(encoder_name);
 
+       source->av[idx].render.need_decoding = false;
+       __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+
 skip_encoder:
        CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
                                                __make_default_encoded_caps(source, &webrtc->ini, &media_type),
                                                NULL,
                                                payloader);
-       if (payloader == NULL) {
-               g_free(media_type);
-               return WEBRTC_ERROR_INVALID_OPERATION;
-       }
+       if (payloader == NULL)
+               goto error;
        APPEND_ELEMENT(*element_list, payloader);
 
-       if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL))) {
-               g_free(media_type);
-               return WEBRTC_ERROR_INVALID_OPERATION;
-       }
+       if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
+               goto error;
        APPEND_ELEMENT(*element_list, queue);
 
-       if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER))) {
-               g_free(media_type);
-               return WEBRTC_ERROR_INVALID_OPERATION;
-       }
+       if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
+               goto error;
        APPEND_ELEMENT(*element_list, capsfilter2);
 
-       payload_id = __get_available_payload_id(webrtc);
-       if (payload_id == 0) {
-               g_free(media_type);
-               return WEBRTC_ERROR_INVALID_OPERATION;
-       }
-       source->av[source->media_types == MEDIA_TYPE_VIDEO ? AV_IDX_VIDEO : AV_IDX_AUDIO].payload_id = payload_id;
+       if ((payload_id = __get_available_payload_id(webrtc)) == 0)
+               goto error;
+
+       source->av[idx].payload_id = payload_id;
 
        if ((sink_caps = __make_rtp_caps(media_type, payload_id))) {
                g_object_set(G_OBJECT(capsfilter2), "caps", sink_caps, NULL);
@@ -810,7 +888,17 @@ skip_encoder:
 
        g_free(media_type);
 
+       g_object_get(G_OBJECT(capsfilter), "caps", &sink_caps, NULL);
+       if (sink_caps)
+               source->av[idx].render.appsrc_caps = sink_caps;
+
        return WEBRTC_ERROR_NONE;
+
+error:
+       __remove_probe_from_pad_for_render(source, idx);
+       g_free(media_type);
+
+       return WEBRTC_ERROR_INVALID_OPERATION;
 }
 
 static int __create_rest_of_elements_for_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source,
@@ -933,7 +1021,7 @@ static GstPadProbeReturn __payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo
        return GST_PAD_PROBE_OK;
 }
 
-static void __add_probe_to_pad(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+static void __add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
 {
        probe_userdata_s *probe_userdata;
 
@@ -953,7 +1041,7 @@ static void __add_probe_to_pad(webrtc_gst_slot_s *source, unsigned int idx, GstP
                source->id, idx, pad, source->av[idx].src_pad_probe_id, probe_cb);
 }
 
-static void __remove_probe_from_pad(webrtc_gst_slot_s *source, unsigned int idx)
+static void __remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx)
 {
        RET_IF(source == NULL, "source is NULL");
        RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
@@ -1161,7 +1249,7 @@ static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(switch_src_list);
        SAFE_G_LIST_FREE(element_list);
@@ -1234,7 +1322,7 @@ static int __build_camerasrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(element_list);
 
@@ -1302,7 +1390,7 @@ static int __build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool us
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(element_list);
 
@@ -1362,7 +1450,7 @@ static int __build_videotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(element_list);
 
@@ -1420,7 +1508,7 @@ static int __build_custom_videosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(element_list);
 
@@ -1485,7 +1573,7 @@ static int __build_custom_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
        if (ret != WEBRTC_ERROR_NONE)
                goto exit_with_remove_from_bin;
 
-       __add_probe_to_pad(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
+       __add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
 
        SAFE_G_LIST_FREE(element_list);
 
@@ -1783,10 +1871,16 @@ void _source_slot_destroy_cb(gpointer data)
        gst_element_foreach_src_pad(GST_ELEMENT(source->bin), __foreach_src_pad_cb, source);
 
        for (i = 0; i < AV_IDX_MAX; i++) {
-               __remove_probe_from_pad(source, i);
+               __remove_probe_from_pad_for_pause(source, i);
+               __remove_probe_from_pad_for_render(source, i);
 
                if (source->av[i].payload_id > 0)
                        __return_payload_id(source->webrtc, source->av[i].payload_id);
+
+               if (source->av[i].render.pipeline) {
+                       gst_element_set_state(source->av[i].render.pipeline, GST_STATE_NULL);
+                       SAFE_GST_OBJECT_UNREF(source->av[i].render.pipeline);
+               }
        }
 
        gst_bin_remove(GST_BIN(gst_element_get_parent(source->bin)), GST_ELEMENT(source->bin));
@@ -1797,6 +1891,9 @@ void _source_slot_destroy_cb(gpointer data)
        if (source->allocator)
                gst_object_unref(source->allocator);
 
+       if (source->display)
+               _release_display(source->display);
+
        g_free(source);
 }
 
@@ -2842,3 +2939,218 @@ int _get_audio_mute(webrtc_s *webrtc, unsigned int source_id, bool *muted)
 
        return WEBRTC_ERROR_NONE;
 }
+
+/* Appends 'videoconvert ! <videosink>' to the source's loopback render
+ * pipeline and links it after 'link_with' (appsrc or decodebin).
+ * The sink element depends on the display type: the ini-configured sink for
+ * OVERLAY/ECORE_WL, or a fakesink with a "handoff" callback for EVAS so frames
+ * can be drawn via _video_stream_decoded_cb(). On failure after the elements
+ * were added to the bin, the whole render pipeline is unreffed. */
+static int __build_loopback_videosink(webrtc_gst_slot_s *source, GstElement *link_with)
+{
+	webrtc_s *webrtc;
+	const char *videosink_factory_name;
+	GstElement *videosink;
+	GstElement *videoconvert;
+	int ret = WEBRTC_ERROR_NONE;
+
+	RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+	RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
+	RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
+	RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+	RET_VAL_IF(source->display->object == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->object is NULL");
+
+	webrtc = source->webrtc;
+
+	/* Pick the sink factory according to the display type. */
+	switch (source->display->type) {
+	case WEBRTC_DISPLAY_TYPE_OVERLAY:
+	case WEBRTC_DISPLAY_TYPE_ECORE_WL:
+		videosink_factory_name = webrtc->ini.rendering_sink.v_sink_element;
+		break;
+	case WEBRTC_DISPLAY_TYPE_EVAS:
+		videosink_factory_name = DEFAULT_ELEMENT_FAKESINK;
+		break;
+	default:
+		LOG_ERROR_IF_REACHED("invalid display type(%d)", source->display->type);
+		return WEBRTC_ERROR_INVALID_OPERATION;
+	}
+
+	if (!(videosink = _create_element(videosink_factory_name, NULL)))
+		return WEBRTC_ERROR_INVALID_OPERATION;
+
+	if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL))) {
+		SAFE_GST_OBJECT_UNREF(videosink);
+		return WEBRTC_ERROR_INVALID_OPERATION;
+	}
+
+#ifndef TIZEN_TV
+	/* Overlay rendering may need a HW resource; acquire it before wiring up. */
+	if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY && webrtc->ini.resource_acquisition.video_overlay) {
+		if ((ret = _acquire_resource_for_type(webrtc, MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY))) {
+			SAFE_GST_OBJECT_UNREF(videosink);
+			SAFE_GST_OBJECT_UNREF(videoconvert);
+			return ret;
+		}
+	}
+#endif
+	if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY ||
+		source->display->type == WEBRTC_DISPLAY_TYPE_ECORE_WL) {
+		gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), source->display->overlay_surface_id);
+
+	} else if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
+		/* EVAS path: fakesink hands each decoded frame to the display module. */
+		g_object_set(videosink, "signal-handoffs", TRUE, NULL);
+		_connect_and_append_signal(&source->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), source->display);
+	}
+
+	gst_bin_add_many(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), videoconvert, videosink, NULL);
+
+	if (!gst_element_sync_state_with_parent(videoconvert)) {
+		LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(videoconvert));
+		goto error;
+	}
+
+	if (!gst_element_sync_state_with_parent(videosink)) {
+		LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(videosink));
+		goto error;
+	}
+
+	if (!gst_element_link_many(link_with, videoconvert, videosink, NULL)) {
+		LOG_ERROR("failed to gst_element_link_many()");
+		goto error;
+	}
+
+	return WEBRTC_ERROR_NONE;
+
+error:
+	/* Elements were already added to the bin; unreffing the pipeline
+	 * disposes them together. */
+	SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+	return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
+/* "pad-added" handler of the loopback decodebin: when a decoded video pad
+ * appears, attach the rendering sink chain; audio is not implemented yet.
+ * Errors are reported to the app via the error callback (idle-posted). */
+static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpointer user_data)
+{
+	int ret = WEBRTC_ERROR_NONE;
+	webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
+	const gchar *media_type;
+
+	RET_IF(source == NULL, "source is NULL");
+
+	if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
+		return;
+
+	/* NOTE(review): gst_pad_get_current_caps() returns a new caps reference
+	 * that is never unreffed here — looks like a small leak; confirm. */
+	media_type = gst_structure_get_name(gst_caps_get_structure(gst_pad_get_current_caps(new_pad), 0));
+	LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
+
+	if (g_strrstr(media_type, "video")) {
+		ret = __build_loopback_videosink(source, decodebin);
+
+	} else if (g_strrstr(media_type, "audio")) {
+		/* TODO : Implementation */
+
+	} else {
+		LOG_ERROR("not supported media type[%s]", media_type);
+		return;
+	}
+
+	if (ret != WEBRTC_ERROR_NONE) {
+		LOG_ERROR("failed to build loopback rendering sink, ret[0x%x]", ret);
+		_post_error_cb_in_idle(source->webrtc, ret);
+	}
+}
+
+/* Creates the per-source loopback render pipeline:
+ *   appsrc [! decodebin] ! videoconvert ! videosink
+ * decodebin is inserted only when the capsfilter carries encoded data
+ * (render.need_decoding set in __create_rest_of_elements()); the sink chain is
+ * then attached lazily from the decodebin "pad-added" callback. The pipeline is
+ * set to PLAYING and fed by __source_data_probe_cb(). */
+static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source, media_type_e type)
+{
+	int idx = (type == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+	GstElement *appsrc;
+	gchar *pipeline_name;
+
+	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+	RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+	RET_VAL_IF(source->display->object == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->object is NULL");
+
+	pipeline_name = g_strdup_printf("webrtc-source_%u-%s-render-pipeline", source->id, type == MEDIA_TYPE_AUDIO ? "audio" : "video");
+	source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
+	g_free(pipeline_name);
+	RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
+
+	appsrc = _create_element("appsrc", NULL);
+	if (!appsrc)
+		goto error;
+
+	/* Caps captured from the source's first capsfilter; live + timestamped. */
+	g_object_set(G_OBJECT(appsrc),
+		"is-live", TRUE,
+		"format", GST_FORMAT_TIME,
+		"caps", source->av[idx].render.appsrc_caps,
+		NULL);
+
+	if (source->av[idx].render.need_decoding) {
+		GstElement *decodebin = _create_element("decodebin", NULL);
+		if (!decodebin) {
+			SAFE_GST_OBJECT_UNREF(appsrc);
+			goto error;
+		}
+
+		gst_bin_add_many(GST_BIN(source->av[idx].render.pipeline), appsrc, decodebin, NULL);
+		if (!gst_element_link(appsrc, decodebin)) {
+			LOG_ERROR("failed to gst_element_link()");
+			goto error;
+		}
+
+		g_signal_connect(decodebin, "pad-added", G_CALLBACK(__loopback_decodebin_pad_added_cb), source);
+		g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(_decodebin_autoplug_select_cb), webrtc);
+
+	} else {
+		gst_bin_add(GST_BIN(source->av[idx].render.pipeline), appsrc);
+		/* NOTE(review): on failure, appsrc is already owned by the bin; the
+		 * explicit unref before unreffing the pipeline may over-release —
+		 * confirm against SAFE_GST_OBJECT_UNREF semantics. */
+		if (__build_loopback_videosink(source, appsrc) != WEBRTC_ERROR_NONE) {
+			SAFE_GST_OBJECT_UNREF(appsrc);
+			goto error;
+		}
+	}
+
+	source->av[idx].render.appsrc = appsrc;
+
+	LOG_INFO("source_id[%u] pipeline[%p, %s] appsrc[%p]", source->id, source->av[idx].render.pipeline,
+		GST_ELEMENT_NAME(source->av[idx].render.pipeline), source->av[idx].render.appsrc);
+
+	gst_element_set_state(source->av[idx].render.pipeline, GST_STATE_PLAYING);
+
+	return WEBRTC_ERROR_NONE;
+
+error:
+	SAFE_GST_OBJECT_UNREF(source->av[idx].render.pipeline);
+	return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
+/* Internal worker for webrtc_media_source_set_video_loopback(): validates the
+ * source slot (must produce video, must not be a media-packet source),
+ * allocates/fills the display, applies it, and builds the render pipeline.
+ * On failure the display is released and cleared so a later retry starts clean. */
+int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display)
+{
+	int ret = WEBRTC_ERROR_NONE;
+	webrtc_gst_slot_s *source;
+
+	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+	RET_VAL_IF(display == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "display is NULL");
+	RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+		WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+	RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+		"invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+	RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+		"this API does not support the media packet source");
+
+	if (source->display == NULL) {
+		source->display = _alloc_display();
+		RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "source->display is NULL");
+	}
+
+	LOG_INFO("source_id[%u] type[%d] display[%p]", source_id, type, display);
+
+	source->display->type = type;
+	source->display->object = display;
+
+	ret = _apply_display(source->display);
+	if (ret != WEBRTC_ERROR_NONE)
+		goto error;
+
+	/* NOTE(review): a second call for the same source overwrites
+	 * render.pipeline without tearing down the previous one — confirm
+	 * whether repeat invocation is expected/guarded elsewhere. */
+	ret = __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_VIDEO);
+	if (ret != WEBRTC_ERROR_NONE)
+		goto error;
+
+	return WEBRTC_ERROR_NONE;
+
+error:
+	_release_display(source->display);
+	source->display = NULL;
+	return ret;
+}
index 1c48c895226214a64834c6427ce82ca2130d292f..456b49e038abed8b53647c8b38be584a04cefbc8 100644 (file)
@@ -72,6 +72,7 @@ enum {
        CURRENT_STATUS_MEDIA_PACKET_SOURCE_UNSET_BUFFER_STATE_CHANGED_CB,
        CURRENT_STATUS_MEDIA_PACKET_SOURCE_SET_FORMAT,
        CURRENT_STATUS_SET_DISPLAY_TYPE,
+       CURRENT_STATUS_MEDIA_SOURCE_SET_VIDEO_LOOPBACK,
        CURRENT_STATUS_DATA_CHANNEL_SEND_STRING,
        CURRENT_STATUS_DATA_CHANNEL_SEND_STRING_AS_BYTES,
        CURRENT_STATUS_DATA_CHANNEL_SEND_FILE,
@@ -832,6 +833,16 @@ static void _webrtc_set_display_type(int index, int type)
        g_print("display type[%d] is set, it'll be applied when starting rendering video.\n", type);
 }
 
+/* Test-app helper: exercises the new API with the EVAS display type, using the
+ * test program's global Evas object (g_eo_mine) as the render target. */
+static void _webrtc_media_source_set_video_loopback(int index, unsigned int source_id)
+{
+	int ret = WEBRTC_ERROR_NONE;
+
+	ret = webrtc_media_source_set_video_loopback(g_conns[index].webrtc, source_id, WEBRTC_DISPLAY_TYPE_EVAS, g_eo_mine);
+	RET_IF(ret != WEBRTC_ERROR_NONE, "ret[0x%x]", ret);
+
+	g_print("webrtc_media_source_set_video_loopback() success, source_id[%u]\n", source_id);
+}
+
 static int __copy_string_arr(gchar *dest_arr, char *string)
 {
        int len = 0;
@@ -3085,6 +3096,9 @@ void _interpret_main_menu(char *cmd)
                } else if (strncmp(cmd, "dt", 2) == 0) {
                        g_conns[g_conn_index].menu_state = CURRENT_STATUS_SET_DISPLAY_TYPE;
 
+               } else if (strncmp(cmd, "vl", 2) == 0) {
+                       g_conns[g_conn_index].menu_state = CURRENT_STATUS_MEDIA_SOURCE_SET_VIDEO_LOOPBACK;
+
                } else if (strncmp(cmd, "cd", 2) == 0) {
                        _webrtc_create_data_channel(g_conn_index);
 
@@ -3339,6 +3353,7 @@ void display_sub_basic()
        g_print("gd. Get transceiver direction\n");
        g_print("sf. Set media format to media packet source\n");
        g_print("dt. Set display type\n");
+       g_print("vl. Set video loopback\n");
        g_print("cd. Create data channel\t");
        g_print("dd. Destroy data channel\n");
        g_print("zs. Send string via data channel\n");
@@ -3464,6 +3479,9 @@ static void displaymenu()
        } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_SET_DISPLAY_TYPE) {
                g_print("*** input display type.(1:overlay, 2:evas)\n");
 
+       } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_MEDIA_SOURCE_SET_VIDEO_LOOPBACK) {
+               g_print("*** input source id.\n");
+
        } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_DATA_CHANNEL_SEND_STRING) {
                g_print("*** input string to send.\n");
 
@@ -3720,6 +3738,12 @@ static void interpret(char *cmd)
                reset_menu_state();
                break;
        }
+       case CURRENT_STATUS_MEDIA_SOURCE_SET_VIDEO_LOOPBACK: {
+               value = atoi(cmd);
+               _webrtc_media_source_set_video_loopback(g_conn_index, value);
+               reset_menu_state();
+               break;
+       }
        case CURRENT_STATUS_DATA_CHANNEL_SEND_STRING: {
                _webrtc_data_channel_send_string(g_conn_index, cmd);
                reset_menu_state();