WebRTC node is added.
- MEDIA_STREAMER_NODE_TYPE_WEBRTC
A new parameter is added for the WebRTC node to determine
whether this peer acts as the offerer or the answerer.
- MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE
Handling of SDP messages for the WebRTC node is added.
media_streamer_gst_webrtc.c[h] are added.
[Version] 0.1.51
[Issue Type] New feature
Change-Id: Ic63e1993f75bb598db6e7bc85e861b6f35678e69
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
MEDIA_STREAMER_NODE_TYPE_INPUT_SELECTOR, /**< N-to-1 input stream selector */
MEDIA_STREAMER_NODE_TYPE_OUTPUT_SELECTOR, /**< 1-to-N output stream selector */
MEDIA_STREAMER_NODE_TYPE_INTERLEAVE, /**< Folds many mono channel into one interleaved audio stream */
- MEDIA_STREAMER_NODE_TYPE_DEINTERLEAVE /**< Splits multi channel audio into many mono audio */
+ MEDIA_STREAMER_NODE_TYPE_DEINTERLEAVE, /**< Splits multi channel audio into many mono audio */
+ MEDIA_STREAMER_NODE_TYPE_WEBRTC /**< WebRTC component to send and receive data */
} media_streamer_node_type_e;
/**
#define MEDIA_STREAMER_PARAM_IP_ADDRESS "address"
/**
+ * @brief Definition for the peer type of WebRTC node.
+ * @details The type of WebRTC peer; it can be "answer" or "offer".
+ * Data type is string and default value is "answer".
+ * @since_tizen 6.0
+ * @see media_streamer_node_get_params()
+ */
+#define MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE "webrtc-peer-type"
+
+/**
* @brief Definition for audio device name parameter of source or sink node.
* @details ALSA device, as defined in an asound configuration file.
* ex) "hw:0,0", "hw:0,1"
GstElement *ms_node_element_create(node_plug_s *plug_info, media_streamer_node_type_e type);
/**
- * @brief Creates rtp container GstElement.
+ * @brief Creates RTP container GstElement.
*/
GstElement *ms_rtp_element_create(void);
GstElement *ms_find_element_in_bin_by_type(GstElement *bin, node_info_s *node_klass_type);
+int ms_add_no_target_ghostpad(GstElement *gst_bin, const char *ghost_pad_name, GstPadDirection pad_direction);
+
void ms_rtpbin_pad_added_cb(GstElement *element, GstPad *new_pad, gpointer user_data);
void ms_demux_pad_added_cb(GstElement *element, GstPad *new_pad, gpointer user_data);
--- /dev/null
+/*
+ * Copyright (c) 2020 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __TIZEN_MEDIA_STREAMER_GST_WEBRTC_H__
+#define __TIZEN_MEDIA_STREAMER_GST_WEBRTC_H__
+
+#include <gst/gst.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @brief Creates WebRTC container GstElement.
+ */
+GstElement *ms_webrtc_element_create(void);
+
+void ms_webrtcbin_pad_added_cb(GstElement *src, GstPad *new_pad, gpointer user_data);
+
+void ms_webrtcbin_on_data_channel_cb(GstElement *webrtcbin, GObject *data_channel, gpointer user_data);
+
+void ms_webrtcbin_notify_ice_gathering_state_cb(GstElement *webrtcbin, GParamSpec *pspec, gpointer user_data);
+
+void ms_webrtcbin_on_ice_candidate_cb(GstElement *webrtcbin, guint mlineindex, gchar *candidate, gpointer user_data);
+
+void ms_webrtcbin_on_negotiation_needed_cb(GstElement *webrtcbin, gpointer user_data);
+
+void ms_webrtcbin_on_negotiation_process_answer(GstElement *webrtcbin, media_streamer_node_s *webrtc_node);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __TIZEN_MEDIA_STREAMER_GST_WEBRTC_H__ */
int ms_adaptive_sink_node_prepare(media_streamer_s *ms_streamer, media_streamer_node_s *node);
/**
- * @brief Prepares rtp node.
+ * @brief Prepares RTP node.
*/
int ms_rtp_node_prepare(media_streamer_node_s *node);
/**
+ * @brief Prepares WebRTC node.
+ */
+int ms_webrtc_node_prepare(media_streamer_s *ms_streamer, media_streamer_node_s *node);
+
+/**
* @brief Prepares demux node.
*/
int ms_demux_node_prepare(media_streamer_s *ms_streamer, media_streamer_node_s *node);
#include <dlog.h>
#include <iniparser.h>
#include <Evas.h>
+#include <json-glib/json-glib.h>
#ifdef __cplusplus
extern "C" {
#define DEFAULT_ADAPTIVE_SOURCE "hlsdemux"
#define DEFAULT_ADAPTIVE_SINK "hlssink"
+/* WebRTC */
+#define WEBRTC_PEER_ANSWER "answer"
+#define WEBRTC_PEER_OFFER "offer"
+#define DEFAULT_WEBRTC_PEER WEBRTC_PEER_ANSWER
+
#define MEDIA_STREAMER_DEFAULT_CAMERA_FORMAT "video/x-raw,format=I420,width=352,height=288"
#define MEDIA_STREAMER_DEFAULT_AUDIO_RAW_FORMAT "audio/x-raw,channels=1,rate=8000,format=S16LE"
#define MEDIA_STREAMER_DEFAULT_VIDEO_FORMAT "video/x-h263,width=352,height=288,framerate = 3/1"
#define MEDIA_STREAMER_DEFAULT_VIDEO_RTP_FORMAT "application/x-rtp,media=video"
#define MEDIA_STREAMER_DEFAULT_AUDIO_RTP_FORMAT "application/x-rtp,media=(string)audio,clock-rate=(int)8000,encoding-name=(string)AMR,encoding-params=(string)1,octet-align=(string)1"
-
-
#define MS_ELEMENT_IS_OUTPUT(el) g_strrstr(el, "out")
#define MS_ELEMENT_IS_INPUT(el) g_strrstr(el, "in")
#define MS_ELEMENT_IS_AUDIO(el) g_strrstr(el, "audio")
*/
int ms_util_uri_path_check(const char *file_uri);
+gchar* ms_get_string_from_json_object(JsonObject *object);
+
/**
* @brief Iterates func over all elements contained within a bin.
*/
Name: capi-media-streamer
Summary: A Media Streamer API
-Version: 0.1.50
+Version: 0.1.51
Release: 0
Group: Multimedia/API
License: Apache-2.0
#include <bundle.h>
#include "media_streamer_gst.h"
+#include "media_streamer_gst_webrtc.h"
#include "media_streamer_node.h"
#define MS_PADS_UNLINK(pad, peer) GST_PAD_IS_SRC(pad) ? \
MS_SAFE_GFREE(dot_name);
}
-static int __ms_add_no_target_ghostpad(GstElement *gst_bin, const char *ghost_pad_name, GstPadDirection pad_direction)
+int ms_add_no_target_ghostpad(GstElement *gst_bin, const char *ghost_pad_name, GstPadDirection pad_direction)
{
int ret = MEDIA_STREAMER_ERROR_NONE;
gchar *bin_name = NULL;
void ms_demux_pad_added_cb(GstElement *element, GstPad *new_pad, gpointer user_data)
{
- media_streamer_s *ms_streamer = (media_streamer_s *) user_data;
+ media_streamer_s *ms_streamer = (media_streamer_s *)user_data;
ms_debug_fenter();
adaptive_bin = gst_bin_new("adaptive_src");
ms_retvm_if(!adaptive_bin, (GstElement *) NULL, "Error: creating elements for adaptive source");
- __ms_add_no_target_ghostpad(adaptive_bin, "src", GST_PAD_SRC);
+ ms_add_no_target_ghostpad(adaptive_bin, "src", GST_PAD_SRC);
/* Add adaptive node parameters as GObject data with destroy function */
MS_SET_INT_STATIC_STRING_PARAM(adaptive_bin, MEDIA_STREAMER_PARAM_URI, "http://localhost");
else if (g_strrstr(MEDIA_STREAMER_STRICT, plug_info->info->klass_name) || (!src_type && !sink_type)) {
if (type == MEDIA_STREAMER_NODE_TYPE_RTP)
gst_element = ms_rtp_element_create();
+ else if (type == MEDIA_STREAMER_NODE_TYPE_WEBRTC)
+ gst_element = ms_webrtc_element_create();
else
gst_element = ms_element_create(plug_info->info->default_name, NULL);
} else {
rtp_container = gst_bin_new("rtp_container");
ms_retvm_if(!rtp_container, (GstElement *) NULL, "Error: creating elements for rtp container");
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_OUT, GST_PAD_SRC);
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_OUT, GST_PAD_SRC);
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_IN, GST_PAD_SINK);
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_IN, GST_PAD_SINK);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_OUT, GST_PAD_SRC);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_OUT, GST_PAD_SRC);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_IN, GST_PAD_SINK);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_IN, GST_PAD_SINK);
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_IN"_rtp", GST_PAD_SINK);
- __ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_IN"_rtp", GST_PAD_SINK);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_VIDEO_IN"_rtp", GST_PAD_SINK);
+ ms_add_no_target_ghostpad(rtp_container, MS_RTP_PAD_AUDIO_IN"_rtp", GST_PAD_SINK);
/* Add RTP node parameters as GObject data with destroy function */
MS_SET_INT_PARAM(rtp_container, MEDIA_STREAMER_PARAM_VIDEO_IN_PORT, RTP_STREAM_DISABLED);
{
int ret = MEDIA_STREAMER_ERROR_NONE;
media_streamer_node_s *rtp = NULL;
+ media_streamer_node_s *webrtc = NULL;
media_streamer_node_s *demux = NULL;
media_streamer_node_s *adaptive_src = NULL;
media_streamer_node_s *adaptive_sink = NULL;
ms_retvm_if(ms_streamer->transform_bin == NULL, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "ms_streamer->transform_bin is NULL");
rtp = (media_streamer_node_s *)g_hash_table_lookup(ms_streamer->nodes_table, "rtp_container");
+ webrtc = (media_streamer_node_s *)g_hash_table_lookup(ms_streamer->nodes_table, "webrtc_container");
demux = (media_streamer_node_s *)g_hash_table_find(ms_streamer->nodes_table, (GHRFunc)__find_node, find_node_name);
adaptive_src = (media_streamer_node_s *)g_hash_table_lookup(ms_streamer->nodes_table, "adaptive_src");
adaptive_sink = (media_streamer_node_s *)g_hash_table_lookup(ms_streamer->nodes_table, "adaptive_sink");
if (rtp) {
ret = ms_rtp_node_prepare(rtp);
+ } else if (webrtc) {
+ ret = ms_webrtc_node_prepare(ms_streamer, webrtc);
} else if (demux) {
ret = ms_demux_node_prepare(ms_streamer, demux);
if (MEDIA_STREAMER_ERROR_NONE != ret)
--- /dev/null
+/*
+ * Copyright (c) 2020 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GST_USE_UNSTABLE_API
+#define GST_USE_UNSTABLE_API
+#include <gst/webrtc/webrtc.h>
+#endif
+#include "media_streamer_util.h"
+#include "media_streamer_priv.h"
+#include "media_streamer_gst.h"
+#include "media_streamer_gst_webrtc.h"
+#include "media_streamer_node.h"
+
+/**
+ * @brief Builds a JSON SDP message of the form { "sdp": { "type": ..., "sdp": ... } }.
+ * @remarks Use g_free() to free the return value. Returns NULL when the
+ *          description type is neither offer nor answer.
+ */
+static gchar* __make_sdp_message(GstWebRTCSessionDescription *desc)
+{
+ gchar *text;
+ JsonObject *msg, *sdp;
+
+ text = gst_sdp_message_as_text(desc->sdp);
+ sdp = json_object_new();
+
+ if (desc->type == GST_WEBRTC_SDP_TYPE_OFFER) {
+  ms_info("Making offer message:\n%s", text);
+  json_object_set_string_member(sdp, "type", "offer");
+ } else if (desc->type == GST_WEBRTC_SDP_TYPE_ANSWER) {
+  ms_info("Making answer message:\n%s", text);
+  json_object_set_string_member(sdp, "type", "answer");
+ } else {
+  ms_error("invalid description type");
+  /* fix: do not leak the SDP text and the JSON object on the error path */
+  g_free(text);
+  json_object_unref(sdp);
+  return NULL;
+ }
+
+ json_object_set_string_member(sdp, "sdp", text);
+ g_free(text);
+
+ msg = json_object_new();
+ json_object_set_object_member(msg, "sdp", sdp);
+
+ text = ms_get_string_from_json_object(msg);
+
+ json_object_unref(msg);
+
+ return text;
+}
+
+/**
+ * @brief Promise change callback invoked when webrtcbin has created an answer.
+ * @details Sets the answer as the local description and serializes it to a
+ *          JSON SDP message. Sending to the signalling server is still TODO.
+ */
+static void __on_answer_created_cb(GstPromise *promise, gpointer user_data)
+{
+ GstWebRTCSessionDescription *answer = NULL;
+ const GstStructure *reply;
+ media_streamer_node_s *webrtc_node = (media_streamer_node_s *)user_data;
+ node_info_s *node_klass_type = NULL;
+ GstElement *webrtcbin = NULL;
+ gchar *sdp_msg;
+
+ ms_retm_if(promise == NULL, "promise is NULL");
+ ms_retm_if(webrtc_node == NULL, "webrtc_node is NULL");
+ ms_retm_if(gst_promise_wait(promise) != GST_PROMISE_RESULT_REPLIED, "promise is not for replied result");
+
+ ms_debug_fenter();
+
+ node_klass_type = ms_node_get_klass_by_its_type(MEDIA_STREAMER_NODE_TYPE_WEBRTC);
+ if (!(webrtcbin = ms_find_element_in_bin_by_type(webrtc_node->gst_element, node_klass_type))) {
+  ms_error("Could not find webrtcbin by type[%s, %s]", node_klass_type->klass_name, node_klass_type->default_name);
+  gst_promise_unref(promise); /* fix: do not leak the promise on the error path */
+  return;
+ }
+
+ reply = gst_promise_get_reply(promise);
+ gst_structure_get(reply, "answer",
+  GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &answer, NULL);
+ gst_promise_unref(promise);
+
+ /* fix: gst_structure_get() may fail; answer was used unchecked before */
+ if (!answer) {
+  ms_error("failed to get answer from the reply structure");
+  return;
+ }
+
+ promise = gst_promise_new();
+ g_signal_emit_by_name(G_OBJECT(webrtcbin), "set-local-description", answer, promise);
+ gst_promise_interrupt(promise);
+ gst_promise_unref(promise);
+
+ sdp_msg = __make_sdp_message(answer);
+ gst_webrtc_session_description_free(answer);
+
+ if (!sdp_msg) {
+  ms_error("failed to make SDP message");
+  return;
+ }
+
+ /* TODO: need to add to send this message to signalling server */
+ ms_debug("SDP message is sent: %s", sdp_msg);
+
+ g_free(sdp_msg);
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief Promise change callback invoked when webrtcbin has created an offer.
+ * @details Sets the offer as the local description and serializes it to a
+ *          JSON SDP message. Sending to the signalling server is still TODO.
+ */
+static void __on_offer_created_cb(GstPromise *promise, gpointer user_data)
+{
+ GstWebRTCSessionDescription *offer = NULL;
+ const GstStructure *reply;
+ GstElement *webrtcbin = (GstElement *)user_data;
+ gchar *sdp_msg;
+
+ ms_retm_if(promise == NULL, "promise is NULL");
+ ms_retm_if(webrtcbin == NULL, "webrtcbin is NULL");
+ ms_retm_if(gst_promise_wait(promise) != GST_PROMISE_RESULT_REPLIED, "promise is not for replied result");
+
+ ms_debug_fenter();
+
+ reply = gst_promise_get_reply(promise);
+ gst_structure_get(reply, "offer",
+  GST_TYPE_WEBRTC_SESSION_DESCRIPTION, &offer, NULL);
+ gst_promise_unref(promise);
+
+ /* fix: gst_structure_get() may fail; offer was used unchecked before */
+ if (!offer) {
+  ms_error("failed to get offer from the reply structure");
+  return;
+ }
+
+ promise = gst_promise_new();
+ g_signal_emit_by_name(G_OBJECT(webrtcbin), "set-local-description", offer, promise);
+ gst_promise_interrupt(promise);
+ gst_promise_unref(promise);
+
+ sdp_msg = __make_sdp_message(offer);
+ gst_webrtc_session_description_free(offer);
+
+ if (!sdp_msg) {
+  ms_error("failed to make SDP message");
+  return;
+ }
+
+ /* TODO: need to add to send this message to signalling server */
+ ms_debug("SDP message is sent: %s", sdp_msg);
+
+ g_free(sdp_msg);
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief Kicks off answer creation on the given webrtcbin; the result is
+ *        handled asynchronously by __on_answer_created_cb().
+ */
+void ms_webrtcbin_on_negotiation_process_answer(GstElement *webrtcbin, media_streamer_node_s *webrtc_node)
+{
+ GstPromise *answer_promise;
+
+ ms_retm_if(webrtcbin == NULL, "webrtcbin is NULL");
+ ms_retm_if(webrtc_node == NULL, "webrtc_node is NULL");
+
+ ms_debug_fenter();
+
+ answer_promise = gst_promise_new_with_change_func(__on_answer_created_cb, webrtc_node, NULL);
+ g_signal_emit_by_name(G_OBJECT(webrtcbin), "create-answer", NULL, answer_promise);
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief "on-negotiation-needed" handler; asks webrtcbin to create an offer,
+ *        which __on_offer_created_cb() will process asynchronously.
+ */
+void ms_webrtcbin_on_negotiation_needed_cb(GstElement *webrtcbin, gpointer user_data)
+{
+ GstPromise *offer_promise;
+
+ ms_retm_if(webrtcbin == NULL, "webrtcbin is NULL");
+
+ ms_debug_fenter();
+
+ offer_promise = gst_promise_new_with_change_func(__on_offer_created_cb, webrtcbin, NULL);
+ g_signal_emit_by_name(G_OBJECT(webrtcbin), "create-offer", NULL, offer_promise);
+
+ ms_debug_fleave();
+}
+
+/* Builds the JSON ICE message { "ice": { "candidate": ..., "sdpMLineIndex": ... } }.
+ * Caller frees the returned string with g_free(). Returns NULL when candidate is NULL. */
+static gchar *__make_ice_candidate_message(guint mlineindex, gchar *candidate)
+{
+ JsonObject *ice_obj;
+ JsonObject *msg_obj;
+ gchar *serialized;
+
+ ms_retvm_if(candidate == NULL, NULL, "candidate is NULL");
+
+ ice_obj = json_object_new();
+ json_object_set_string_member(ice_obj, "candidate", candidate);
+ json_object_set_int_member(ice_obj, "sdpMLineIndex", mlineindex);
+
+ msg_obj = json_object_new();
+ json_object_set_object_member(msg_obj, "ice", ice_obj);
+
+ serialized = ms_get_string_from_json_object(msg_obj);
+ json_object_unref(msg_obj);
+
+ return serialized;
+}
+
+/**
+ * @brief "on-ice-candidate" handler; serializes the local candidate to a JSON
+ *        message. Sending to the signalling server is still TODO.
+ */
+void ms_webrtcbin_on_ice_candidate_cb(GstElement *webrtcbin, guint mlineindex, gchar *candidate, gpointer user_data)
+{
+ gchar *msg;
+
+ ms_retm_if(webrtcbin == NULL, "webrtcbin is NULL");
+ ms_retm_if(candidate == NULL, "candidate is NULL");
+
+ msg = __make_ice_candidate_message(mlineindex, candidate);
+
+ /* TODO: need to add to send this message to signalling server */
+ ms_debug("ICE candidate message is sent: %s", msg);
+
+ g_free(msg);
+}
+
+/**
+ * @brief Logs webrtcbin's "ice-gathering-state" property whenever it changes.
+ */
+void ms_webrtcbin_notify_ice_gathering_state_cb(GstElement *webrtcbin, GParamSpec *pspec, gpointer user_data)
+{
+ GstWebRTCICEGatheringState ice_gather_state;
+ const gchar *new_state = "UNKNOWN";
+
+ /* fix: guard against NULL like the other webrtcbin callbacks do */
+ ms_retm_if(webrtcbin == NULL, "webrtcbin is NULL");
+
+ g_object_get(webrtcbin, "ice-gathering-state", &ice_gather_state, NULL);
+
+ switch (ice_gather_state) {
+ case GST_WEBRTC_ICE_GATHERING_STATE_NEW:
+  new_state = "NEW";
+  break;
+ case GST_WEBRTC_ICE_GATHERING_STATE_GATHERING:
+  new_state = "GATHERING";
+  break;
+ case GST_WEBRTC_ICE_GATHERING_STATE_COMPLETE:
+  new_state = "COMPLETE";
+  break;
+ default:
+  /* keep "UNKNOWN" for any state added in future GStreamer versions */
+  break;
+ }
+
+ ms_info("ICE gathering state changed to [%s]", new_state);
+}
+
+/* "on-error" handler of the data channel; currently only traces the call.
+ * NOTE(review): no real error handling yet - confirm whether errors should
+ * be propagated to the application. */
+static void __data_channel_on_error_cb(GObject *data_channel, gpointer user_data)
+{
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+
+ ms_debug_fenter();
+
+ ms_debug_fleave();
+}
+
+/* "on-open" handler: once the channel is open, emits one test string and one
+ * test binary payload over it. */
+static void __data_channel_on_open_cb(GObject *data_channel, gpointer user_data)
+{
+ const gchar *payload = "data";
+ GBytes *payload_bytes;
+
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+
+ ms_debug_fenter();
+
+ payload_bytes = g_bytes_new(payload, strlen(payload));
+
+ g_signal_emit_by_name(data_channel, "send-string", "Hi! from GStreamer");
+ g_signal_emit_by_name(data_channel, "send-data", payload_bytes);
+
+ g_bytes_unref(payload_bytes);
+
+ ms_debug_fleave();
+}
+
+/* "on-close" handler of the data channel; currently only traces the call.
+ * NOTE(review): no teardown is done here - confirm nothing needs releasing. */
+static void __data_channel_on_close_cb(GObject *data_channel, gpointer user_data)
+{
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+
+ ms_debug_fenter();
+
+ ms_debug_fleave();
+}
+
+/* "on-message-string" handler: logs a text message received over the data channel. */
+static void __data_channel_on_message_string_cb(GObject *data_channel, gchar *message, gpointer user_data)
+{
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+ ms_retm_if(message == NULL, "message is NULL");
+
+ ms_info("Received message: %s", message);
+}
+
+/* Wires all data-channel signals to their handlers (table-driven). */
+static void __connect_data_channel_signals(GObject *data_channel)
+{
+ const struct {
+  const gchar *name;
+  GCallback handler;
+ } signals[] = {
+  { "on-error", G_CALLBACK(__data_channel_on_error_cb) },
+  { "on-open", G_CALLBACK(__data_channel_on_open_cb) },
+  { "on-close", G_CALLBACK(__data_channel_on_close_cb) },
+  { "on-message-string", G_CALLBACK(__data_channel_on_message_string_cb) },
+ };
+ gsize i;
+
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+
+ ms_debug_fenter();
+
+ for (i = 0; i < G_N_ELEMENTS(signals); i++)
+  g_signal_connect(data_channel, signals[i].name, signals[i].handler, NULL);
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief "on-data-channel" handler; wires up the callbacks of a data channel
+ *        announced by the remote peer.
+ */
+void ms_webrtcbin_on_data_channel_cb(GstElement *webrtcbin, GObject *data_channel, gpointer user_data)
+{
+ media_streamer_s *streamer = (media_streamer_s *)user_data;
+
+ ms_retm_if(streamer == NULL, "ms_streamer is NULL");
+ ms_retm_if(data_channel == NULL, "data_channel is NULL");
+
+ ms_debug_fenter();
+
+ __connect_data_channel_signals(data_channel);
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief "pad-added" handler for webrtcbin; only src pads are of interest.
+ *        Currently it just logs the new pad - no linking is done here yet.
+ */
+void ms_webrtcbin_pad_added_cb(GstElement *webrtcbin, GstPad *new_pad, gpointer user_data)
+{
+ media_streamer_s *streamer = (media_streamer_s *)user_data;
+
+ ms_retm_if(new_pad == NULL, "new_pad is NULL");
+ ms_retm_if(streamer == NULL, "ms_streamer is NULL");
+ ms_retm_if(GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC, "new_pad is not for source");
+
+ ms_debug_fenter();
+
+ ms_debug("Pad [%s] added on [%s]", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(webrtcbin));
+
+ ms_debug_fleave();
+}
+
+/**
+ * @brief Creates the "webrtc_container" bin holding a webrtcbin element with
+ *        the video-in ghost pad targeted at a webrtcbin sink request pad.
+ * @return The container bin on success, otherwise NULL.
+ */
+GstElement *ms_webrtc_element_create(void)
+{
+ GstElement *webrtc_container;
+ GstElement *webrtcbin;
+ GstGhostPad *ghost_pad_video_in;
+ GstPad *request_pad;
+ gboolean target_set;
+
+ ms_debug_fenter();
+
+ webrtc_container = gst_bin_new("webrtc_container");
+ ms_retvm_if(!webrtc_container, (GstElement *) NULL, "Error: creating elements for webrtc container");
+
+ ms_add_no_target_ghostpad(webrtc_container, MS_RTP_PAD_VIDEO_IN, GST_PAD_SINK);
+
+ MS_SET_INT_STATIC_STRING_PARAM(webrtc_container, MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE, DEFAULT_WEBRTC_PEER);
+
+ if (!(webrtcbin = ms_element_create("webrtcbin", NULL))) {
+  ms_error("Failed to create webrtcbin element");
+  gst_object_unref(webrtc_container); /* fix: do not leak the bin on failure */
+  return NULL;
+ }
+
+ /* FIXME: these should be set from user */
+ g_object_set(G_OBJECT(webrtcbin), "bundle-policy", 3, NULL); // 3:max-bundle
+ g_object_set(G_OBJECT(webrtcbin), "stun-server", "stun://stun.l.google.com:19302", NULL);
+
+ ms_bin_add_element(webrtc_container, webrtcbin, FALSE);
+
+ if (!(ghost_pad_video_in = (GstGhostPad *)gst_element_get_static_pad(webrtc_container, MS_RTP_PAD_VIDEO_IN))) {
+  ms_error("Failed to get ghost pad for webrtc_container");
+  gst_object_unref(webrtc_container); /* fix: bin (now owning webrtcbin) was leaked here */
+  return NULL;
+ }
+
+ request_pad = gst_element_get_request_pad(webrtcbin, "sink_%u");
+ target_set = gst_ghost_pad_set_target(ghost_pad_video_in, request_pad);
+
+ /* fix: drop the extra ref returned by gst_element_get_static_pad() */
+ gst_object_unref(ghost_pad_video_in);
+
+ if (!target_set) {
+  ms_info("Failed to gst_ghost_pad_set_target() for %s", MS_RTP_PAD_VIDEO_IN);
+  /* fix: release resources instead of leaking them (was a TODO) */
+  if (request_pad) {
+   gst_element_release_request_pad(webrtcbin, request_pad);
+   gst_object_unref(request_pad);
+  }
+  gst_object_unref(webrtc_container);
+  return NULL;
+ }
+
+ if (request_pad)
+  gst_object_unref(request_pad); /* the ghost pad keeps its own ref to the target */
+
+ ms_debug_fleave();
+
+ return webrtc_container;
+}
#include "media_streamer_node_dpm_policy.h"
#include "media_streamer_util.h"
#include "media_streamer_gst.h"
+#include "media_streamer_gst_webrtc.h"
#define SMACK_LABEL_LEN 255
#define DEFAULT_URI_SCHEME_LENGTH 10
{MEDIA_STREAMER_PARAM_VIDEO_OUT_PORT, "video_out_port"},
{MEDIA_STREAMER_PARAM_AUDIO_OUT_PORT, "audio_out_port"},
{MEDIA_STREAMER_PARAM_IP_ADDRESS, "address"},
+ {MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE, "webrtc-peer-type"},
{MEDIA_STREAMER_PARAM_AUDIO_DEVICE, "audio_device"},
{MEDIA_STREAMER_PARAM_CLOCK_SYNCHRONIZED, "sync"},
{MEDIA_STREAMER_PARAM_ROTATE, "rotate"},
{MEDIA_STREAMER_STRICT, "output-selector"}, /* MEDIA_STREAMER_NODE_TYPE_OUTPUT_SELECTOR */
{MEDIA_STREAMER_STRICT, "interleave"}, /* MEDIA_STREAMER_NODE_TYPE_INTERLEAVE */
{MEDIA_STREAMER_STRICT, "deinterleave"}, /* MEDIA_STREAMER_NODE_TYPE_DEINTERLEAVE */
+ {"Filter/Network/WebRTC", "webrtc_container"}, /* MEDIA_STREAMER_NODE_TYPE_WEBRTC */
{NULL, NULL}
};
return ret;
}
-static gboolean __ms_adaptive_src_node_has_property(media_streamer_node_s *node, const char * param_name)
+/* Returns TRUE when a WebRTC node stores a GValue under param_name
+ * (set as GObject data on the node's bin); FALSE for any other node type. */
+static gboolean __ms_webrtc_node_has_property(media_streamer_node_s *node, const char *param_name)
+{
+ GValue *stored_val;
+
+ ms_debug_fenter();
+
+ ms_retvm_if(!node || !node->gst_element, FALSE, "Error: empty node");
+ ms_retvm_if(!param_name, FALSE, "Error: invalid property parameter");
+
+ if (node->type != MEDIA_STREAMER_NODE_TYPE_WEBRTC) {
+  ms_debug_fleave();
+  return FALSE;
+ }
+
+ stored_val = (GValue *)g_object_get_data(G_OBJECT(node->gst_element), param_name);
+
+ ms_debug_fleave();
+
+ return stored_val != NULL;
+}
+
+/* Stores a validated parameter value on a WebRTC node. Only
+ * MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE is supported, and its value must be
+ * either WEBRTC_PEER_OFFER or WEBRTC_PEER_ANSWER. */
+static int __ms_webrtc_node_set_property(media_streamer_node_s *node, param_s *param, const char *param_value)
+{
+ GValue *stored_val;
+
+ ms_debug_fenter();
+
+ ms_retvm_if(!node || !node->gst_element, MEDIA_STREAMER_ERROR_INVALID_OPERATION, "Error: empty node");
+ ms_retvm_if(node->type != MEDIA_STREAMER_NODE_TYPE_WEBRTC, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "Invalid node type");
+ ms_retvm_if(!param, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "Error: invalid property parameter");
+ ms_retvm_if(!param_value, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "param_value is NULL");
+
+ stored_val = (GValue *)g_object_get_data(G_OBJECT(node->gst_element), param->param_name);
+ if (!stored_val) {
+  ms_error("failed to get [%s] val from [%s]", param->param_name, GST_ELEMENT_NAME(node->gst_element));
+  return MEDIA_STREAMER_ERROR_INVALID_PARAMETER;
+ }
+
+ if (strcmp(param->param_name, MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE) != 0) {
+  ms_error("failed to set property, undefined param name[%s]", param->param_name);
+  return MEDIA_STREAMER_ERROR_INVALID_PARAMETER;
+ }
+
+ if (strcmp(param_value, WEBRTC_PEER_OFFER) != 0 && strcmp(param_value, WEBRTC_PEER_ANSWER) != 0) {
+  ms_error("failed to set property, param value should be [%s] or [%s]", WEBRTC_PEER_OFFER, WEBRTC_PEER_ANSWER);
+  return MEDIA_STREAMER_ERROR_INVALID_PARAMETER;
+ }
+
+ /* replace the stored value with the new peer type string */
+ g_value_unset(stored_val);
+ g_value_init(stored_val, G_TYPE_STRING);
+ g_value_set_string(stored_val, param_value);
+
+ ms_debug_fleave();
+
+ return MEDIA_STREAMER_ERROR_NONE;
+}
+
+static gboolean __ms_adaptive_src_node_has_property(media_streamer_node_s *node, const char *param_name)
{
GValue *val = NULL;
return ret;
}
+/* Reads the stored MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE value of the node and
+ * sets *is_offerer to TRUE when it equals WEBRTC_PEER_OFFER. */
+static int __ms_webrtc_node_is_offerer(media_streamer_node_s *node, gboolean *is_offerer)
+{
+ GValue *peer_type_val;
+ const gchar *peer_type;
+
+ ms_retvm_if(!node || !node->gst_element, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "Error: empty node");
+ ms_retvm_if(!is_offerer, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "Error: is_offerer is null");
+
+ if (node->type != MEDIA_STREAMER_NODE_TYPE_WEBRTC) {
+  ms_error("Invalid node type");
+  return MEDIA_STREAMER_ERROR_INVALID_PARAMETER;
+ }
+
+ peer_type_val = (GValue *)g_object_get_data(G_OBJECT(node->gst_element), MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE);
+ if (!peer_type_val) {
+  ms_error("Failed to get [%s] val from [%s]", MEDIA_STREAMER_PARAM_WEBRTC_PEER_TYPE, GST_ELEMENT_NAME(node->gst_element));
+  return MEDIA_STREAMER_ERROR_INVALID_OPERATION;
+ }
+
+ peer_type = g_value_get_string(peer_type_val);
+ if (!peer_type) {
+  ms_error("Failed to g_value_get_string()");
+  return MEDIA_STREAMER_ERROR_INVALID_OPERATION;
+ }
+
+ ms_info("peer type is [%s]", peer_type);
+
+ *is_offerer = (strcmp(peer_type, WEBRTC_PEER_OFFER) == 0);
+
+ return MEDIA_STREAMER_ERROR_NONE;
+}
+
int ms_node_create(media_streamer_node_s *node, media_format_h in_fmt, media_format_h out_fmt)
{
int ret = MEDIA_STREAMER_ERROR_NONE;
return MEDIA_STREAMER_ERROR_NONE;
}
+
+/**
+ * @brief Prepares the WebRTC node: looks up webrtcbin inside the node's bin,
+ *        connects negotiation/ICE/data-channel signals according to the peer
+ *        type, and creates a data channel for sending.
+ * @return MEDIA_STREAMER_ERROR_NONE on success, otherwise an error code.
+ */
+int ms_webrtc_node_prepare(media_streamer_s *ms_streamer, media_streamer_node_s *node)
+{
+ GstElement *webrtcbin = NULL;
+ node_info_s *node_klass_type = NULL;
+ GObject *send_channel = NULL;
+ gboolean is_offerer = FALSE;
+
+ ms_debug_fenter();
+
+ ms_retvm_if(!ms_streamer, MEDIA_STREAMER_ERROR_INVALID_PARAMETER, "ms_streamer is NULL");
+ ms_retvm_if(!node, MEDIA_STREAMER_ERROR_INVALID_OPERATION, "node is NULL");
+
+ node_klass_type = ms_node_get_klass_by_its_type(MEDIA_STREAMER_NODE_TYPE_WEBRTC);
+ if (!(webrtcbin = ms_find_element_in_bin_by_type(node->gst_element, node_klass_type))) {
+  ms_error("Could not find webrtcbin by type[%s, %s]", node_klass_type->klass_name, node_klass_type->default_name);
+  return MEDIA_STREAMER_ERROR_INVALID_OPERATION;
+ }
+
+ if (__ms_webrtc_node_is_offerer(node, &is_offerer)) {
+  ms_error("Failed to get peer type");
+  return MEDIA_STREAMER_ERROR_INVALID_OPERATION;
+ }
+
+ /* only the offerer kicks off negotiation; the answerer waits for a remote offer */
+ if (is_offerer)
+  ms_signal_create(&node->sig_list, webrtcbin, "on-negotiation-needed", G_CALLBACK(ms_webrtcbin_on_negotiation_needed_cb), NULL);
+
+ ms_signal_create(&node->sig_list, webrtcbin, "on-ice-candidate", G_CALLBACK(ms_webrtcbin_on_ice_candidate_cb), NULL);
+ ms_signal_create(&node->sig_list, webrtcbin, "notify::ice-gathering-state", G_CALLBACK(ms_webrtcbin_notify_ice_gathering_state_cb), NULL);
+
+ if (ms_element_set_state(webrtcbin, GST_STATE_READY)) {
+  ms_error("Failed to set state to READY"); /* fix: typo "Faild" */
+  return MEDIA_STREAMER_ERROR_INVALID_OPERATION;
+ }
+
+ g_signal_emit_by_name(webrtcbin, "create-data-channel", "channel", NULL, &send_channel);
+ if (send_channel) {
+  /* NOTE(review): the reference returned here is neither stored nor released -
+   * confirm intended lifetime of the send channel */
+  ms_info("data channel(%p) for sending is created", send_channel);
+ } else {
+  ms_warning("Failed to create data channel, is usrsctp available?");
+ }
+
+ ms_signal_create(&node->sig_list, webrtcbin, "on-data-channel", G_CALLBACK(ms_webrtcbin_on_data_channel_cb), ms_streamer);
+ ms_signal_create(&node->sig_list, webrtcbin, "pad-added", G_CALLBACK(ms_webrtcbin_pad_added_cb), ms_streamer);
+
+ ms_generate_dots(node->gst_element, "webrtc_prepared");
+
+ ms_debug_fleave();
+
+ return MEDIA_STREAMER_ERROR_NONE;
+}
+
//LCOV_EXCL_STOP
int ms_demux_node_prepare(media_streamer_s *ms_streamer, media_streamer_node_s *node)
return ret;
}
+ if (node->type == MEDIA_STREAMER_NODE_TYPE_WEBRTC) {
+ ret = __ms_webrtc_node_set_property(node, param, param_value);
+ ms_debug_fleave();
+ return ret;
+ }
+
if (node->type == MEDIA_STREAMER_NODE_TYPE_SRC &&
node->subtype == MEDIA_STREAMER_NODE_SRC_TYPE_ADAPTIVE) {
ret = __ms_adaptive_src_node_set_property(node, param, param_value);
if (!g_strcmp0(param_name, param_table[it_param].param_name)) {
param_spec = g_object_class_find_property(G_OBJECT_GET_CLASS(node->gst_element), param_table[it_param].origin_name);
if (param_spec || __ms_rtp_node_has_property(node, param_table[it_param].origin_name) ||
- __ms_adaptive_src_node_has_property(node, param_table[it_param].origin_name)) {
+ __ms_adaptive_src_node_has_property(node, param_table[it_param].origin_name) ||
+ __ms_webrtc_node_has_property(node, param_table[it_param].origin_name)) {
*param = &(param_table[it_param]);
ms_info("Got parameter [%s] for node [%s]", (*param)->param_name, node->name);
found_param = TRUE;
return MEDIA_STREAMER_ERROR_NONE;
}
+/**
+ * @brief Serializes a JsonObject to its string representation.
+ * @remarks Use g_free() to free the return value. Returns NULL when object is NULL.
+ */
+gchar* ms_get_string_from_json_object(JsonObject *object)
+{
+ JsonGenerator *gen;
+ JsonNode *root_node;
+ gchar *result;
+
+ ms_retvm_if(object == NULL, NULL, "object is NULL");
+
+ root_node = json_node_init_object(json_node_alloc(), object);
+ gen = json_generator_new();
+ json_generator_set_root(gen, root_node);
+ result = json_generator_to_data(gen, NULL);
+
+ g_object_unref(gen);
+ json_node_free(root_node);
+
+ return result;
+}
+
//LCOV_EXCL_START
static void __global(void *data, struct wl_registry *registry,
uint32_t name, const char *interface, uint32_t version)