From: backto.kim
Date: Thu, 22 Apr 2021 02:56:25 +0000 (+0900)
Subject: Add new API set for media source mute
X-Git-Tag: submit/tizen/20210729.023123~64
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F70%2F257270%2F44;p=platform%2Fcore%2Fapi%2Fwebrtc.git

Add new API set for media source mute

When a media source is muted, a black frame or silent sound will be
transmitted to the remote peer.

Functions are added as below.
 - webrtc_set_media_source_mute()
 - webrtc_get_media_source_muted()

[Version] 0.1.177
[Issue Type] API

Change-Id: I4134872691f5f4a46cbd0b510a15a4fd521fb8e7
---

diff --git a/include/webrtc.h b/include/webrtc.h
index ab456aec..de36e39a 100644
--- a/include/webrtc.h
+++ b/include/webrtc.h
@@ -659,6 +659,42 @@ int webrtc_set_media_source_pause(webrtc_h webrtc, unsigned int source_id, webrt
  */
 int webrtc_get_media_source_pause(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, bool *paused);
 
+/**
+ * @brief Sets the media source's mute status.
+ * @details If @a mute is set to @c true, a black frame or silent sound will be transmitted to the remote peer according to the media type of the @a source_id. \n
+ *          If @a source_id is a media source of #WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET, this function will return #WEBRTC_ERROR_INVALID_PARAMETER.
+ * @since_tizen 6.5
+ * @param[in] webrtc      WebRTC handle
+ * @param[in] source_id   The media source id
+ * @param[in] media_type  The media type
+ * @param[in] mute        The new mute status (@c true = muted, @c false = not muted)
+ * @return @c 0 on success,
+ *         otherwise a negative error value
+ * @retval #WEBRTC_ERROR_NONE Successful
+ * @retval #WEBRTC_ERROR_INVALID_PARAMETER Invalid parameter
+ * @retval #WEBRTC_ERROR_INVALID_OPERATION Invalid operation
+ * @see webrtc_get_media_source_muted()
+ */
+int webrtc_set_media_source_mute(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, bool mute);
+
+/**
+ * @brief Gets the media source's mute status.
+ * @details If @a source_id is a media source of #WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET, this function will return #WEBRTC_ERROR_INVALID_PARAMETER.
+ * @since_tizen 6.5
+ * @remarks The default value is @c false.
+ * @param[in]  webrtc      WebRTC handle
+ * @param[in]  source_id   The media source id
+ * @param[in]  media_type  The media type
+ * @param[out] muted       The current mute status (@c true = muted, @c false = not muted)
+ * @return @c 0 on success,
+ *         otherwise a negative error value
+ * @retval #WEBRTC_ERROR_NONE Successful
+ * @retval #WEBRTC_ERROR_INVALID_PARAMETER Invalid parameter
+ * @retval #WEBRTC_ERROR_INVALID_OPERATION Invalid operation
+ * @see webrtc_set_media_source_mute()
+ */
+int webrtc_get_media_source_muted(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, bool *muted);
+
 /**
  * @brief Sets a video source resolution.
  * @details The following media source types are for the video source:\n

diff --git a/include/webrtc_private.h b/include/webrtc_private.h
index 99af1aff..a65b8fb2 100644
--- a/include/webrtc_private.h
+++ b/include/webrtc_private.h
@@ -417,6 +417,8 @@ typedef struct _webrtc_gst_slot_s {
     webrtc_callbacks_s buffer_state_changed_cb;
     webrtc_callbacks_s *encoded_frame_cb;
     webrtc_s *webrtc;
+    gulong probe_id;
+    bool video_muted;
     webrtc_display_s *display;
 } webrtc_gst_slot_s;
 
@@ -562,6 +564,11 @@ int _stop_websocket(webrtc_websocket_s *ws);
 int _check_privilege(const char *privilege);
 int _check_feature(const char *feature);
 
+int _set_audio_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute);
+int _set_video_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute);
+int _get_audio_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted);
+int _get_video_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted);
+
 #ifdef __cplusplus
 }

diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 7c07207f..1388a73d 100644
--- a/packaging/capi-media-webrtc.spec
+++ b/packaging/capi-media-webrtc.spec
@@ -1,6 +1,6 @@
 Name: capi-media-webrtc
 Summary: A WebRTC library in Tizen Native API
-Version: 0.1.176
+Version: 0.1.177
 Release: 0
 Group: Multimedia/API
 License: Apache-2.0

diff --git a/src/webrtc.c b/src/webrtc.c
index 42a4945f..754f78a1 100644
--- a/src/webrtc.c
+++ b/src/webrtc.c
@@ -213,7 +213,7 @@ int webrtc_start(webrtc_h webrtc)
         return ret;
     }
 #endif
-    ret = _gst_pipeline_set_state(webrtc, GST_STATE_PLAYING);
+    ret = _gst_pipeline_set_state(_webrtc, GST_STATE_PLAYING);
     RET_VAL_WITH_UNLOCK_IF(ret != WEBRTC_ERROR_NONE, ret, &_webrtc->mutex, "failed to change GST state to PLAYING");
 
     _webrtc->pend_state = WEBRTC_STATE_NEGOTIATING;
@@ -334,6 +334,70 @@ int webrtc_get_media_source_pause(webrtc_h webrtc, unsigned int source_id, webrt
     return ret;
 }
 
+int webrtc_set_media_source_mute(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, bool mute)
+{
+    int ret = WEBRTC_ERROR_NONE;
+    webrtc_s *_webrtc = (webrtc_s*)webrtc;
+
+    RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+
+    g_mutex_lock(&_webrtc->mutex);
+
+    switch (media_type) {
+    case WEBRTC_MEDIA_TYPE_AUDIO:
+        ret = _set_audio_source_mute(_webrtc, source_id, mute);
+        break;
+
+    case WEBRTC_MEDIA_TYPE_VIDEO:
+        ret = _set_video_source_mute(_webrtc, source_id, mute);
+        break;
+
+    default :
+        LOG_ERROR_IF_REACHED("media type(%d)", media_type);
+        ret = WEBRTC_ERROR_INVALID_PARAMETER;
+        break;
+    }
+
+    g_mutex_unlock(&_webrtc->mutex);
+
+    if (ret != WEBRTC_ERROR_NONE)
+        LOG_ERROR("fail to set media source mute. 
source_id[%u] media_type[%d] mute[%d]", source_id, media_type, mute); + + return ret; +} + +int webrtc_get_media_source_muted(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, bool *muted) +{ + int ret = WEBRTC_ERROR_NONE; + webrtc_s *_webrtc = (webrtc_s*)webrtc; + + RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + + g_mutex_lock(&_webrtc->mutex); + + switch (media_type) { + case WEBRTC_MEDIA_TYPE_AUDIO: + ret = _get_audio_source_muted(_webrtc, source_id, muted); + break; + + case WEBRTC_MEDIA_TYPE_VIDEO: + ret = _get_video_source_muted(_webrtc, source_id, muted); + break; + + default : + LOG_ERROR_IF_REACHED("media type(%d)", media_type); + ret = WEBRTC_ERROR_INVALID_PARAMETER; + break; + } + + g_mutex_unlock(&_webrtc->mutex); + + if (ret != WEBRTC_ERROR_NONE) + LOG_ERROR("fail to get media source muted. source_id[%u] media_type[%d]", source_id, media_type); + + return ret; +} + int webrtc_set_video_source_resolution(webrtc_h webrtc, unsigned int source_id, int width, int height) { int ret = WEBRTC_ERROR_NONE; diff --git a/src/webrtc_source.c b/src/webrtc_source.c index 00088b65..98ff68f0 100644 --- a/src/webrtc_source.c +++ b/src/webrtc_source.c @@ -38,8 +38,13 @@ #define DEFAULT_ELEMENT_VIDEOCONVERT "videoconvert" #define DEFAULT_ELEMENT_CAPSFILTER "capsfilter" #define DEFAULT_ELEMENT_QUEUE "queue" +#define DEFAULT_ELEMENT_VOLUME "volume" #define ELEMENT_NAME_FIRST_CAPSFILTER "firstCapsfilter" +#define ELEMENT_NAME_VIDEO_SRC "videoSrc" +#define ELEMENT_NAME_VIDEO_SWITCH "videoSwitch" +#define ELEMENT_NAME_VIDEO_MUTE_SRC "videoMuteSrc" +#define ELEMENT_NAME_VOLUME "volume" typedef enum { CODEC_TYPE_OPUS, @@ -828,6 +833,43 @@ static void __add_probe_to_pad(GstPad *pad, media_type_e media_type, webrtc_gst_ source->av[probe_userdata->av_idx].src_pad_probe_id, pad, probe_userdata->av_idx, source); } +static GstElement * __link_video_switch(GstBin *bin, GstElement *src) +{ + GstElement *videotestsrc = NULL; + GstElement *videoswitch = NULL; + + RET_VAL_IF(bin == NULL, NULL, "bin is NULL"); + RET_VAL_IF(src == NULL, NULL, "src is NULL"); + + if (!(videotestsrc = _create_element(DEFAULT_ELEMENT_VIDEOTESTSRC, ELEMENT_NAME_VIDEO_MUTE_SRC))) + return NULL; + + g_object_set(G_OBJECT(videotestsrc), "is-live", TRUE, "pattern", 2, NULL); /* 2: black */ + + if (!(videoswitch = _create_element("input-selector", ELEMENT_NAME_VIDEO_SWITCH))) { + SAFE_GST_OBJECT_UNREF(videotestsrc); + return NULL; + } + + gst_bin_add_many(bin, videotestsrc, videoswitch, NULL); + + gst_element_link(src, videoswitch); + gst_element_link(videotestsrc, videoswitch); + + return videoswitch; +} + +static void __unlink_video_switch(GstBin *bin) +{ + GstElement *videotestsrc = NULL; + GstElement *videoswitch = NULL; + + videotestsrc = gst_bin_get_by_name(bin, ELEMENT_NAME_VIDEO_MUTE_SRC); + videoswitch = gst_bin_get_by_name(bin, ELEMENT_NAME_VIDEO_SWITCH); + + gst_bin_remove_many(bin, videotestsrc, videoswitch, NULL); +} + static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) { int ret = WEBRTC_ERROR_NONE; @@ -838,6 +880,7 @@ static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) GstElement *videopay = NULL; GstElement *queue = NULL; GstElement *capsfilter2 = NULL; + GstElement *video_switch = NULL; RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); @@ -849,7 +892,7 @@ static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s 
*source) source->media_types = MEDIA_TYPE_VIDEO; source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types); - if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), NULL))) + if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), ELEMENT_NAME_VIDEO_SRC))) return WEBRTC_ERROR_INVALID_OPERATION; if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL))) @@ -859,7 +902,12 @@ static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) goto exit; gst_bin_add_many(source->bin, screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL); - if (!gst_element_link_many(screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL)) { + + video_switch = __link_video_switch(source->bin, screensrc); + if (!video_switch) + goto exit_with_remove_from_bin; + + if (!gst_element_link_many(video_switch, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL)) { LOG_ERROR("failed to gst_element_link_many()"); ret = WEBRTC_ERROR_INVALID_OPERATION; goto exit_with_remove_from_bin; @@ -876,6 +924,7 @@ static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) exit_with_remove_from_bin: /* elements will be dereferenced */ gst_bin_remove_many(source->bin, screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL); + __unlink_video_switch(source->bin); return ret; exit: SAFE_GST_OBJECT_UNREF(screensrc); @@ -910,7 +959,7 @@ static int __build_camerasrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) #ifdef TIZEN_FEATURE_RES_MGR webrtc->resource.need_to_acquire[MM_RESOURCE_MANAGER_RES_TYPE_CAMERA] = true; #endif - if (!(camerasrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), NULL))) + if (!(camerasrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), ELEMENT_NAME_VIDEO_SRC))) return WEBRTC_ERROR_INVALID_OPERATION; /* FIXME: set camera default setting from ini */ @@ -956,6 +1005,7 @@ static int __build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) { int ret = WEBRTC_ERROR_NONE; GstElement *audiosrc; + GstElement *volume = NULL; GstElement *capsfilter = NULL; GstElement *audioenc = NULL; GstElement *audiopay = NULL; @@ -975,11 +1025,14 @@ static int __build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) if (!(audiosrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_MIC), NULL))) return WEBRTC_ERROR_INVALID_OPERATION; + if (!(volume = _create_element(DEFAULT_ELEMENT_VOLUME, ELEMENT_NAME_VOLUME))) + goto exit; + if ((ret = __create_rest_of_elements(webrtc, source, &capsfilter, &audioenc, &audiopay, &queue, &capsfilter2)) != WEBRTC_ERROR_NONE) goto exit; - gst_bin_add_many(source->bin, audiosrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); - if (!gst_element_link_many(audiosrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) { + gst_bin_add_many(source->bin, audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); + if (!gst_element_link_many(audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) { LOG_ERROR("failed to gst_element_link_many()"); ret = WEBRTC_ERROR_INVALID_OPERATION; goto exit_with_remove_from_bin; @@ -995,10 +1048,11 @@ static int __build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) exit_with_remove_from_bin: /* elements will be dereferenced */ - gst_bin_remove_many(source->bin, audiosrc, 
capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); + gst_bin_remove_many(source->bin, audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); return ret; exit: SAFE_GST_OBJECT_UNREF(audiosrc); + SAFE_GST_OBJECT_UNREF(volume); SAFE_GST_OBJECT_UNREF(capsfilter); SAFE_GST_OBJECT_UNREF(audioenc); SAFE_GST_OBJECT_UNREF(audiopay); @@ -1027,7 +1081,7 @@ static int __build_videotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) source->media_types = MEDIA_TYPE_VIDEO; source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types); - if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), NULL))) + if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC))) return WEBRTC_ERROR_INVALID_OPERATION; g_object_set(G_OBJECT(videotestsrc), "is-live", TRUE, "pattern", 18, NULL); /* 18: ball */ @@ -1068,6 +1122,7 @@ static int __build_audiotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) { int ret = WEBRTC_ERROR_NONE; GstElement *audiotestsrc; + GstElement *volume = NULL; GstElement *capsfilter = NULL; GstElement *audioenc = NULL; GstElement *audiopay = NULL; @@ -1089,11 +1144,14 @@ static int __build_audiotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) g_object_set(G_OBJECT(audiotestsrc), "is-live", TRUE, NULL); + if (!(volume = _create_element(DEFAULT_ELEMENT_VOLUME, ELEMENT_NAME_VOLUME))) + goto exit; + if ((ret = __create_rest_of_elements(webrtc, source, &capsfilter, &audioenc, &audiopay, &queue, &capsfilter2)) != WEBRTC_ERROR_NONE) goto exit; - gst_bin_add_many(source->bin, audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); - if (!gst_element_link_many(audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) { + gst_bin_add_many(source->bin, audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); + if (!gst_element_link_many(audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) { LOG_ERROR("failed to gst_element_link_many()"); ret = WEBRTC_ERROR_INVALID_OPERATION; goto exit_with_remove_from_bin; @@ -1109,10 +1167,11 @@ static int __build_audiotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) exit_with_remove_from_bin: /* elements will be dereferenced */ - gst_bin_remove_many(source->bin, audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); + gst_bin_remove_many(source->bin, audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL); return ret; exit: SAFE_GST_OBJECT_UNREF(audiotestsrc); + SAFE_GST_OBJECT_UNREF(volume); SAFE_GST_OBJECT_UNREF(capsfilter); SAFE_GST_OBJECT_UNREF(audioenc); SAFE_GST_OBJECT_UNREF(audiopay); @@ -1500,7 +1559,7 @@ int _add_media_source(webrtc_s *webrtc, webrtc_media_source_type_e type, unsigne goto error; } - /* The gst_element_get_request_pad() of webrtcbin will trigger the transciever callback. To update the mline value of + /* The gst_element_get_request_pad() of webrtcbin will trigger the transceiver callback. To update the mline value of * new transceiver object to the source structure in the callback, hash table inserting should be preceded. 
*/ if (!g_hash_table_insert(webrtc->gst.source_slots, bin_name, (gpointer)source)) { LOG_ERROR("should not be reached here, bin_name[%s] already exist, source id[%u] will be removed", bin_name, source->id); @@ -2032,3 +2091,266 @@ exit: return ret; } + +static GstPadProbeReturn __buffer_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data) +{ + GstBuffer *buffer = gst_pad_probe_info_get_buffer(info); + GstMemory *mem = NULL; + tbm_surface_h src_tsurface; + tbm_surface_info_s ts_info; + int ret = TBM_SURFACE_ERROR_NONE; + unsigned int i = 0; + + RET_VAL_IF(buffer == NULL, GST_PAD_PROBE_OK, "buffer is NULL"); + RET_VAL_IF(gst_buffer_get_size(buffer) == 0, GST_PAD_PROBE_OK, "empty buffer"); + + mem = gst_buffer_peek_memory(buffer, 0); + RET_VAL_IF(mem == NULL, GST_PAD_PROBE_OK, "mem is NULL"); + + src_tsurface = (tbm_surface_h)gst_tizen_memory_get_surface(mem); + if (!src_tsurface) { + LOG_ERROR("failed to gst_tizen_memory_get_surface()"); + return GST_PAD_PROBE_OK; + } + + ret = tbm_surface_get_info(src_tsurface, &ts_info); + if (ret != TBM_SURFACE_ERROR_NONE) { + LOG_ERROR("failed to tbm_surface_get_info() [%d]", ret); + return GST_PAD_PROBE_OK; + } + + /* fill the buffer with black (NV12, YUV, RGB) */ + for (i = 0 ; i < ts_info.num_planes ; i++) { + if (i == 0) + memset(ts_info.planes[i].ptr, 0x00, ts_info.planes[i].size); + else + memset(ts_info.planes[i].ptr, 0x80, ts_info.planes[i].size); + } + + return GST_PAD_PROBE_OK; +} + +static int __mute_camerasrc(webrtc_gst_slot_s *source, bool mute) +{ + int ret = WEBRTC_ERROR_NONE; + GstElement *camerasrc = NULL; + GstPad *src_pad = NULL; + + LOG_DEBUG_ENTER(); + + RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); + RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted"); + + camerasrc = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SRC); + src_pad = gst_element_get_static_pad(camerasrc, "src"); + + if (mute) { + if (source->probe_id != 0) { + LOG_ERROR("fail to change to mute"); + ret = WEBRTC_ERROR_INVALID_OPERATION; + } else { + source->probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __buffer_probe_cb, NULL, NULL); + if (source->probe_id == 0) { + LOG_ERROR("failed to gst_pad_add_probe()"); + ret = WEBRTC_ERROR_INVALID_OPERATION; + } + } + } else { + if (source->probe_id == 0) { + LOG_ERROR("fail to change to unmute"); + ret = WEBRTC_ERROR_INVALID_OPERATION; + } else { + gst_pad_remove_probe(src_pad, source->probe_id); + source->probe_id = 0; + } + } + + gst_object_unref(src_pad); + + return ret; +} + +static int __mute_videosrc(webrtc_gst_slot_s *source, bool mute) +{ + GstElement *video_switch = NULL; + GstPad *new_pad = NULL; + + RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); + RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted"); + + video_switch = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SWITCH); + + new_pad = gst_element_get_static_pad(video_switch, mute ? 
"sink_1" : "sink_0"); + RET_VAL_IF(new_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "new_pad is NULL"); + + g_object_set(G_OBJECT(video_switch), "active-pad", new_pad, NULL); + gst_object_unref(new_pad); + + return WEBRTC_ERROR_NONE; +} + +static int __mute_videotestsrc(webrtc_gst_slot_s *source, bool mute) +{ + GstElement *src_element = NULL; + + RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); + RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted"); + + src_element = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SRC); + RET_VAL_IF(src_element == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_element is NULL"); + + if (!g_object_class_find_property(G_OBJECT_GET_CLASS(src_element), "pattern")) { + LOG_ERROR("there is no pattern property"); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + g_object_set(G_OBJECT(src_element), "pattern", mute ? 2 : 18, NULL); /* 2: black 18: ball */ + + return WEBRTC_ERROR_NONE; +} + +int _set_video_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute) +{ + int ret = WEBRTC_ERROR_NONE; + webrtc_gst_slot_s *source = NULL; + + RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0"); + RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source"); + + if (!(source->media_types & MEDIA_TYPE_VIDEO)) { + LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + LOG_DEBUG("source_id[%u] mute[%d]", source_id, mute); + + switch (source->type) { + case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA: + ret = __mute_camerasrc(source, mute); + break; + + case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN: + ret = __mute_videosrc(source, mute); + break; + + case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST: + ret = __mute_videotestsrc(source, mute); + break; + + default : + LOG_ERROR_IF_REACHED("type(%d)", source->type); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + if (ret == WEBRTC_ERROR_NONE) + source->video_muted = mute ? 
true : false; + + return ret; +} + +int _set_audio_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute) +{ + webrtc_gst_slot_s *source = NULL; + GstElement *volume = NULL; + + RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0"); + RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source"); + + if (!(source->media_types & MEDIA_TYPE_AUDIO)) { + LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + switch (source->type) { + case WEBRTC_MEDIA_SOURCE_TYPE_MIC: + case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST: + volume = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VOLUME); + RET_VAL_IF(volume == NULL, WEBRTC_ERROR_INVALID_OPERATION, "volume is NULL"); + + if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) { + LOG_ERROR("there is no mute property"); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + g_object_set(G_OBJECT(volume), "mute", mute, NULL); + break; + + default : + LOG_ERROR_IF_REACHED("type(%d)", source->type); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + LOG_DEBUG("source_id[%u] mute[%d]", source_id, mute); + + return WEBRTC_ERROR_NONE; +} + +int _get_video_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted) +{ + webrtc_gst_slot_s *source = NULL; + + RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0"); + RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source"); + + if (!(source->media_types & MEDIA_TYPE_VIDEO)) { + LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + switch (source->type) { + case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA: + case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN: + case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST: + *muted = source->video_muted; + break; + + default : + LOG_ERROR_IF_REACHED("type(%d)", source->type); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + LOG_DEBUG("source_id[%u] muted[%d]", source_id, *muted); + + return WEBRTC_ERROR_NONE; +} + +int _get_audio_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted) +{ + webrtc_gst_slot_s *source = NULL; + GstElement *volume = NULL; + + RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0"); + RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source"); + + if (!(source->media_types & MEDIA_TYPE_AUDIO)) { + LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + switch (source->type) { + case WEBRTC_MEDIA_SOURCE_TYPE_MIC: + case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST: + volume = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VOLUME); + RET_VAL_IF(volume == NULL, WEBRTC_ERROR_INVALID_OPERATION, "volume is NULL"); + + if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) { + LOG_ERROR("there is no mute property"); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + g_object_get(G_OBJECT(volume), "mute", muted, NULL); + break; + + 
default : + LOG_ERROR_IF_REACHED("type(%d)", source->type); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + LOG_DEBUG("source_id[%u] muted[%d]", source_id, *muted); + + return WEBRTC_ERROR_NONE; +} diff --git a/test/webrtc_test.c b/test/webrtc_test.c index 95cd0795..110045ec 100644 --- a/test/webrtc_test.c +++ b/test/webrtc_test.c @@ -80,6 +80,7 @@ enum { CURRENT_STATUS_STOP_PUSHING_PACKET_TO_MEDIA_PACKET_SOURCE, CURRENT_STATUS_CREATE_PRIVATE_SIGNALING_SERVER, CURRENT_STATUS_CONNECT_TO_PRIVATE_SIGNALING_SERVER, + CURRENT_STATUS_MUTE_MEDIA_SOURCE, CURRENT_STATUS_TERMINATE, }; @@ -845,6 +846,24 @@ static void _webrtc_data_channel_send_file(int index, const char *file_path) } } +static void _webrtc_set_media_source_mute(int index, unsigned int source_id, webrtc_media_type_e media_type, bool mute) +{ + int ret = 0; + bool mute_status = false; + + ret = webrtc_set_media_source_mute(g_conns[index].webrtc, source_id, media_type, mute); + if (ret != WEBRTC_ERROR_NONE) + g_print("failed to webrtc_set_media_source_mute(), source_id[%d], muted[%d], ret[0x%x]\n", source_id, mute, ret); + else + g_print("webrtc_set_media_source_mute() success, source_id[%d], muted[%d]\n", source_id, mute); + + ret = webrtc_get_media_source_muted(g_conns[index].webrtc, source_id, media_type, &mute_status); + if (ret != WEBRTC_ERROR_NONE) + g_print("failed to webrtc_get_media_source_muted(), source_id[%d], ret[0x%x]\n", source_id, ret); + else + g_print("webrtc_get_media_source_muted() success, source_id[%d], mute_status[%d]\n", source_id, mute_status); +} + static void _webrtc_set_stun_server(int index, char *uri) { int ret = 0; @@ -3005,6 +3024,9 @@ void _interpret_main_menu(char *cmd) } else if (strncmp(cmd, "zf", 2) == 0) { g_conns[g_conn_index].menu_state = CURRENT_STATUS_DATA_CHANNEL_SEND_FILE; + } else if (strncmp(cmd, "mu", 2) == 0) { + g_conns[g_conn_index].menu_state = CURRENT_STATUS_MUTE_MEDIA_SOURCE; + } else if (strncmp(cmd, "se", 2) == 0) { _webrtc_set_error_cb(g_conn_index); @@ -3243,6 +3265,7 @@ void display_sub_basic() g_print("zs. Send string via data channel\n"); g_print("zb. Send string as bytes data via data channel\t"); g_print("zf. Send file via data channel\n"); + g_print("mu. Mute media source\n"); g_print("------------------------------------- Callbacks -----------------------------------------\n"); g_print("sac. Set all callbacks below (except for the encoded frame callbacks)\n"); g_print("san. 
Set all the negotiation state change callbacks\t"); @@ -3371,6 +3394,14 @@ static void displaymenu() } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_DATA_CHANNEL_SEND_FILE) { g_print("*** input file path to send.\n"); + } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_MUTE_MEDIA_SOURCE) { + if (g_conns[g_conn_index].cnt == 0) + g_print("*** input source id.\n"); + else if (g_conns[g_conn_index].cnt == 1) + g_print("*** input media type.(1:audio 2:video)\n"); + else if (g_conns[g_conn_index].cnt == 2) + g_print("*** input mute mode.(0:unmuted 1:muted)\n"); + } else if (g_conns[g_conn_index].menu_state == CURRENT_STATUS_SET_STUN_SERVER) { g_print("*** input STUN server address.\n"); @@ -3729,6 +3760,29 @@ static void interpret(char *cmd) } break; } + case CURRENT_STATUS_MUTE_MEDIA_SOURCE: { + static unsigned int id; + static unsigned int media_type; + value = atoi(cmd); + + switch (g_conns[g_conn_index].cnt) { + case 0: + id = value; + g_conns[g_conn_index].cnt++; + break; + case 1: + media_type = value - 1; + g_conns[g_conn_index].cnt++; + break; + case 2: + _webrtc_set_media_source_mute(g_conn_index, id, media_type, value); + id = media_type = 0; + g_conns[g_conn_index].cnt = 0; + reset_menu_state(); + break; + } + break; + } } if (g_conns[g_conn_index].menu_state != CURRENT_STATUS_TERMINATE)
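
For reference, a minimal usage sketch of the new API pair (illustrative only, not part of this change). It assumes a webrtc_h handle that already has a camera source attached, for example via webrtc_add_media_source(), and it omits signaling and negotiation; the enum and error names are the ones introduced in include/webrtc.h above.

#include <webrtc.h>
#include <stdbool.h>
#include <stdio.h>

/* Illustrative sketch: mute the video of an already-added camera source,
 * then read the mute status back with the new getter. */
static void mute_camera_video(webrtc_h webrtc, unsigned int camera_source_id)
{
    bool muted = false;
    int ret;

    /* A black frame is transmitted to the remote peer instead of the camera frames. */
    ret = webrtc_set_media_source_mute(webrtc, camera_source_id, WEBRTC_MEDIA_TYPE_VIDEO, true);
    if (ret != WEBRTC_ERROR_NONE) {
        printf("failed to webrtc_set_media_source_mute(), ret[0x%x]\n", ret);
        return;
    }

    /* The default is false; expected to read back true after the call above. */
    ret = webrtc_get_media_source_muted(webrtc, camera_source_id, WEBRTC_MEDIA_TYPE_VIDEO, &muted);
    if (ret == WEBRTC_ERROR_NONE)
        printf("muted[%d]\n", muted);
}

The same calls with WEBRTC_MEDIA_TYPE_AUDIO mute a microphone or audiotest source (silence instead of a black frame); per the header documentation, a #WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET source returns #WEBRTC_ERROR_INVALID_PARAMETER.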
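
The video mute path for the screen source works by switching an input-selector between the real source and a black videotestsrc (__link_video_switch()/__mute_videosrc() above), while the camera path blacks out buffers with a pad probe and the audio path toggles the volume element's mute property. A standalone GStreamer sketch of the input-selector idea follows; the test source standing in for the screen capture and the autovideosink are assumptions for illustration, not taken from the patch.

#include <gst/gst.h>

/* Standalone illustration of the switch used by __link_video_switch()/__mute_videosrc():
 * sink_0 carries the real source, sink_1 a black videotestsrc; muting selects sink_1. */
int main(int argc, char *argv[])
{
    GstElement *pipeline, *src, *black, *sel, *sink;
    GstPad *mute_pad;

    gst_init(&argc, &argv);

    pipeline = gst_pipeline_new("mute-demo");
    src = gst_element_factory_make("videotestsrc", "realSrc");        /* stand-in for the screen source */
    black = gst_element_factory_make("videotestsrc", "videoMuteSrc");
    sel = gst_element_factory_make("input-selector", "videoSwitch");
    sink = gst_element_factory_make("autovideosink", NULL);

    g_object_set(src, "is-live", TRUE, "pattern", 18, NULL);  /* 18: ball */
    g_object_set(black, "is-live", TRUE, "pattern", 2, NULL); /* 2: black */

    gst_bin_add_many(GST_BIN(pipeline), src, black, sel, sink, NULL);
    gst_element_link(src, sel);   /* becomes selector pad "sink_0" */
    gst_element_link(black, sel); /* becomes selector pad "sink_1" */
    gst_element_link(sel, sink);

    /* "Mute": route the black source to the output, as __mute_videosrc() does. */
    mute_pad = gst_element_get_static_pad(sel, "sink_1");
    g_object_set(sel, "active-pad", mute_pad, NULL);
    gst_object_unref(mute_pad);

    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    g_usleep(3 * G_USEC_PER_SEC);
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    return 0;
}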