From: Sangchul Lee
Date: Sat, 11 Mar 2023 07:04:34 +0000 (+0900)
Subject: webrtc_sink: Save mute value and apply it when audiosink is created
X-Git-Tag: accepted/tizen/7.0/unified/20231113.030824~1
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=5e25c01f348463ea215088f70386dcf54874dca5;p=platform%2Fcore%2Fapi%2Fwebrtc.git

webrtc_sink: Save mute value and apply it when audiosink is created

[Version] 0.3.291
[Issue type] Improvement

Change-Id: Icb5181afd2c759756187cd2a68ce4fa315c84c15
---

diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 42094c62..7d0b0d8d 100644
--- a/packaging/capi-media-webrtc.spec
+++ b/packaging/capi-media-webrtc.spec
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.290
+Version:    0.3.291
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c
index 97e8cbf6..d9c3df1f 100644
--- a/src/webrtc_sink.c
+++ b/src/webrtc_sink.c
@@ -317,6 +317,8 @@ static int __build_audiosink(webrtc_s *webrtc, GstElement *decodebin, GstPad *sr
 			goto exit;
 		}
 	}
+	g_object_set(G_OBJECT(audiosink), "mute", sink->av[AV_IDX_AUDIO].mute, NULL);
+	LOG_DEBUG("audiosink mute[%d]", sink->av[AV_IDX_AUDIO].mute);
 
 	if (!(audioresample = _create_element(DEFAULT_ELEMENT_AUDIORESAMPLE, NULL)))
 		goto exit;
@@ -1253,9 +1255,11 @@ int _set_audio_mute_to_sink(webrtc_s *webrtc, unsigned int track_id, bool mute)
 	RET_VAL_IF(sink->encoded_frame_cb != NULL, WEBRTC_ERROR_INVALID_OPERATION, "it may be a forwarding sink for encoded frame callback");
 	RET_VAL_IF((sink->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not an audio track");
 	RET_VAL_IF(sink->sound_stream_info.type == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sound_stream_info is not set");
-	RET_VAL_IF(!(audiosink = gst_bin_get_by_name(sink->bin, ELEMENT_NAME_AUDIO_SINK)), WEBRTC_ERROR_INVALID_OPERATION, "could not find audio sink element");
-
-	g_object_set(G_OBJECT(audiosink), "mute", mute, NULL);
+	sink->av[AV_IDX_AUDIO].mute = mute;
+
+	if ((audiosink = gst_bin_get_by_name(sink->bin, ELEMENT_NAME_AUDIO_SINK)))
+		g_object_set(G_OBJECT(audiosink), "mute", mute, NULL);
 
 	LOG_INFO("webrtc[%p] track_id[%u] mute[%u]", webrtc, track_id, mute);
 
@@ -1265,7 +1269,6 @@ int _set_audio_mute_to_sink(webrtc_s *webrtc, unsigned int track_id, bool mute)
 int _get_audio_mute_from_sink(webrtc_s *webrtc, unsigned int track_id, bool *muted)
 {
 	webrtc_gst_slot_s *sink;
-	GstElement *audiosink;
 
 	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
 	RET_VAL_IF(track_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "track id is 0");
@@ -1277,9 +1280,8 @@ int _get_audio_mute_from_sink(webrtc_s *webrtc, unsigned int track_id, bool *mut
 	RET_VAL_IF(sink->encoded_frame_cb != NULL, WEBRTC_ERROR_INVALID_OPERATION, "it may be a forwarding sink for encoded frame callback");
 	RET_VAL_IF((sink->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not an audio track");
 	RET_VAL_IF(sink->sound_stream_info.type == NULL, WEBRTC_ERROR_INVALID_OPERATION, "sound_stream_info is not set");
-	RET_VAL_IF(!(audiosink = gst_bin_get_by_name(sink->bin, ELEMENT_NAME_AUDIO_SINK)), WEBRTC_ERROR_INVALID_OPERATION, "could not find audio sink element");
 
-	g_object_get(G_OBJECT(audiosink), "mute", muted, NULL);
+	*muted = sink->av[AV_IDX_AUDIO].mute;
 
 	LOG_INFO("webrtc[%p] track_id[%u] muted[%u]", webrtc, track_id, *muted);