#define DEFAULT_ELEMENT_VIDEOCONVERT "videoconvert"
#define DEFAULT_ELEMENT_CAPSFILTER "capsfilter"
#define DEFAULT_ELEMENT_QUEUE "queue"
+#define DEFAULT_ELEMENT_VOLUME "volume"
+#define DEFAULT_ELEMENT_INPUT_SELECTOR "input-selector"
#define ELEMENT_NAME_FIRST_CAPSFILTER "firstCapsfilter"
+#define ELEMENT_NAME_VIDEO_SRC "videoSrc"
+#define ELEMENT_NAME_VIDEO_SWITCH "videoSwitch"
+#define ELEMENT_NAME_VIDEO_MUTE_SRC "videoMuteSrc"
+#define ELEMENT_NAME_VOLUME "volume"
typedef enum {
CODEC_TYPE_OPUS,
source->av[probe_userdata->av_idx].src_pad_probe_id, pad, probe_userdata->av_idx, source);
}
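+/* Adds a black videotestsrc and an input-selector to the bin, then links both the given
+ * source(sink_0) and the black source(sink_1) to the selector so that the active pad can
+ * be toggled later to mute/unmute the video. Returns the input-selector element. */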
+static GstElement * __link_video_switch(GstBin *bin, GstElement *src)
+{
+ GstElement *videotestsrc = NULL;
+ GstElement *videoswitch = NULL;
+
+ RET_VAL_IF(bin == NULL, NULL, "bin is NULL");
+ RET_VAL_IF(src == NULL, NULL, "src is NULL");
+
+ if (!(videotestsrc = _create_element(DEFAULT_ELEMENT_VIDEOTESTSRC, ELEMENT_NAME_VIDEO_MUTE_SRC)))
+ return NULL;
+
+ g_object_set(G_OBJECT(videotestsrc), "is-live", TRUE, "pattern", 2, NULL); /* 2: black */
+
+	if (!(videoswitch = _create_element(DEFAULT_ELEMENT_INPUT_SELECTOR, ELEMENT_NAME_VIDEO_SWITCH))) {
+ SAFE_GST_OBJECT_UNREF(videotestsrc);
+ return NULL;
+ }
+
+ gst_bin_add_many(bin, videotestsrc, videoswitch, NULL);
+
+	if (!gst_element_link(src, videoswitch) || !gst_element_link(videotestsrc, videoswitch)) {
+		LOG_ERROR("failed to gst_element_link()");
+		return NULL;
+	}
+
+ return videoswitch;
+}
+
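+/* Removes the black videotestsrc and the input-selector added by __link_video_switch() from the bin. */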
+static void __unlink_video_switch(GstBin *bin)
+{
+ GstElement *videotestsrc = NULL;
+ GstElement *videoswitch = NULL;
+
+	videotestsrc = gst_bin_get_by_name(bin, ELEMENT_NAME_VIDEO_MUTE_SRC);
+	videoswitch = gst_bin_get_by_name(bin, ELEMENT_NAME_VIDEO_SWITCH);
+	if (videotestsrc == NULL || videoswitch == NULL) {
+		LOG_ERROR("could not find the mute source or the video switch in the bin");
+		SAFE_GST_OBJECT_UNREF(videotestsrc);
+		SAFE_GST_OBJECT_UNREF(videoswitch);
+		return;
+	}
+
+	gst_bin_remove_many(bin, videotestsrc, videoswitch, NULL);
+
+	/* unref the references taken by gst_bin_get_by_name() */
+	gst_object_unref(videotestsrc);
+	gst_object_unref(videoswitch);
+}
+
static int __build_screensrc(webrtc_s *webrtc, webrtc_gst_slot_s *source)
{
int ret = WEBRTC_ERROR_NONE;
GstElement *videopay = NULL;
GstElement *queue = NULL;
GstElement *capsfilter2 = NULL;
+ GstElement *video_switch = NULL;
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
source->media_types = MEDIA_TYPE_VIDEO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
- if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), NULL)))
+ if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL)))
goto exit;
gst_bin_add_many(source->bin, screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL);
- if (!gst_element_link_many(screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL)) {
+
+	video_switch = __link_video_switch(source->bin, screensrc);
+	if (!video_switch) {
+		ret = WEBRTC_ERROR_INVALID_OPERATION;
+		goto exit_with_remove_from_bin;
+	}
+
+ if (!gst_element_link_many(video_switch, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL)) {
LOG_ERROR("failed to gst_element_link_many()");
ret = WEBRTC_ERROR_INVALID_OPERATION;
goto exit_with_remove_from_bin;
exit_with_remove_from_bin:
/* elements will be dereferenced */
gst_bin_remove_many(source->bin, screensrc, videoconvert, capsfilter1, videoenc, videopay, queue, capsfilter2, NULL);
+ __unlink_video_switch(source->bin);
return ret;
exit:
SAFE_GST_OBJECT_UNREF(screensrc);
#ifdef TIZEN_FEATURE_RES_MGR
webrtc->resource.need_to_acquire[MM_RESOURCE_MANAGER_RES_TYPE_CAMERA] = true;
#endif
- if (!(camerasrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), NULL)))
+ if (!(camerasrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_CAMERA), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
/* FIXME: set camera default setting from ini */
{
int ret = WEBRTC_ERROR_NONE;
GstElement *audiosrc;
+ GstElement *volume = NULL;
GstElement *capsfilter = NULL;
GstElement *audioenc = NULL;
GstElement *audiopay = NULL;
if (!(audiosrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_MIC), NULL)))
return WEBRTC_ERROR_INVALID_OPERATION;
+ if (!(volume = _create_element(DEFAULT_ELEMENT_VOLUME, ELEMENT_NAME_VOLUME)))
+ goto exit;
+
if ((ret = __create_rest_of_elements(webrtc, source, &capsfilter, &audioenc, &audiopay, &queue, &capsfilter2)) != WEBRTC_ERROR_NONE)
goto exit;
- gst_bin_add_many(source->bin, audiosrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
- if (!gst_element_link_many(audiosrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) {
+ gst_bin_add_many(source->bin, audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
+ if (!gst_element_link_many(audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) {
LOG_ERROR("failed to gst_element_link_many()");
ret = WEBRTC_ERROR_INVALID_OPERATION;
goto exit_with_remove_from_bin;
exit_with_remove_from_bin:
/* elements will be dereferenced */
- gst_bin_remove_many(source->bin, audiosrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
+ gst_bin_remove_many(source->bin, audiosrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
return ret;
exit:
SAFE_GST_OBJECT_UNREF(audiosrc);
+ SAFE_GST_OBJECT_UNREF(volume);
SAFE_GST_OBJECT_UNREF(capsfilter);
SAFE_GST_OBJECT_UNREF(audioenc);
SAFE_GST_OBJECT_UNREF(audiopay);
source->media_types = MEDIA_TYPE_VIDEO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
- if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), NULL)))
+ if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
g_object_set(G_OBJECT(videotestsrc), "is-live", TRUE, "pattern", 18, NULL); /* 18: ball */
{
int ret = WEBRTC_ERROR_NONE;
GstElement *audiotestsrc;
+ GstElement *volume = NULL;
GstElement *capsfilter = NULL;
GstElement *audioenc = NULL;
GstElement *audiopay = NULL;
g_object_set(G_OBJECT(audiotestsrc), "is-live", TRUE, NULL);
+ if (!(volume = _create_element(DEFAULT_ELEMENT_VOLUME, ELEMENT_NAME_VOLUME)))
+ goto exit;
+
if ((ret = __create_rest_of_elements(webrtc, source, &capsfilter, &audioenc, &audiopay, &queue, &capsfilter2)) != WEBRTC_ERROR_NONE)
goto exit;
- gst_bin_add_many(source->bin, audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
- if (!gst_element_link_many(audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) {
+ gst_bin_add_many(source->bin, audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
+ if (!gst_element_link_many(audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL)) {
LOG_ERROR("failed to gst_element_link_many()");
ret = WEBRTC_ERROR_INVALID_OPERATION;
goto exit_with_remove_from_bin;
exit_with_remove_from_bin:
/* elements will be dereferenced */
- gst_bin_remove_many(source->bin, audiotestsrc, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
+ gst_bin_remove_many(source->bin, audiotestsrc, volume, capsfilter, audioenc, audiopay, queue, capsfilter2, NULL);
return ret;
exit:
SAFE_GST_OBJECT_UNREF(audiotestsrc);
+ SAFE_GST_OBJECT_UNREF(volume);
SAFE_GST_OBJECT_UNREF(capsfilter);
SAFE_GST_OBJECT_UNREF(audioenc);
SAFE_GST_OBJECT_UNREF(audiopay);
goto error;
}
- /* The gst_element_get_request_pad() of webrtcbin will trigger the transciever callback. To update the mline value of
+ /* The gst_element_get_request_pad() of webrtcbin will trigger the transceiver callback. To update the mline value of
* new transceiver object to the source structure in the callback, hash table inserting should be preceded. */
if (!g_hash_table_insert(webrtc->gst.source_slots, bin_name, (gpointer)source)) {
LOG_ERROR("should not be reached here, bin_name[%s] already exist, source id[%u] will be removed", bin_name, source->id);
return ret;
}
+
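+/* Buffer probe callback used while a camera source is muted - instead of dropping the buffer,
+ * it fills the planes of the tbm surface carried by the buffer with black. */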
+static GstPadProbeReturn __buffer_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
+{
+ GstBuffer *buffer = gst_pad_probe_info_get_buffer(info);
+ GstMemory *mem = NULL;
+ tbm_surface_h src_tsurface;
+ tbm_surface_info_s ts_info;
+ int ret = TBM_SURFACE_ERROR_NONE;
+ unsigned int i = 0;
+
+ RET_VAL_IF(buffer == NULL, GST_PAD_PROBE_OK, "buffer is NULL");
+ RET_VAL_IF(gst_buffer_get_size(buffer) == 0, GST_PAD_PROBE_OK, "empty buffer");
+
+ mem = gst_buffer_peek_memory(buffer, 0);
+ RET_VAL_IF(mem == NULL, GST_PAD_PROBE_OK, "mem is NULL");
+
+ src_tsurface = (tbm_surface_h)gst_tizen_memory_get_surface(mem);
+ if (!src_tsurface) {
+ LOG_ERROR("failed to gst_tizen_memory_get_surface()");
+ return GST_PAD_PROBE_OK;
+ }
+
+ ret = tbm_surface_get_info(src_tsurface, &ts_info);
+ if (ret != TBM_SURFACE_ERROR_NONE) {
+ LOG_ERROR("failed to tbm_surface_get_info() [%d]", ret);
+ return GST_PAD_PROBE_OK;
+ }
+
+	/* fill the buffer with black - 0x00 for the first plane(luma or RGB), 0x80 for the remaining chroma planes */
+ for (i = 0 ; i < ts_info.num_planes ; i++) {
+ if (i == 0)
+ memset(ts_info.planes[i].ptr, 0x00, ts_info.planes[i].size);
+ else
+ memset(ts_info.planes[i].ptr, 0x80, ts_info.planes[i].size);
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
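+/* Mutes the camera source by adding a buffer probe on its src pad that blackens every buffer
+ * (tbm surface) in place; removing the probe unmutes it. */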
+static int __mute_camerasrc(webrtc_gst_slot_s *source, bool mute)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ GstElement *camerasrc = NULL;
+ GstPad *src_pad = NULL;
+
+ LOG_DEBUG_ENTER();
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted");
+
+	camerasrc = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SRC);
+	RET_VAL_IF(camerasrc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "camerasrc is NULL");
+
+	src_pad = gst_element_get_static_pad(camerasrc, "src");
+	gst_object_unref(camerasrc);
+	RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_pad is NULL");
+
+ if (mute) {
+ if (source->probe_id != 0) {
+ LOG_ERROR("fail to change to mute");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ } else {
+ source->probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __buffer_probe_cb, NULL, NULL);
+ if (source->probe_id == 0) {
+ LOG_ERROR("failed to gst_pad_add_probe()");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ }
+ }
+ } else {
+ if (source->probe_id == 0) {
+ LOG_ERROR("fail to change to unmute");
+ ret = WEBRTC_ERROR_INVALID_OPERATION;
+ } else {
+ gst_pad_remove_probe(src_pad, source->probe_id);
+ source->probe_id = 0;
+ }
+ }
+
+ gst_object_unref(src_pad);
+
+ return ret;
+}
+
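+/* Mutes/unmutes the screen source by setting the input-selector's active pad to either
+ * the black videotestsrc(sink_1) or the original source(sink_0). */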
+static int __mute_videosrc(webrtc_gst_slot_s *source, bool mute)
+{
+ GstElement *video_switch = NULL;
+ GstPad *new_pad = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted");
+
+	video_switch = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SWITCH);
+	RET_VAL_IF(video_switch == NULL, WEBRTC_ERROR_INVALID_OPERATION, "video_switch is NULL");
+
+	new_pad = gst_element_get_static_pad(video_switch, mute ? "sink_1" : "sink_0");
+	if (new_pad == NULL) {
+		LOG_ERROR("failed to get %s pad from the input-selector", mute ? "sink_1" : "sink_0");
+		gst_object_unref(video_switch);
+		return WEBRTC_ERROR_INVALID_OPERATION;
+	}
+
+	g_object_set(G_OBJECT(video_switch), "active-pad", new_pad, NULL);
+	gst_object_unref(new_pad);
+	gst_object_unref(video_switch);
+
+ return WEBRTC_ERROR_NONE;
+}
+
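+/* Mutes/unmutes the video test source by switching its pattern property between black and ball. */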
+static int __mute_videotestsrc(webrtc_gst_slot_s *source, bool mute)
+{
+ GstElement *src_element = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->video_muted == mute, WEBRTC_ERROR_NONE, "Already %s", mute ? "muted" : "unmuted");
+
+	src_element = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SRC);
+	RET_VAL_IF(src_element == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_element is NULL");
+
+	if (!g_object_class_find_property(G_OBJECT_GET_CLASS(src_element), "pattern")) {
+		LOG_ERROR("there is no pattern property");
+		gst_object_unref(src_element);
+		return WEBRTC_ERROR_INVALID_OPERATION;
+	}
+
+	g_object_set(G_OBJECT(src_element), "pattern", mute ? 2 : 18, NULL); /* 2: black, 18: ball */
+	gst_object_unref(src_element);
+
+ return WEBRTC_ERROR_NONE;
+}
+
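+/* Applies mute/unmute to a video source slot; the method depends on the source type
+ * (buffer probe for camera, input-selector for screen, pattern property for videotest). */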
+int _set_video_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source = NULL;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+
+ if (!(source->media_types & MEDIA_TYPE_VIDEO)) {
+ LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ LOG_DEBUG("source_id[%u] mute[%d]", source_id, mute);
+
+ switch (source->type) {
+ case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA:
+ ret = __mute_camerasrc(source, mute);
+ break;
+
+ case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN:
+ ret = __mute_videosrc(source, mute);
+ break;
+
+ case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST:
+ ret = __mute_videotestsrc(source, mute);
+ break;
+
+ default :
+ LOG_ERROR_IF_REACHED("type(%d)", source->type);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ if (ret == WEBRTC_ERROR_NONE)
+		source->video_muted = mute;
+
+ return ret;
+}
+
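+/* Applies mute/unmute to an audio source slot by toggling the 'mute' property of the
+ * volume element linked right after the audio source. */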
+int _set_audio_source_mute(webrtc_s *webrtc, unsigned int source_id, bool mute)
+{
+ webrtc_gst_slot_s *source = NULL;
+ GstElement *volume = NULL;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+
+ if (!(source->media_types & MEDIA_TYPE_AUDIO)) {
+ LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ switch (source->type) {
+ case WEBRTC_MEDIA_SOURCE_TYPE_MIC:
+ case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST:
+		volume = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VOLUME);
+		RET_VAL_IF(volume == NULL, WEBRTC_ERROR_INVALID_OPERATION, "volume is NULL");
+
+		if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) {
+			LOG_ERROR("there is no mute property");
+			gst_object_unref(volume);
+			return WEBRTC_ERROR_INVALID_OPERATION;
+		}
+
+		g_object_set(G_OBJECT(volume), "mute", mute, NULL);
+		gst_object_unref(volume);
+ break;
+
+ default :
+ LOG_ERROR_IF_REACHED("type(%d)", source->type);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ LOG_DEBUG("source_id[%u] mute[%d]", source_id, mute);
+
+ return WEBRTC_ERROR_NONE;
+}
+
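+/* Returns the cached mute state of a video source slot. */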
+int _get_video_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted)
+{
+ webrtc_gst_slot_s *source = NULL;
+
+	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(muted == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "muted is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+
+ if (!(source->media_types & MEDIA_TYPE_VIDEO)) {
+ LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ switch (source->type) {
+ case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA:
+ case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN:
+ case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST:
+ *muted = source->video_muted;
+ break;
+
+ default :
+ LOG_ERROR_IF_REACHED("type(%d)", source->type);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ LOG_DEBUG("source_id[%u] muted[%d]", source_id, *muted);
+
+ return WEBRTC_ERROR_NONE;
+}
+
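+/* Returns the mute state of an audio source slot by reading the 'mute' property of its volume element. */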
+int _get_audio_source_muted(webrtc_s *webrtc, unsigned int source_id, bool *muted)
+{
+ webrtc_gst_slot_s *source = NULL;
+ GstElement *volume = NULL;
+
+	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(muted == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "muted is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+
+ if (!(source->media_types & MEDIA_TYPE_AUDIO)) {
+ LOG_ERROR("invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ switch (source->type) {
+ case WEBRTC_MEDIA_SOURCE_TYPE_MIC:
+ case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST:
+		volume = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VOLUME);
+		RET_VAL_IF(volume == NULL, WEBRTC_ERROR_INVALID_OPERATION, "volume is NULL");
+
+		if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) {
+			LOG_ERROR("there is no mute property");
+			gst_object_unref(volume);
+			return WEBRTC_ERROR_INVALID_OPERATION;
+		}
+
+		g_object_get(G_OBJECT(volume), "mute", muted, NULL);
+		gst_object_unref(volume);
+ break;
+
+ default :
+ LOG_ERROR_IF_REACHED("type(%d)", source->type);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ LOG_DEBUG("source_id[%u] muted[%d]", source_id, *muted);
+
+ return WEBRTC_ERROR_NONE;
+}