int _unset_screen_source_crop(webrtc_s *webrtc, unsigned int source_id);
bool _is_screen_source_cropped(webrtc_gst_slot_s *source);
+/* source loopback */
+int _set_audio_loopback(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info, unsigned int *track_id);
+int _unset_audio_loopback(webrtc_s *webrtc, unsigned int source_id);
+int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display, unsigned int *track_id);
+int _unset_video_loopback(webrtc_s *webrtc, unsigned int source_id);
+int _set_display_mode_to_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e mode);
+int _get_display_mode_from_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e *mode);
+int _set_display_visible_to_loopback(webrtc_s *webrtc, unsigned int track_id, bool visible);
+int _get_display_visible_from_loopback(webrtc_s *webrtc, unsigned int track_id, bool *visible);
+GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
+void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb);
+void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx);
+void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx);
+void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx);
+int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height);
+
/* source */
int _complete_sources(webrtc_s *webrtc);
const char *_get_audio_media_type(const char *codec_name);
int _get_video_framerate(webrtc_s *webrtc, unsigned int source_id, int *framerate);
int _set_camera_device_id(webrtc_s *webrtc, unsigned int source_id, unsigned int device_id);
int _get_camera_device_id(webrtc_s *webrtc, unsigned int source_id, unsigned int *device_id);
-int _set_display_mode_to_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e mode);
-int _get_display_mode_from_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e *mode);
-int _set_display_visible_to_loopback(webrtc_s *webrtc, unsigned int track_id, bool visible);
-int _get_display_visible_from_loopback(webrtc_s *webrtc, unsigned int track_id, bool *visible);
-int _set_audio_loopback(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info, unsigned int *track_id);
-int _unset_audio_loopback(webrtc_s *webrtc, unsigned int source_id);
-int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display, unsigned int *track_id);
-int _unset_video_loopback(webrtc_s *webrtc, unsigned int source_id);
int _set_sound_stream_info(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info);
int _set_media_path(webrtc_s *webrtc, unsigned int source_id, const char *path);
GstPadProbeReturn _payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
void _add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb);
void _remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx);
-GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
-void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb);
-void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx);
int _link_source_with_webrtcbin(webrtc_gst_slot_s *source, GstElement *webrtcbin);
int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list, bool is_audio);
int _set_encoder_element_bitrate(GstElement *encoder, int target_bitrate);
int _get_encoder_element_bitrate(GstElement *encoder, int *target_bitrate);
-void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx);
-void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx);
void _set_video_src_resolution(webrtc_gst_slot_s *source, int width, int height);
GstCaps *_make_video_raw_caps_with_resolution(webrtc_gst_slot_s *source, webrtc_ini_s *ini, int width, int height);
-int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height);
#endif /* __TIZEN_MEDIA_WEBRTC_SOURCE_COMMON_H__ */
Name: capi-media-webrtc
Summary: A WebRTC library in Tizen Native API
-Version: 0.3.230
+Version: 0.3.231
Release: 0
Group: Multimedia/API
License: Apache-2.0
return WEBRTC_ERROR_NONE;
}
-
-static int __build_loopback_audiosink(webrtc_gst_slot_s *source, GstElement *link_with)
-{
- webrtc_s *webrtc;
- GstElement *audiosink;
- GstElement *audioconvert;
- GstElement *audioresample;
- GList *element_list = NULL;
- int ret = WEBRTC_ERROR_NONE;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
- RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
-
- webrtc = source->webrtc;
-
- if (!(audioconvert = _create_element(DEFAULT_ELEMENT_AUDIOCONVERT, NULL)))
- return WEBRTC_ERROR_INVALID_OPERATION;
- APPEND_ELEMENT(element_list, audioconvert);
-
- if (!(audioresample = _create_element(DEFAULT_ELEMENT_AUDIORESAMPLE, NULL)))
- goto exit;
- APPEND_ELEMENT(element_list, audioresample);
-
- if (!(audiosink = _create_element(webrtc->ini.rendering_sink.a_sink_element, NULL)))
- goto exit;
- APPEND_ELEMENT(element_list, audiosink);
-
- if (g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(audiosink)), "stream-properties")) {
- if (source->sound_stream_info.type) {
- ret = _apply_stream_info(audiosink, source->sound_stream_info.type, source->sound_stream_info.index,
- SOUND_MANAGER_STREAM_NO_REFERENCE_DEVICE);
- if (ret != WEBRTC_ERROR_NONE)
- goto exit;
- }
- }
- g_object_set(G_OBJECT(audiosink), "sync", FALSE, NULL);
-
- if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_AUDIO].render.pipeline), element_list)) {
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (!_link_elements(element_list))
- goto exit_with_remove_from_bin;
-
- if (!gst_element_link(link_with, audioconvert)) {
- LOG_ERROR("failed to gst_element_link(), [%s] - [%s]", GST_ELEMENT_NAME(link_with), GST_ELEMENT_NAME(audioconvert));
- goto exit_with_remove_from_bin;
- }
-
- if (!_sync_elements_state_with_parent(element_list))
- goto exit_with_remove_from_bin;
-
- SAFE_G_LIST_FREE(element_list);
-
- return WEBRTC_ERROR_NONE;
-
-exit_with_remove_from_bin:
- _remove_elements_from_bin(source->bin, element_list);
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-static int __build_loopback_videosink(webrtc_gst_slot_s *source, GstElement *link_with)
-{
- webrtc_s *webrtc;
- const char *videosink_factory_name;
- GstElement *videosink;
- GstElement *videoconvert;
- GstElement *capsfilter;
- GList *element_list = NULL;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
- RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
- RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
- RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
-
- webrtc = source->webrtc;
-
- switch (source->display->type) {
- case WEBRTC_DISPLAY_TYPE_OVERLAY:
- case WEBRTC_DISPLAY_TYPE_ECORE_WL:
- videosink_factory_name = webrtc->ini.rendering_sink.v_sink_element;
- break;
-//LCOV_EXCL_START
- case WEBRTC_DISPLAY_TYPE_EVAS:
- videosink_factory_name = DEFAULT_ELEMENT_FAKESINK;
- break;
-//LCOV_EXCL_STOP
- default:
- LOG_ERROR_IF_REACHED("invalid display type(%d)", source->display->type);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
-#ifndef TIZEN_TV
- if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY && webrtc->ini.resource_acquisition.video_overlay) {
- int ret;
- if ((ret = _acquire_resource_for_type(webrtc, MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY)) != WEBRTC_ERROR_NONE)
- return ret;
- }
-#endif
-
- if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL)))
- return WEBRTC_ERROR_INVALID_OPERATION;
- APPEND_ELEMENT(element_list, videoconvert);
-
- if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
- GstCaps *caps;
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, NULL)))
- goto exit;
- APPEND_ELEMENT(element_list, capsfilter);
-
- caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
- "format", G_TYPE_STRING, webrtc->ini.rendering_sink.evas_native_surface_tbm_format,
- NULL);
- g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
- gst_caps_unref(caps);
- }
-
- if (!(videosink = _create_element(videosink_factory_name, NULL)))
- goto exit;
- APPEND_ELEMENT(element_list, videosink);
-
- if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY ||
- source->display->type == WEBRTC_DISPLAY_TYPE_ECORE_WL) {
- gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), source->display->overlay_surface_id);
- if (!g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(videosink)), "display-geometry-method")) {
- LOG_ERROR("could not find 'display-geometry-method' property");
- goto exit;
- }
- if (!g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(videosink)), "visible")) {
- LOG_ERROR("could not find 'visible' property");
- goto exit;
- }
- g_object_set(G_OBJECT(videosink),
- "display-geometry-method", (gint)source->display->mode, /* 0: letter box, 1: origin size, 2: full screen */
- "visible", (gboolean)source->display->visible,
- NULL);
-//LCOV_EXCL_START
- } else if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
- g_object_set(G_OBJECT(videosink), "signal-handoffs", TRUE, NULL);
- _connect_and_append_signal(&source->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), source->display);
- }
-//LCOV_EXCL_STOP
- if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), element_list)) {
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (!_link_elements(element_list))
- goto exit_with_remove_from_bin;
-
- if (!gst_element_link(link_with, videoconvert)) {
- LOG_ERROR("failed to gst_element_link(), [%s] - [%s]", GST_ELEMENT_NAME(link_with), GST_ELEMENT_NAME(videoconvert));
- goto exit_with_remove_from_bin;
- }
-
- if (!_sync_elements_state_with_parent(element_list))
- goto exit_with_remove_from_bin;
-
- source->display->sink_element = videosink;
-
- SAFE_G_LIST_FREE(element_list);
-
- return WEBRTC_ERROR_NONE;
-
-exit_with_remove_from_bin:
- _remove_elements_from_bin(source->bin, element_list);
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-//LCOV_EXCL_START
-static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpointer user_data)
-{
- int ret = WEBRTC_ERROR_NONE;
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
- gchar *media_type;
-
- RET_IF(source == NULL, "source is NULL");
-
- if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
- return;
-
- media_type = _get_mime_type_from_pad(new_pad);
- LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
-
- if (!_is_supported_media_type(media_type)) {
- g_free(media_type);
- return;
- }
-
- if (_is_audio_media_type(media_type)) {
- ret = __build_loopback_audiosink(source, decodebin);
- if (ret != WEBRTC_ERROR_NONE)
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
- else
- GENERATE_DOT(source->webrtc, source->av[AV_IDX_AUDIO].render.pipeline, "%s.%s-%s",
- GST_ELEMENT_NAME(source->av[AV_IDX_AUDIO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
-
- } else {
- ret = __build_loopback_videosink(source, decodebin);
- if (ret != WEBRTC_ERROR_NONE)
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
- else
- GENERATE_DOT(source->webrtc, source->av[AV_IDX_VIDEO].render.pipeline, "%s.%s-%s",
- GST_ELEMENT_NAME(source->av[AV_IDX_VIDEO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
-
- }
-
- g_free(media_type);
-
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to build loopback rendering sink, ret[0x%x]", ret);
- _post_error_cb_in_idle(source->webrtc, ret);
- }
-}
-//LCOV_EXCL_STOP
-
-static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source, media_type_e type, unsigned int *track_id)
-{
- GstElement *appsrc;
- gchar *pipeline_name;
- const int idx = GET_AV_IDX_BY_TYPE(type);
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- if (type == MEDIA_TYPE_VIDEO) {
- RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
- RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
- }
-
- pipeline_name = g_strdup_printf("loopback-pipeline-for-source_%u", source->id);
- source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
- g_free(pipeline_name);
- RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
-
- appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, NULL);
- if (!appsrc)
- goto error;
-
- g_object_set(G_OBJECT(appsrc),
- "is-live", TRUE,
- "format", GST_FORMAT_TIME,
- NULL);
-
- if (source->av[idx].render.appsrc_caps)
- g_object_set(G_OBJECT(appsrc), "caps", source->av[idx].render.appsrc_caps, NULL);
-
- if (source->av[idx].render.need_decoding) {
- GstElement *decodebin = _create_element("decodebin", NULL);
- if (!decodebin) {
- SAFE_GST_OBJECT_UNREF(appsrc);
- goto error;
- }
-
- gst_bin_add_many(GST_BIN(source->av[idx].render.pipeline), appsrc, decodebin, NULL);
- if (!gst_element_link(appsrc, decodebin)) {
- LOG_ERROR("failed to gst_element_link()");
- goto error;
- }
-
- g_signal_connect(decodebin, "pad-added", G_CALLBACK(__loopback_decodebin_pad_added_cb), source);
- g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(_decodebin_autoplug_select_cb), webrtc);
-
- } else {
- int ret = WEBRTC_ERROR_NONE;
-
- gst_bin_add(GST_BIN(source->av[idx].render.pipeline), appsrc);
-
- if (type == MEDIA_TYPE_AUDIO)
- ret = __build_loopback_audiosink(source, appsrc);
- else
- ret = __build_loopback_videosink(source, appsrc);
- if (ret != WEBRTC_ERROR_NONE) {
- SAFE_GST_OBJECT_UNREF(appsrc);
- goto error;
- }
- }
-
- source->av[idx].render.appsrc = appsrc;
-
- /* NOTE : The track id has already been used for a remote stream. Here we newly add the track id
- * for loopback rendering pipeline, we also need to distinguish which value belongs to remote's
- * or loopback's. Hence a simple operation is added to make the track id of loopback's. */
- source->av[idx].render.track_id = source->id * 100 + idx;
-
- LOG_INFO("source_id[%u] track_id[%u] pipeline[%p, %s] appsrc[%p]",
- source->id, source->av[idx].render.track_id, source->av[idx].render.pipeline,
- GST_ELEMENT_NAME(source->av[idx].render.pipeline), source->av[idx].render.appsrc);
-
- gst_element_set_state(source->av[idx].render.pipeline, GST_STATE_PLAYING);
-
- if (track_id)
- *track_id = source->av[idx].render.track_id;
-
- if (!source->av[idx].render.need_decoding)
- GENERATE_DOT(webrtc, source->av[idx].render.pipeline, "%s", GST_ELEMENT_NAME(source->av[idx].render.pipeline));
-
- return WEBRTC_ERROR_NONE;
-
-error:
- SAFE_GST_OBJECT_UNREF(source->av[idx].render.pipeline);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-int _set_audio_loopback(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info, unsigned int *track_id)
-{
- webrtc_gst_slot_s *source;
- char *stream_type;
- int stream_index;
- bool available;
- int ret = SOUND_MANAGER_ERROR_NONE;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
-
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
- WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
- "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
- RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
- "this API does not support the media packet source");
- RET_VAL_IF(stream_info == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "stream_info is NULL");
- RET_VAL_IF(source->av[AV_IDX_AUDIO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "audio loopback has already been set");
-
- LOG_INFO("webrtc[%p] source_id[%u] stream_info[%p]", webrtc, source_id, stream_info);
-
- sound_manager_get_type_from_stream_information(stream_info, &stream_type);
- sound_manager_get_index_from_stream_information(stream_info, &stream_index);
-
- ret = sound_manager_is_available_stream_information(stream_info, NATIVE_API_WEBRTC, &available);
- if (ret != SOUND_MANAGER_ERROR_NONE) {
- LOG_ERROR("failed to sound_manager_is_available_stream_information()");
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (!available) {
- LOG_ERROR("this stream info[%p, type:%s, index:%d] is not allowed to this framework", stream_info, stream_type, stream_index);
- return WEBRTC_ERROR_INVALID_PARAMETER;
- }
-
- source->sound_stream_info.type = strdup(stream_type);
- source->sound_stream_info.index = stream_index;
-
- LOG_INFO("source_id[%u] stream_info[%p, type:%s, index:%d]", source_id, stream_info, stream_type, stream_index);
-
- return __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_AUDIO, track_id);
-}
-
-int _unset_audio_loopback(webrtc_s *webrtc, unsigned int source_id)
-{
- webrtc_gst_slot_s *source = NULL;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
-
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
- WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
- "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
- RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
- "this API does not support the media packet source");
- RET_VAL_IF(!source->av[AV_IDX_AUDIO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "audio loopback was not set");
-
- LOG_INFO("source_id[%u]", source_id);
-
- if (source->sound_stream_info.type) {
- free(source->sound_stream_info.type);
- source->sound_stream_info.type = NULL;
- }
-
- gst_element_set_state(source->av[AV_IDX_AUDIO].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
-
- source->av[AV_IDX_AUDIO].render.appsrc = NULL;
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display, unsigned int *track_id)
-{
- int ret = WEBRTC_ERROR_NONE;
- webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
- RET_VAL_IF(display == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "display is NULL");
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
- WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
- "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
- RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
- "this API does not support the media packet source");
- RET_VAL_IF(source->av[AV_IDX_VIDEO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "video loopback has already been set");
-
- if (source->display == NULL) {
- source->display = _alloc_display();
- RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "source->display is NULL");
- }
-
- LOG_INFO("webrtc[%p] source_id[%u]", webrtc, source_id);
-
- _set_display_type_and_surface(source->display, type, display);
-
- ret = _apply_display(source->display);
- if (ret != WEBRTC_ERROR_NONE)
- goto error;
-
- ret = __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_VIDEO, track_id);
- if (ret != WEBRTC_ERROR_NONE)
- goto error;
-
- return WEBRTC_ERROR_NONE;
-
-error:
- _release_display(source->display);
- source->display = NULL;
- return ret;
-}
-
-int _unset_video_loopback(webrtc_s *webrtc, unsigned int source_id)
-{
- webrtc_gst_slot_s *source = NULL;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
-
- RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
- WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
- "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
- RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
- "this API does not support the media packet source");
- RET_VAL_IF(!source->av[AV_IDX_VIDEO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "video loopback was not set");
-
- LOG_INFO("source_id[%u]", source_id);
-
- if (source->display) {
- _release_display(source->display);
- source->display = NULL;
- }
-
- gst_element_set_state(source->av[AV_IDX_VIDEO].render.pipeline, GST_STATE_NULL);
- SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
-
- source->av[AV_IDX_VIDEO].render.appsrc = NULL;
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _set_display_mode_to_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e mode)
-{
- webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
- RET_VAL_IF(_set_display_mode(source->display, mode) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _set_display_mode()");
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _get_display_mode_from_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e *mode)
-{
- const webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
- RET_VAL_IF(mode == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "mode is NULL");
-
- source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
- RET_VAL_IF(_get_display_mode(source->display, mode) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _get_display_mode()");
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _set_display_visible_to_loopback(webrtc_s *webrtc, unsigned int track_id, bool visible)
-{
- webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
- RET_VAL_IF(_set_display_visible(source->display, visible) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _set_display_visible()");
-
- return WEBRTC_ERROR_NONE;
-}
-
-int _get_display_visible_from_loopback(webrtc_s *webrtc, unsigned int track_id, bool *visible)
-{
- const webrtc_gst_slot_s *source;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
- RET_VAL_IF(visible == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "visible is NULL");
-
- source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
-
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
- RET_VAL_IF(_get_display_visible(source->display, visible) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _get_display_visible()");
-
- return WEBRTC_ERROR_NONE;
-}
--- /dev/null
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc_private.h"
+#include "webrtc_source_private.h"
+
+#include <sound_manager_internal.h>
+#include <gst/video/videooverlay.h>
+
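+/* Builds the audio rendering chain (audioconvert -> audioresample -> audio sink from the ini settings)
+ * inside the audio loopback pipeline and links it to 'link_with' (the appsrc or the decodebin). */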
+static int __build_loopback_audiosink(webrtc_gst_slot_s *source, GstElement *link_with)
+{
+ webrtc_s *webrtc;
+ GstElement *audiosink;
+ GstElement *audioconvert;
+ GstElement *audioresample;
+ GList *element_list = NULL;
+ int ret = WEBRTC_ERROR_NONE;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
+ RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
+
+ webrtc = source->webrtc;
+
+ if (!(audioconvert = _create_element(DEFAULT_ELEMENT_AUDIOCONVERT, NULL)))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(element_list, audioconvert);
+
+ if (!(audioresample = _create_element(DEFAULT_ELEMENT_AUDIORESAMPLE, NULL)))
+ goto exit;
+ APPEND_ELEMENT(element_list, audioresample);
+
+ if (!(audiosink = _create_element(webrtc->ini.rendering_sink.a_sink_element, NULL)))
+ goto exit;
+ APPEND_ELEMENT(element_list, audiosink);
+
+ if (g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(audiosink)), "stream-properties")) {
+ if (source->sound_stream_info.type) {
+ ret = _apply_stream_info(audiosink, source->sound_stream_info.type, source->sound_stream_info.index,
+ SOUND_MANAGER_STREAM_NO_REFERENCE_DEVICE);
+ if (ret != WEBRTC_ERROR_NONE)
+ goto exit;
+ }
+ }
+ g_object_set(G_OBJECT(audiosink), "sync", FALSE, NULL);
+
+ if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_AUDIO].render.pipeline), element_list)) {
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!_link_elements(element_list))
+ goto exit_with_remove_from_bin;
+
+ if (!gst_element_link(link_with, audioconvert)) {
+ LOG_ERROR("failed to gst_element_link(), [%s] - [%s]", GST_ELEMENT_NAME(link_with), GST_ELEMENT_NAME(audioconvert));
+ goto exit_with_remove_from_bin;
+ }
+
+ if (!_sync_elements_state_with_parent(element_list))
+ goto exit_with_remove_from_bin;
+
+ SAFE_G_LIST_FREE(element_list);
+
+ return WEBRTC_ERROR_NONE;
+
+exit_with_remove_from_bin:
+ _remove_elements_from_bin(source->bin, element_list);
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
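+/* Builds the video rendering chain (videoconvert [-> capsfilter for EVAS] -> video sink chosen by the
+ * display type) inside the video loopback pipeline and links it to 'link_with' (the appsrc or the decodebin). */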
+static int __build_loopback_videosink(webrtc_gst_slot_s *source, GstElement *link_with)
+{
+ webrtc_s *webrtc;
+ const char *videosink_factory_name;
+ GstElement *videosink;
+ GstElement *videoconvert;
+ GstElement *capsfilter;
+ GList *element_list = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
+ RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
+ RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+ RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
+
+ webrtc = source->webrtc;
+
+ switch (source->display->type) {
+ case WEBRTC_DISPLAY_TYPE_OVERLAY:
+ case WEBRTC_DISPLAY_TYPE_ECORE_WL:
+ videosink_factory_name = webrtc->ini.rendering_sink.v_sink_element;
+ break;
+//LCOV_EXCL_START
+ case WEBRTC_DISPLAY_TYPE_EVAS:
+ videosink_factory_name = DEFAULT_ELEMENT_FAKESINK;
+ break;
+//LCOV_EXCL_STOP
+ default:
+ LOG_ERROR_IF_REACHED("invalid display type(%d)", source->display->type);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+#ifndef TIZEN_TV
+ if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY && webrtc->ini.resource_acquisition.video_overlay) {
+ int ret;
+ if ((ret = _acquire_resource_for_type(webrtc, MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY)) != WEBRTC_ERROR_NONE)
+ return ret;
+ }
+#endif
+
+ if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL)))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(element_list, videoconvert);
+
+ if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
+ GstCaps *caps;
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, NULL)))
+ goto exit;
+ APPEND_ELEMENT(element_list, capsfilter);
+
+ caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
+ "format", G_TYPE_STRING, webrtc->ini.rendering_sink.evas_native_surface_tbm_format,
+ NULL);
+ g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
+ gst_caps_unref(caps);
+ }
+
+ if (!(videosink = _create_element(videosink_factory_name, NULL)))
+ goto exit;
+ APPEND_ELEMENT(element_list, videosink);
+
+ if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY ||
+ source->display->type == WEBRTC_DISPLAY_TYPE_ECORE_WL) {
+ gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), source->display->overlay_surface_id);
+ if (!g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(videosink)), "display-geometry-method")) {
+ LOG_ERROR("could not find 'display-geometry-method' property");
+ goto exit;
+ }
+ if (!g_object_class_find_property(G_OBJECT_GET_CLASS(G_OBJECT(videosink)), "visible")) {
+ LOG_ERROR("could not find 'visible' property");
+ goto exit;
+ }
+ g_object_set(G_OBJECT(videosink),
+ "display-geometry-method", (gint)source->display->mode, /* 0: letter box, 1: origin size, 2: full screen */
+ "visible", (gboolean)source->display->visible,
+ NULL);
+//LCOV_EXCL_START
+ } else if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
+ g_object_set(G_OBJECT(videosink), "signal-handoffs", TRUE, NULL);
+ _connect_and_append_signal(&source->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), source->display);
+ }
+//LCOV_EXCL_STOP
+ if (!_add_elements_to_bin(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), element_list)) {
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!_link_elements(element_list))
+ goto exit_with_remove_from_bin;
+
+ if (!gst_element_link(link_with, videoconvert)) {
+ LOG_ERROR("failed to gst_element_link(), [%s] - [%s]", GST_ELEMENT_NAME(link_with), GST_ELEMENT_NAME(videoconvert));
+ goto exit_with_remove_from_bin;
+ }
+
+ if (!_sync_elements_state_with_parent(element_list))
+ goto exit_with_remove_from_bin;
+
+ source->display->sink_element = videosink;
+
+ SAFE_G_LIST_FREE(element_list);
+
+ return WEBRTC_ERROR_NONE;
+
+exit_with_remove_from_bin:
+ _remove_elements_from_bin(source->bin, element_list);
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
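+/* Invoked for each src pad exposed by the loopback decodebin; attaches the matching audio or video sink chain. */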
+//LCOV_EXCL_START
+static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpointer user_data)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
+ gchar *media_type;
+
+ RET_IF(source == NULL, "source is NULL");
+
+ if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
+ return;
+
+ media_type = _get_mime_type_from_pad(new_pad);
+ LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
+
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
+ return;
+ }
+
+ if (_is_audio_media_type(media_type)) {
+ ret = __build_loopback_audiosink(source, decodebin);
+ if (ret != WEBRTC_ERROR_NONE)
+ SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
+ else
+ GENERATE_DOT(source->webrtc, source->av[AV_IDX_AUDIO].render.pipeline, "%s.%s-%s",
+ GST_ELEMENT_NAME(source->av[AV_IDX_AUDIO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
+
+ } else {
+ ret = __build_loopback_videosink(source, decodebin);
+ if (ret != WEBRTC_ERROR_NONE)
+ SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+ else
+ GENERATE_DOT(source->webrtc, source->av[AV_IDX_VIDEO].render.pipeline, "%s.%s-%s",
+ GST_ELEMENT_NAME(source->av[AV_IDX_VIDEO].render.pipeline), GST_ELEMENT_NAME(decodebin), GST_PAD_NAME(new_pad));
+
+ }
+
+ g_free(media_type);
+
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to build loopback rendering sink, ret[0x%x]", ret);
+ _post_error_cb_in_idle(source->webrtc, ret);
+ }
+}
+//LCOV_EXCL_STOP
+
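+/* Creates the per-source loopback pipeline (appsrc [-> decodebin] -> sink chain), assigns the loopback
+ * track id and sets the pipeline to PLAYING. Buffers are fed into the appsrc by _source_data_probe_cb(). */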
+static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source, media_type_e type, unsigned int *track_id)
+{
+ GstElement *appsrc;
+ gchar *pipeline_name;
+ const int idx = GET_AV_IDX_BY_TYPE(type);
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ if (type == MEDIA_TYPE_VIDEO) {
+ RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+ RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
+ }
+
+ pipeline_name = g_strdup_printf("loopback-pipeline-for-source_%u", source->id);
+ source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
+ g_free(pipeline_name);
+ RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
+
+ appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, NULL);
+ if (!appsrc)
+ goto error;
+
+ g_object_set(G_OBJECT(appsrc),
+ "is-live", TRUE,
+ "format", GST_FORMAT_TIME,
+ NULL);
+
+ if (source->av[idx].render.appsrc_caps)
+ g_object_set(G_OBJECT(appsrc), "caps", source->av[idx].render.appsrc_caps, NULL);
+
+ if (source->av[idx].render.need_decoding) {
+ GstElement *decodebin = _create_element("decodebin", NULL);
+ if (!decodebin) {
+ SAFE_GST_OBJECT_UNREF(appsrc);
+ goto error;
+ }
+
+ gst_bin_add_many(GST_BIN(source->av[idx].render.pipeline), appsrc, decodebin, NULL);
+ if (!gst_element_link(appsrc, decodebin)) {
+ LOG_ERROR("failed to gst_element_link()");
+ goto error;
+ }
+
+ g_signal_connect(decodebin, "pad-added", G_CALLBACK(__loopback_decodebin_pad_added_cb), source);
+ g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(_decodebin_autoplug_select_cb), webrtc);
+
+ } else {
+ int ret = WEBRTC_ERROR_NONE;
+
+ gst_bin_add(GST_BIN(source->av[idx].render.pipeline), appsrc);
+
+ if (type == MEDIA_TYPE_AUDIO)
+ ret = __build_loopback_audiosink(source, appsrc);
+ else
+ ret = __build_loopback_videosink(source, appsrc);
+ if (ret != WEBRTC_ERROR_NONE) {
+ SAFE_GST_OBJECT_UNREF(appsrc);
+ goto error;
+ }
+ }
+
+ source->av[idx].render.appsrc = appsrc;
+
+ /* NOTE: A track id is already assigned to each remote stream. The loopback rendering pipeline
+ * needs its own track id that can be told apart from the remote ones, so it is derived from the
+ * source id; the display mode/visible helpers below recover the source id via track_id / 100. */
+ source->av[idx].render.track_id = source->id * 100 + idx;
+
+ LOG_INFO("source_id[%u] track_id[%u] pipeline[%p, %s] appsrc[%p]",
+ source->id, source->av[idx].render.track_id, source->av[idx].render.pipeline,
+ GST_ELEMENT_NAME(source->av[idx].render.pipeline), source->av[idx].render.appsrc);
+
+ gst_element_set_state(source->av[idx].render.pipeline, GST_STATE_PLAYING);
+
+ if (track_id)
+ *track_id = source->av[idx].render.track_id;
+
+ if (!source->av[idx].render.need_decoding)
+ GENERATE_DOT(webrtc, source->av[idx].render.pipeline, "%s", GST_ELEMENT_NAME(source->av[idx].render.pipeline));
+
+ return WEBRTC_ERROR_NONE;
+
+error:
+ SAFE_GST_OBJECT_UNREF(source->av[idx].render.pipeline);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
+int _set_audio_loopback(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info, unsigned int *track_id)
+{
+ webrtc_gst_slot_s *source;
+ char *stream_type;
+ int stream_index;
+ bool available;
+ int ret = SOUND_MANAGER_ERROR_NONE;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+ WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+ "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+ "this API does not support the media packet source");
+ RET_VAL_IF(stream_info == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "stream_info is NULL");
+ RET_VAL_IF(source->av[AV_IDX_AUDIO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "audio loopback has already been set");
+
+ LOG_INFO("webrtc[%p] source_id[%u] stream_info[%p]", webrtc, source_id, stream_info);
+
+ sound_manager_get_type_from_stream_information(stream_info, &stream_type);
+ sound_manager_get_index_from_stream_information(stream_info, &stream_index);
+
+ ret = sound_manager_is_available_stream_information(stream_info, NATIVE_API_WEBRTC, &available);
+ if (ret != SOUND_MANAGER_ERROR_NONE) {
+ LOG_ERROR("failed to sound_manager_is_available_stream_information()");
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!available) {
+ LOG_ERROR("this stream info[%p, type:%s, index:%d] is not allowed to this framework", stream_info, stream_type, stream_index);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ source->sound_stream_info.type = strdup(stream_type);
+ source->sound_stream_info.index = stream_index;
+
+ LOG_INFO("source_id[%u] stream_info[%p, type:%s, index:%d]", source_id, stream_info, stream_type, stream_index);
+
+ return __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_AUDIO, track_id);
+}
+
+int _unset_audio_loopback(webrtc_s *webrtc, unsigned int source_id)
+{
+ webrtc_gst_slot_s *source = NULL;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+ WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_AUDIO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+ "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+ "this API does not support the media packet source");
+ RET_VAL_IF(!source->av[AV_IDX_AUDIO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "audio loopback was not set");
+
+ LOG_INFO("source_id[%u]", source_id);
+
+ if (source->sound_stream_info.type) {
+ free(source->sound_stream_info.type);
+ source->sound_stream_info.type = NULL;
+ }
+
+ gst_element_set_state(source->av[AV_IDX_AUDIO].render.pipeline, GST_STATE_NULL);
+ SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_AUDIO].render.pipeline);
+
+ source->av[AV_IDX_AUDIO].render.appsrc = NULL;
+
+ return WEBRTC_ERROR_NONE;
+}
+
+int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display, unsigned int *track_id)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF(display == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "display is NULL");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+ WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+ "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+ "this API does not support the media packet source");
+ RET_VAL_IF(source->av[AV_IDX_VIDEO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "video loopback has already been set");
+
+ if (source->display == NULL) {
+ source->display = _alloc_display();
+ RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "source->display is NULL");
+ }
+
+ LOG_INFO("webrtc[%p] source_id[%u]", webrtc, source_id);
+
+ _set_display_type_and_surface(source->display, type, display);
+
+ ret = _apply_display(source->display);
+ if (ret != WEBRTC_ERROR_NONE)
+ goto error;
+
+ ret = __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_VIDEO, track_id);
+ if (ret != WEBRTC_ERROR_NONE)
+ goto error;
+
+ return WEBRTC_ERROR_NONE;
+
+error:
+ _release_display(source->display);
+ source->display = NULL;
+ return ret;
+}
+
+int _unset_video_loopback(webrtc_s *webrtc, unsigned int source_id)
+{
+ webrtc_gst_slot_s *source = NULL;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+ WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+ "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+ "this API does not support the media packet source");
+ RET_VAL_IF(!source->av[AV_IDX_VIDEO].render.pipeline, WEBRTC_ERROR_INVALID_OPERATION, "video loopback was not set");
+
+ LOG_INFO("source_id[%u]", source_id);
+
+ if (source->display) {
+ _release_display(source->display);
+ source->display = NULL;
+ }
+
+ gst_element_set_state(source->av[AV_IDX_VIDEO].render.pipeline, GST_STATE_NULL);
+ SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+
+ source->av[AV_IDX_VIDEO].render.appsrc = NULL;
+
+ return WEBRTC_ERROR_NONE;
+}
+
+int _set_display_mode_to_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e mode)
+{
+ webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
+ RET_VAL_IF(_set_display_mode(source->display, mode) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _set_display_mode()");
+
+ return WEBRTC_ERROR_NONE;
+}
+
+int _get_display_mode_from_loopback(webrtc_s *webrtc, unsigned int track_id, webrtc_display_mode_e *mode)
+{
+ const webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+ RET_VAL_IF(mode == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "mode is NULL");
+
+ source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
+ RET_VAL_IF(_get_display_mode(source->display, mode) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _get_display_mode()");
+
+ return WEBRTC_ERROR_NONE;
+}
+
+int _set_display_visible_to_loopback(webrtc_s *webrtc, unsigned int track_id, bool visible)
+{
+ webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
+ RET_VAL_IF(_set_display_visible(source->display, visible) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _set_display_visible()");
+
+ return WEBRTC_ERROR_NONE;
+}
+
+int _get_display_visible_from_loopback(webrtc_s *webrtc, unsigned int track_id, bool *visible)
+{
+ const webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(track_id < TRACK_ID_THRESHOLD_OF_LOOPBACK, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+ RET_VAL_IF(visible == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "visible is NULL");
+
+ source = _get_slot_by_id(webrtc->gst.source_slots, track_id / 100);
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->av[AV_IDX_VIDEO].render.track_id != track_id, WEBRTC_ERROR_INVALID_PARAMETER, "invalid track_id(%d)", track_id);
+
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
+ RET_VAL_IF(_get_display_visible(source->display, visible) != WEBRTC_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to _get_display_visible()");
+
+ return WEBRTC_ERROR_NONE;
+}
+
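+/* Pad probe callback that pushes each buffer from the source's pad into the appsrc of the
+ * corresponding loopback render pipeline, once the loopback has been set for that media type. */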
+GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ GstBuffer *buffer;
+ GstElement *appsrc;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+ RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+ RET_VAL_IF(probe_data->slot == NULL, GST_PAD_PROBE_REMOVE, "probe_data->slot is NULL");
+
+ switch (probe_data->av_idx) {
+ case AV_IDX_AUDIO:
+ if (!probe_data->slot->sound_stream_info.type)
+ return GST_PAD_PROBE_OK;
+ break;
+ case AV_IDX_VIDEO:
+ if (!probe_data->slot->display)
+ return GST_PAD_PROBE_OK;
+ break;
+ default:
+ LOG_ERROR_IF_REACHED("av_idx(%d)", probe_data->av_idx);
+ return GST_PAD_PROBE_OK;
+ }
+
+ appsrc = probe_data->slot->av[probe_data->av_idx].render.appsrc;
+ if (appsrc) {
+ buffer = gst_pad_probe_info_get_buffer(info);
+ LOG_VERBOSE("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
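+/* Adds a buffer probe to the given source pad so that its data can be mirrored to the loopback render pipeline. */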
+void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+ probe_userdata_s *probe_userdata;
+
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+ RET_IF(pad == NULL, "pad is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->slot = source;
+ probe_userdata->av_idx = idx;
+ source->av[idx].render.src_pad = pad;
+ source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
+ source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
+}
+
+void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+
+ if (source->av[idx].render.src_pad_probe_id == 0)
+ return;
+
+ LOG_DEBUG("source[id:%u, av_idx:%s] pad[%p] probe_id[%lu]",
+ source->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+
+ gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+ source->av[idx].render.src_pad_probe_id = 0;
+
+ gst_object_unref(source->av[idx].render.src_pad);
+ source->av[idx].render.src_pad = NULL;
+
+ _unset_caps_for_render(source, idx);
+}
+
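+/* Stores the caps for the loopback appsrc; if the appsrc already exists, the caps are applied immediately. */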
+void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(caps == NULL, "caps is NULL");
+
+ source->av[av_idx].render.appsrc_caps = caps;
+
+ if (source->av[av_idx].render.appsrc)
+ g_object_set(G_OBJECT(source->av[av_idx].render.appsrc), "caps", caps, NULL);
+}
+
+void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+
+ if (!(source->av[av_idx].render.appsrc_caps))
+ return;
+
+ gst_caps_unref(source->av[av_idx].render.appsrc_caps);
+ source->av[av_idx].render.appsrc_caps = NULL;
+}
+
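+/* Rebuilds the raw video caps with the given resolution and replaces the caps of the video loopback appsrc. */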
+int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height)
+{
+ GstCaps *new_caps;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+
+ if (!(new_caps = _make_video_raw_caps_with_resolution(source, &source->webrtc->ini, width, height)))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ PRINT_CAPS(new_caps, "appsrc");
+
+ _unset_caps_for_render(source, AV_IDX_VIDEO);
+ _set_caps_for_render(source, new_caps, AV_IDX_VIDEO);
+
+ return WEBRTC_ERROR_NONE;
+}
source->av[idx].src_pad = NULL;
}
-GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
-{
- probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
- GstBuffer *buffer;
- GstElement *appsrc;
- GstFlowReturn gst_ret = GST_FLOW_OK;
-
- RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
- RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
- RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
- RET_VAL_IF(probe_data->slot == NULL, GST_PAD_PROBE_REMOVE, "probe_data->slot is NULL");
-
- switch (probe_data->av_idx) {
- case AV_IDX_AUDIO:
- if (!probe_data->slot->sound_stream_info.type)
- return GST_PAD_PROBE_OK;
- break;
- case AV_IDX_VIDEO:
- if (!probe_data->slot->display)
- return GST_PAD_PROBE_OK;
- break;
- default:
- LOG_ERROR_IF_REACHED("av_idx(%d)", probe_data->av_idx);
- return GST_PAD_PROBE_OK;
- }
-
- appsrc = probe_data->slot->av[probe_data->av_idx].render.appsrc;
- if (appsrc) {
- buffer = gst_pad_probe_info_get_buffer(info);
- LOG_VERBOSE("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
- g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK)
- LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
- }
-
- return GST_PAD_PROBE_OK;
-}
-
-void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
-{
- probe_userdata_s *probe_userdata;
-
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
- RET_IF(pad == NULL, "pad is NULL");
- RET_IF(probe_cb == NULL, "probe_cb is NULL");
-
- probe_userdata = g_new0(probe_userdata_s, 1);
- probe_userdata->slot = source;
- probe_userdata->av_idx = idx;
- source->av[idx].render.src_pad = pad;
- source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
- probe_cb, probe_userdata, g_free);
-
- LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
- source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
-}
-
-void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
-{
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
-
- if (source->av[idx].render.src_pad_probe_id == 0)
- return;
-
- LOG_DEBUG("source[id:%u, av_idx:%s] pad[%p] probe_id[%lu]",
- source->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
-
- gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
- source->av[idx].render.src_pad_probe_id = 0;
-
- gst_object_unref(source->av[idx].render.src_pad);
- source->av[idx].render.src_pad = NULL;
-
- _unset_caps_for_render(source, idx);
-}
-
static bool __is_linked_pad(webrtc_gst_slot_s *source, const char *pad_name)
{
GstIterator *iter = NULL;
return WEBRTC_ERROR_INVALID_OPERATION;
}
-void _set_caps_for_render(webrtc_gst_slot_s *source, GstCaps *caps, int av_idx)
-{
- RET_IF(source == NULL, "source is NULL");
- RET_IF(caps == NULL, "caps is NULL");
-
- source->av[av_idx].render.appsrc_caps = caps;
-
- if (source->av[av_idx].render.appsrc)
- g_object_set(G_OBJECT(source->av[av_idx].render.appsrc), "caps", caps, NULL);
-}
-
-void _unset_caps_for_render(webrtc_gst_slot_s *source, int av_idx)
-{
- RET_IF(source == NULL, "source is NULL");
-
- if (!(source->av[av_idx].render.appsrc_caps))
- return;
-
- gst_caps_unref(source->av[av_idx].render.appsrc_caps);
- source->av[av_idx].render.appsrc_caps = NULL;
-}
-
void _set_video_src_resolution(webrtc_gst_slot_s *source, int width, int height)
{
RET_IF(source == NULL, "source is NULL");
return caps;
}
-
-int _update_caps_for_render_with_resolution(webrtc_gst_slot_s *source, int width, int height)
-{
- GstCaps *new_caps;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
-
- if (!(new_caps = _make_video_raw_caps_with_resolution(source, &source->webrtc->ini, width, height)))
- return WEBRTC_ERROR_INVALID_OPERATION;
- PRINT_CAPS(new_caps, "appsrc");
-
- _unset_caps_for_render(source, AV_IDX_VIDEO);
- _set_caps_for_render(source, new_caps, AV_IDX_VIDEO);
-
- return WEBRTC_ERROR_NONE;
-}