From: Sangchul Lee
Date: Wed, 20 Dec 2023 00:36:04 +0000 (+0900)
Subject: webrtc_internal: Add APIs to add/remove transceiver encoding option
X-Git-Tag: accepted/tizen/unified/20240215.130027~5
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=060828225281c1c9258ac205ea895f0724f1aa89;p=platform%2Fcore%2Fapi%2Fwebrtc.git

webrtc_internal: Add APIs to add/remove transceiver encoding option

Two functions are added:
 : webrtc_media_source_add_transceiver_encoding()
 : webrtc_media_source_remove_transceiver_encoding()

Multiple source bins for RID-based simulcast can be added to a particular
media source. Each source bin has its own encoding option and an RTP
payloader with a specific SSRC. RTP header extensions are also added
according to the definitions from GStreamer.

[Version] 0.4.38
[Issue Type] Internal API

Change-Id: Iebc1fd223d81b04a7eb47a7a5d7181277737f7ad
Signed-off-by: Sangchul Lee
---

diff --git a/include/webrtc_internal.h b/include/webrtc_internal.h
index 56c3d281..bbe7611f 100644
--- a/include/webrtc_internal.h
+++ b/include/webrtc_internal.h
@@ -321,6 +321,47 @@ int webrtc_media_source_set_payload_type(webrtc_h webrtc, unsigned int source_id
  */
 int webrtc_media_source_get_payload_type(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, unsigned int *pt);
 
+/**
+ * @internal
+ * @brief Adds the transceiver encoding option to the media source.
+ * @since_tizen 9.0
+ * @remarks @a source_id should be a media source of #WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST, #WEBRTC_MEDIA_SOURCE_TYPE_MIC or #WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST;
+ *          otherwise this function will return #WEBRTC_ERROR_INVALID_PARAMETER.\n
+ *          If @a media_type is #WEBRTC_MEDIA_TYPE_AUDIO, @a width and @a height will be ignored.
+ * @param[in] webrtc WebRTC handle
+ * @param[in] source_id The media source id
+ * @param[in] media_type The media type
+ * @param[in] rid The RID (RTP Stream Identifier)
+ * @param[in] target_bitrate The target bitrate of the encoder
+ * @param[in] width The video width (video only)
+ * @param[in] height The video height (video only)
+ * @return @c 0 on success,
+ *         otherwise a negative error value
+ * @retval #WEBRTC_ERROR_NONE Successful
+ * @retval #WEBRTC_ERROR_INVALID_PARAMETER Invalid parameter
+ * @retval #WEBRTC_ERROR_INVALID_OPERATION Invalid operation
+ * @pre Add media source to @a webrtc to get @a source_id by calling webrtc_add_media_source().
+ * @see webrtc_media_source_remove_transceiver_encoding()
+ */
+int webrtc_media_source_add_transceiver_encoding(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid, int target_bitrate, int width, int height);
+
+/**
+ * @internal
+ * @brief Removes the transceiver encoding option from the media source.
+ * @since_tizen 9.0
+ * @param[in] webrtc WebRTC handle
+ * @param[in] source_id The media source id
+ * @param[in] media_type The media type
+ * @param[in] rid The RID (RTP Stream Identifier)
+ * @return @c 0 on success,
+ *         otherwise a negative error value
+ * @retval #WEBRTC_ERROR_NONE Successful
+ * @retval #WEBRTC_ERROR_INVALID_PARAMETER Invalid parameter
+ * @pre Add media source to @a webrtc to get @a source_id by calling webrtc_add_media_source().
+ * @see webrtc_media_source_add_transceiver_encoding()
+ */
+int webrtc_media_source_remove_transceiver_encoding(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid);
+
 /**
  * @internal
  * @brief Sets the probability of RTP packet dropping.
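
[Usage sketch (not part of this patch)]

The two new internal APIs are intended to be called after webrtc_add_media_source()
and while the handle is still in the IDLE state; the wrappers added to
webrtc_internal.c below reject any other state with WEBRTC_ERROR_INVALID_STATE.
The C sketch here assumes the public webrtc.h API (webrtc_add_media_source())
plus access to webrtc_internal.h, and the RID names, bitrates and resolutions
are placeholders for illustration only.

#include <webrtc.h>
#include <webrtc_internal.h>

/* Sketch only: configure three RID-based simulcast layers on a VIDEOTEST
 * source while the handle is in the IDLE state. */
static int setup_simulcast_source(webrtc_h webrtc, unsigned int *source_id)
{
	int ret;

	ret = webrtc_add_media_source(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST, source_id);
	if (ret != WEBRTC_ERROR_NONE)
		return ret;

	/* Each call adds one encoding slot (at most MAX_ENCODINGS_NUM): its own
	 * source bin, encoder bitrate, resolution and an RTP payloader whose
	 * SSRC is picked randomly in _add_source_encoding(). */
	ret = webrtc_media_source_add_transceiver_encoding(webrtc, *source_id,
		WEBRTC_MEDIA_TYPE_VIDEO, "high", 1500000, 1280, 720);
	if (ret != WEBRTC_ERROR_NONE)
		return ret;

	ret = webrtc_media_source_add_transceiver_encoding(webrtc, *source_id,
		WEBRTC_MEDIA_TYPE_VIDEO, "mid", 700000, 640, 360);
	if (ret != WEBRTC_ERROR_NONE)
		return ret;

	ret = webrtc_media_source_add_transceiver_encoding(webrtc, *source_id,
		WEBRTC_MEDIA_TYPE_VIDEO, "low", 200000, 320, 180);
	if (ret != WEBRTC_ERROR_NONE) {
		/* An encoding can be removed again by its RID while still in IDLE. */
		webrtc_media_source_remove_transceiver_encoding(webrtc, *source_id,
			WEBRTC_MEDIA_TYPE_VIDEO, "mid");
		return ret;
	}

	return WEBRTC_ERROR_NONE;
}

When the source bin is completed later (see __complete_rest_of_videosrc() and
__complete_rest_of_audiosrc() in webrtc_source.c), a non-empty encodings table
routes the build into _complete_rest_of_src_for_simulcast(), which links each
per-RID encoding bin to an rtpfunnel and adds the sdes:mid / sdes:rtp-stream-id
header extensions to its payloader.
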
diff --git a/include/webrtc_private.h b/include/webrtc_private.h index e530d924..24434707 100644 --- a/include/webrtc_private.h +++ b/include/webrtc_private.h @@ -215,13 +215,13 @@ do { \ LOG_DEBUG("%s is prepended", GST_ELEMENT_NAME(x_element)); \ } while (0) -#define CREATE_ELEMENT_FROM_REGISTRY(x_elem_info, x_klass_name, x_sink_caps, x_src_caps, x_excluded_elements, x_element, x_i) \ +#define CREATE_ELEMENT_FROM_REGISTRY(x_elem_info, x_klass_name, x_sink_caps, x_src_caps, x_excluded_elements, x_element, x_ssrc) \ do { \ x_elem_info.klass_name = x_klass_name; \ x_elem_info.sink_caps = x_sink_caps; \ x_elem_info.src_caps = x_src_caps; \ x_elem_info.excluded_elements = x_excluded_elements; \ - x_element = _create_element_from_registry(&x_elem_info, x_i); \ + x_element = _create_element_from_registry(&x_elem_info, x_ssrc); \ if (!x_element) \ LOG_ERROR("failed to create element of [%s]", x_klass_name); \ if (x_elem_info.sink_caps) \ @@ -290,6 +290,7 @@ do { \ #define TRACK_ID_THRESHOLD_OF_LOOPBACK 100 #define MAX_MLINE_NUM 32 #define MAX_SOURCE_NUM 32 +#define MAX_ENCODINGS_NUM 4 /* See webrtc_transceiver_codec_e */ #define CODEC_TYPE_AUDIO 0x00000100 @@ -582,6 +583,7 @@ typedef struct _webrtc_gst_slot_s { GstWebRTCRTPTransceiver *transceiver; webrtc_transceiver_direction_e direction; gchar *mid; + gchar *payloader_factory_name; const char *codec; GstPad *src_pad; gulong src_pad_probe_id; @@ -592,6 +594,7 @@ typedef struct _webrtc_gst_slot_s { int packet_loss_percentage; unsigned int pt; bool pt_set_by_api; + GHashTable *encodings; struct { unsigned int track_id; bool need_decoding; @@ -656,6 +659,17 @@ typedef struct { webrtc_gst_slot_s *slot; } probe_userdata_s; +typedef struct { + GstBin *bin; + int ssrc; + int target_bitrate; + int width; + int height; + bool active; + GstPad *src_pad; + webrtc_gst_slot_s *source; +} webrtc_transceiver_encoding_s; + typedef struct _webrtc_data_channel_s { webrtc_s *webrtc; GMutex mutex; @@ -731,6 +745,9 @@ int _get_transceiver_direction(webrtc_s *webrtc, unsigned int source_id, webrtc_ int _set_transceiver_codec(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, webrtc_transceiver_codec_e codec); int _get_transceiver_codec(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, webrtc_transceiver_codec_e *codec); int _foreach_supported_transceiver_codec(webrtc_s *webrtc, webrtc_media_source_type_e source_type, webrtc_media_type_e media_type, webrtc_media_source_supported_transceiver_codec_cb callback, void *user_data); +int _add_transceiver_encoding(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid, int target_bitrate, int width, int height); +int _remove_transceiver_encoding(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid); +GstCaps *_make_rtp_caps_with_encoding(webrtc_gst_slot_s *source, bool is_audio); /* file source */ int _set_media_path(webrtc_s *webrtc, unsigned int source_id, const char *path); @@ -800,6 +817,14 @@ int _get_video_framerate(webrtc_s *webrtc, unsigned int source_id, int *framerat int _set_camera_device_id(webrtc_s *webrtc, unsigned int source_id, unsigned int device_id); int _get_camera_device_id(webrtc_s *webrtc, unsigned int source_id, unsigned int *device_id); int _set_sound_stream_info(webrtc_s *webrtc, unsigned int source_id, sound_stream_info_h stream_info); +int _build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool use_mic, int encoding_idx); +int _build_videotestsrc(webrtc_s *webrtc, 
webrtc_gst_slot_s *source, int encoding_idx); +int _add_source_encoding(webrtc_gst_slot_s *source, int av_idx, const char *rid, int target_bitrate, int width, int height, int *ssrc); +int _remove_source_encoding(webrtc_gst_slot_s *source, int av_idx, const char *rid); +void _encoding_slot_destroy_cb(gpointer data); +int _complete_rest_of_src_for_simulcast(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool is_audio); +GstCaps *_create_simulcast_caps(webrtc_gst_slot_s *source, bool is_send, bool is_audio); +int _add_no_target_ghostpad_to_encoding(webrtc_transceiver_encoding_s *encoding, GstPad **new_pad); /* sink */ bool _is_owner_of_track_build_context(webrtc_s *webrtc, unsigned int track_id); @@ -901,8 +926,9 @@ int _gst_build_pipeline(webrtc_s *webrtc); void _gst_destroy_pipeline(webrtc_s *webrtc); int _gst_pipeline_set_state(webrtc_s *webrtc, GstState state); +gchar *_append_num_to_string(const char *str, int i); GstElement *_create_element(const char *factory_name, const char *name); -GstElement *_create_element_from_registry(element_info_s *elem_info, int i); +GstElement *_create_element_from_registry(element_info_s *elem_info, int ssrc); bool _sync_elements_state_with_parent(GList *element_list); bool _add_elements_to_bin(GstBin *bin, GList *element_list); bool _link_elements(GList *element_list); @@ -934,6 +960,7 @@ void _connect_and_append_signal(GList **signals, GObject *obj, const char *sig_n void _disconnect_signal(gpointer data); webrtc_gst_slot_s* _get_slot_by_id(GHashTable *slots, unsigned int id); +GstPad *_add_no_target_ghostpad(GstBin *bin, const char *pad_name, bool is_src); int _add_no_target_ghostpad_to_slot(webrtc_gst_slot_s *slot, bool is_src, GstPad **new_pad); int _set_ghost_pad_target(GstPad *ghost_pad, GstElement *target_element, bool is_src); void _generate_dot(GstElement *pipeline, const gchar *name); diff --git a/include/webrtc_source_private.h b/include/webrtc_source_private.h index ed583447..f8c66151 100644 --- a/include/webrtc_source_private.h +++ b/include/webrtc_source_private.h @@ -42,9 +42,11 @@ #define DEFAULT_ELEMENT_VIDEOSCALE "videoscale" #define DEFAULT_ELEMENT_VIDEORATE "videorate" #define DEFAULT_ELEMENT_FILESRC "filesrc" +#define DEFAULT_ELEMENT_RTP_FUNNEL "rtpfunnel" #define ELEMENT_NAME_FIRST_CAPSFILTER "firstCapsfilter" #define ELEMENT_NAME_RTP_CAPSFILTER "rtpCapsfilter" +#define ELEMENT_NAME_RTP_FUNNEL_CAPSFILTER "rtpFunnelCapsfilter" #define ELEMENT_NAME_ENCODER_CAPSFILTER "encoderCapsfilter" #define ELEMENT_NAME_VIDEO_SRC "videoSrc" #define ELEMENT_NAME_VIDEO_SWITCH "videoSwitch" @@ -76,20 +78,28 @@ typedef enum { ELEMENT_FAKESINK } gst_element_e; +int _build_src_check_params_and_get_ini_source(webrtc_s *webrtc, webrtc_gst_slot_s *source, const ini_item_media_source_s **ini_source); const char *_get_source_element(webrtc_s *webrtc, int type); GstElement *_find_element_in_bin(GstBin *bin, const gchar *name); bool _is_hw_encoder_used(webrtc_s *webrtc, webrtc_media_source_type_e source_type, media_type_e media_type); GstCaps *_get_caps_from_encoded_audio_media_type(const char *media_type, int channels, int samplerate); GstCaps *_get_caps_from_encoded_video_media_type(const char *media_type, int width, int height); -GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, unsigned int ssrc, webrtc_gst_slot_s *source, GstElement *encoder); +GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, int ssrc, webrtc_gst_slot_s *source, GstElement *encoder); const char *_get_element_name(int av_idx, gst_element_e 
element); GstPadProbeReturn _payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data); void _add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb); void _remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx); int _link_source_with_webrtcbin(webrtc_gst_slot_s *source, GstElement *webrtcbin); -int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list, bool is_audio); +webrtc_transceiver_encoding_s *_find_encoding_by_ssrc(GHashTable *encodings, int ssrc); +bool _is_videoscale_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source); +bool _is_videorate_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source); +GstElement *_prepare_first_capsfilter(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, int av_idx, GList **element_list, bool *encoded_support, GstCaps **caps_for_render); +GstElement *_prepare_videorate(webrtc_gst_slot_s *source, int ssrc, GList **element_list); +int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, bool need_capsfilter, GList **element_list, bool is_audio); +GstCaps *_make_default_encoded_caps(webrtc_gst_slot_s *source, int ssrc, webrtc_ini_s *ini, gchar **media_type); int _set_encoder_element_bitrate(GstElement *encoder, int target_bitrate); int _get_encoder_element_bitrate(GstElement *encoder, int *target_bitrate); +GstElement *_prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, bool is_audio); void _set_video_src_resolution(webrtc_gst_slot_s *source, int width, int height); void _set_video_src_origin_resolution(webrtc_gst_slot_s *source, int width, int height); GstCaps *_make_video_raw_caps_with_resolution(webrtc_gst_slot_s *source, webrtc_ini_s *ini, int width, int height); diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec index 4b63b4ac..50292edd 100644 --- a/packaging/capi-media-webrtc.spec +++ b/packaging/capi-media-webrtc.spec @@ -1,6 +1,6 @@ Name: capi-media-webrtc Summary: A WebRTC library in Tizen Native API -Version: 0.4.37 +Version: 0.4.38 Release: 0 Group: Multimedia/API License: Apache-2.0 diff --git a/src/webrtc_internal.c b/src/webrtc_internal.c index 2e3753d5..0c2158d5 100644 --- a/src/webrtc_internal.c +++ b/src/webrtc_internal.c @@ -229,6 +229,36 @@ int webrtc_media_source_get_payload_type(webrtc_h webrtc, unsigned int source_id return WEBRTC_ERROR_NONE; } +int webrtc_media_source_add_transceiver_encoding(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid, int target_bitrate, int width, int height) +{ + g_autoptr(GMutexLocker) locker = NULL; + webrtc_s *_webrtc = (webrtc_s *)webrtc; + + RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(rid == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "rid is NULL"); + + locker = g_mutex_locker_new(&_webrtc->mutex); + + RET_VAL_IF(_webrtc->state != WEBRTC_STATE_IDLE, WEBRTC_ERROR_INVALID_STATE, "the state should be IDLE"); + + return _add_transceiver_encoding(webrtc, source_id, media_type, rid, target_bitrate, width, height); +} + +int webrtc_media_source_remove_transceiver_encoding(webrtc_h webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid) +{ + g_autoptr(GMutexLocker) locker = NULL; + webrtc_s *_webrtc = (webrtc_s *)webrtc; + + RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(rid == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "rid is NULL"); + + 
locker = g_mutex_locker_new(&_webrtc->mutex); + + RET_VAL_IF(_webrtc->state != WEBRTC_STATE_IDLE, WEBRTC_ERROR_INVALID_STATE, "the state should be IDLE"); + + return _remove_transceiver_encoding(webrtc, source_id, media_type, rid); +} + int webrtc_set_rtp_packet_drop_probability(webrtc_h webrtc, bool sender, float probability) { webrtc_s *_webrtc = (webrtc_s *)webrtc; diff --git a/src/webrtc_private.c b/src/webrtc_private.c index c35e1cc0..eefbddb0 100644 --- a/src/webrtc_private.c +++ b/src/webrtc_private.c @@ -985,27 +985,27 @@ static int __rank_compare(GstPluginFeature *first, GstPluginFeature *second) return second_rank - first_rank; } -static gchar *__append_num_to_string(const char *str, int i) +gchar *_append_num_to_string(const char *str, int i) { if (i == -1) return g_strdup(str); return g_strdup_printf("%s%i", str, i); } -static gchar *__get_element_name_if_encoder(const char *klass_name, int i) +static gchar *__get_element_name_if_encoder(const char *klass_name, int ssrc) { RET_VAL_IF(klass_name == NULL, NULL, "klass_name is NULL"); if (g_strrstr(klass_name, GST_KLASS_NAME_ENCODER_AUDIO)) - return __append_num_to_string(ELEMENT_NAME_AUDIO_ENCODER, i); + return _append_num_to_string(ELEMENT_NAME_AUDIO_ENCODER, ssrc); if (g_strrstr(klass_name, GST_KLASS_NAME_ENCODER_VIDEO)) - return __append_num_to_string(ELEMENT_NAME_VIDEO_ENCODER, i); + return _append_num_to_string(ELEMENT_NAME_VIDEO_ENCODER, ssrc); return NULL; } -GstElement *_create_element_from_registry(element_info_s *elem_info, int i) +GstElement *_create_element_from_registry(element_info_s *elem_info, int ssrc) { GstElement *element = NULL; GList *factories = NULL; @@ -1020,7 +1020,8 @@ GstElement *_create_element_from_registry(element_info_s *elem_info, int i) if (factories) { factory = GST_ELEMENT_FACTORY(factories->data); LOG_INFO("sorted result element is [%s]", GST_OBJECT_NAME(factory)); - element_name = __get_element_name_if_encoder(elem_info->klass_name, i); + if (!(element_name = __get_element_name_if_encoder(elem_info->klass_name, ssrc))) + element_name = _append_num_to_string(GST_OBJECT_NAME(factory), ssrc); element = _create_element(GST_OBJECT_NAME(factory), element_name); } else { LOG_DEBUG("could not find any compatible element for klass_name[%s]", elem_info->klass_name); @@ -1259,7 +1260,7 @@ static void __webrtcbin_ice_connection_state_cb(GstElement *webrtcbin, GParamSpe } //LCOV_EXCL_STOP -static GstPad* __add_no_target_ghostpad(GstBin *bin, const char *pad_name, bool is_src) +GstPad *_add_no_target_ghostpad(GstBin *bin, const char *pad_name, bool is_src) { gchar *bin_name = NULL; GstPad *ghost_pad = NULL; @@ -1334,7 +1335,7 @@ int _add_no_target_ghostpad_to_slot(webrtc_gst_slot_s *slot, bool is_src, GstPad pad_name = g_strdup_printf("%s_%u", is_src ? 
"src" : "sink", slot->id); } - if (!(*new_pad = __add_no_target_ghostpad(slot->bin, pad_name, is_src))) { + if (!(*new_pad = _add_no_target_ghostpad(slot->bin, pad_name, is_src))) { LOG_ERROR("failed to add new ghost pad[%s] for bin[%s]", pad_name, GST_ELEMENT_NAME(slot->bin)); g_free(pad_name); return WEBRTC_ERROR_INVALID_OPERATION; diff --git a/src/webrtc_source.c b/src/webrtc_source.c index d82fee5b..8718ba4c 100644 --- a/src/webrtc_source.c +++ b/src/webrtc_source.c @@ -26,21 +26,6 @@ static GstPadProbeReturn __camerasrc_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data); -static int __build_src_check_params_and_get_ini_source(webrtc_s *webrtc, webrtc_gst_slot_s *source, const ini_item_media_source_s **ini_source) -{ - RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); - RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); - RET_VAL_IF(ini_source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "ini_source is NULL"); - RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL"); - - if (!(*ini_source = _ini_get_source_by_type(&webrtc->ini, source->type))) { - LOG_ERROR("ini_source is NULL"); - return WEBRTC_ERROR_INVALID_OPERATION; - } - - return WEBRTC_ERROR_NONE; -} - //LCOV_EXCL_START static int __mute_by_changing_property(webrtc_gst_slot_s *source, GstElement *videotestsrc, bool mute) { @@ -119,6 +104,9 @@ static int __complete_rest_of_videosrc(webrtc_s *webrtc, webrtc_gst_slot_s *sour if (source->av[AV_IDX_VIDEO].render.src_pad_probe_id > 0) return WEBRTC_ERROR_NONE; + if (g_hash_table_size(source->av[AV_IDX_VIDEO].encodings) > 0) + return _complete_rest_of_src_for_simulcast(webrtc, source, false); + if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_SCREEN) { GstElement *videoconvert; @@ -136,7 +124,7 @@ static int __complete_rest_of_videosrc(webrtc_s *webrtc, webrtc_gst_slot_s *sour } } - if (_create_rest_of_elements(webrtc, source, true, &element_list, false) != WEBRTC_ERROR_NONE) + if (_create_rest_of_elements(webrtc, source, -1, true, &element_list, false) != WEBRTC_ERROR_NONE) goto exit; if (!_add_elements_to_bin(source->bin, element_list)) { @@ -208,8 +196,8 @@ static int __build_camerasrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) GstElement *camerasrc; const ini_item_media_source_s *ini_source; - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_VIDEO].src_pad); RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); @@ -253,13 +241,19 @@ static int __complete_rest_of_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *sour GstElement *capsfilter; const ini_item_media_source_s *ini_source; - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); /* skip when it is already completed. 
e.g) start() -> stop() -> start() again */ if (source->av[AV_IDX_AUDIO].render.src_pad_probe_id > 0) return WEBRTC_ERROR_NONE; + source->av[AV_IDX_AUDIO].inbandfec = ini_source->use_inbandfec; + source->av[AV_IDX_AUDIO].packet_loss_percentage = ini_source->packet_loss_percentage; + + if (g_hash_table_size(source->av[AV_IDX_AUDIO].encodings) > 0) + return _complete_rest_of_src_for_simulcast(webrtc, source, true); + if (!(volume = _create_element(DEFAULT_ELEMENT_VOLUME, ELEMENT_NAME_VOLUME))) goto exit; if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) { @@ -270,10 +264,7 @@ static int __complete_rest_of_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *sour g_object_set(volume, "mute", (gboolean)source->av[AV_IDX_AUDIO].mute, NULL); - source->av[AV_IDX_AUDIO].inbandfec = ini_source->use_inbandfec; - source->av[AV_IDX_AUDIO].packet_loss_percentage = ini_source->packet_loss_percentage; - - if (_create_rest_of_elements(webrtc, source, true, &element_list, true) != WEBRTC_ERROR_NONE) + if (_create_rest_of_elements(webrtc, source, -1, true, &element_list, true) != WEBRTC_ERROR_NONE) goto exit; if (!_add_elements_to_bin(source->bin, element_list)) { @@ -347,68 +338,115 @@ add_transceiver: return WEBRTC_ERROR_NONE; } -static int __build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool use_mic) +int _build_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool use_mic, int ssrc) { int ret; const char *source_factory_name; GstElement *audiosrc; const ini_item_media_source_s *ini_source; + g_autofree gchar *element_name = NULL; + bool is_simulcast = (ssrc != -1); + GstBin *bin; + webrtc_transceiver_encoding_s *encoding; + + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); + + if (!is_simulcast) { + ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_AUDIO].src_pad); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); + + source->media_types = MEDIA_TYPE_AUDIO; + source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types); + source->av[AV_IDX_AUDIO].codec = ini_source->a_codecs[0]; + bin = source->bin; + } else { + encoding = _find_encoding_by_ssrc(source->av[AV_IDX_AUDIO].encodings, ssrc); + ASSERT(encoding); + ret = _add_no_target_ghostpad_to_encoding(encoding, &encoding->src_pad); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); - - ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_AUDIO].src_pad); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); - - source->media_types = MEDIA_TYPE_AUDIO; - source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types); - source->av[AV_IDX_AUDIO].codec = ini_source->a_codecs[0]; + bin = encoding->bin; + } source_factory_name = _get_source_element(webrtc, use_mic ? WEBRTC_MEDIA_SOURCE_TYPE_MIC : WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST); - if (!(audiosrc = _create_element(source_factory_name, use_mic ? ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC))) + element_name = _append_num_to_string(use_mic ? 
ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC, ssrc); + if (!(audiosrc = _create_element(source_factory_name, element_name))) return WEBRTC_ERROR_INVALID_OPERATION; _gst_set_element_properties(audiosrc, ini_source->source_element_properties); - if (!gst_bin_add(source->bin, audiosrc)) { - LOG_ERROR("failed to gst_bin_add(), bin[%s], audiosrc[%s]", GST_ELEMENT_NAME(source->bin), GST_ELEMENT_NAME(audiosrc)); + if (!gst_bin_add(bin, audiosrc)) { + LOG_ERROR("failed to gst_bin_add(), bin[%s], audiosrc[%s]", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(audiosrc)); SAFE_GST_OBJECT_UNREF(audiosrc); return WEBRTC_ERROR_INVALID_OPERATION; } + if (is_simulcast) { + /* remove the initial source element */ + audiosrc = gst_bin_get_by_name(source->bin, source->type == WEBRTC_MEDIA_SOURCE_TYPE_MIC ? + ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC); + if (audiosrc) + gst_bin_remove(source->bin, audiosrc); + } + return WEBRTC_ERROR_NONE; } -static int __build_videotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) +int _build_videotestsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc) { int ret; + const char *source_factory_name; GstElement *videotestsrc; const ini_item_media_source_s *ini_source; + g_autofree gchar *element_name = NULL; + bool is_simulcast = (ssrc != -1); + GstBin *bin; + webrtc_transceiver_encoding_s *encoding; + + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); + + if (!is_simulcast) { + ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_VIDEO].src_pad); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); + + source->media_types = MEDIA_TYPE_VIDEO; + source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types); + source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0]; + bin = source->bin; + } else { + encoding = _find_encoding_by_ssrc(source->av[AV_IDX_VIDEO].encodings, ssrc); + ASSERT(encoding); + ret = _add_no_target_ghostpad_to_encoding(encoding, &encoding->src_pad); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); - - ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_VIDEO].src_pad); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); - - source->media_types = MEDIA_TYPE_VIDEO; - source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types); - source->av[AV_IDX_VIDEO].codec = ini_source->v_codecs[0]; + bin = encoding->bin; + } if (!_set_default_video_info(source, ini_source)) return WEBRTC_ERROR_INVALID_OPERATION; - if (!(videotestsrc = _create_element(_get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC))) + source_factory_name = _get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST); + element_name = _append_num_to_string(ELEMENT_NAME_VIDEO_SRC, ssrc); + if (!(videotestsrc = _create_element(source_factory_name, element_name))) return WEBRTC_ERROR_INVALID_OPERATION; _gst_set_element_properties(videotestsrc, ini_source->source_element_properties); - if (!gst_bin_add(source->bin, videotestsrc)) { - LOG_ERROR("failed to gst_bin_add(), bin[%s], videotestsrc[%s]", GST_ELEMENT_NAME(source->bin), 
GST_ELEMENT_NAME(videotestsrc)); + if (!gst_bin_add(bin, videotestsrc)) { + LOG_ERROR("failed to gst_bin_add(), bin[%s], videotestsrc[%s]", GST_ELEMENT_NAME(bin), GST_ELEMENT_NAME(videotestsrc)); SAFE_GST_OBJECT_UNREF(videotestsrc); return WEBRTC_ERROR_INVALID_OPERATION; } + if (is_simulcast) { + /* remove the initial source element */ + videotestsrc = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEO_SRC); + if (videotestsrc) + gst_bin_remove(source->bin, videotestsrc); + } + return WEBRTC_ERROR_NONE; } @@ -419,8 +457,8 @@ static int __build_custom_videosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) GstElement *custom_videosrc; const ini_item_media_source_s *ini_source; - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_VIDEO].src_pad); RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); @@ -453,8 +491,8 @@ static int __build_custom_audiosrc(webrtc_s *webrtc, webrtc_gst_slot_s *source) GstElement *custom_audiosrc; const ini_item_media_source_s *ini_source; - ret = __build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); - RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to __build_src_check_params_and_get_ini_source()"); + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); ret = _add_no_target_ghostpad_to_slot(source, true, &source->av[AV_IDX_AUDIO].src_pad); RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()"); @@ -487,13 +525,13 @@ static int __build_source_bin(webrtc_s *webrtc, webrtc_gst_slot_s *source) switch (source->type) { case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST: - return __build_audiosrc(webrtc, source, false); + return _build_audiosrc(webrtc, source, false, -1); case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST: - return __build_videotestsrc(webrtc, source); + return _build_videotestsrc(webrtc, source, -1); case WEBRTC_MEDIA_SOURCE_TYPE_MIC: - return __build_audiosrc(webrtc, source, true); + return _build_audiosrc(webrtc, source, true, -1); case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA: return __build_camerasrc(webrtc, source); @@ -503,7 +541,7 @@ static int __build_source_bin(webrtc_s *webrtc, webrtc_gst_slot_s *source) return _build_screensrc(webrtc, source); #else LOG_ERROR("TIZEN_FEATURE_UI is disabled, skip this function"); - return WEBRTC_ERROR_INVALID_OPERATION; + return WEBRTC_ERROR_INVALID_OPERATION; #endif //LCOV_EXCL_STOP case WEBRTC_MEDIA_SOURCE_TYPE_FILE: @@ -557,6 +595,10 @@ void _source_slot_destroy_cb(gpointer data) if (source->av[i].transceiver) gst_object_unref(source->av[i].transceiver); g_free(source->av[i].mid); + + g_hash_table_destroy(source->av[i].encodings); + + g_free(source->av[i].payloader_factory_name); } if (source->bin) { @@ -587,6 +629,7 @@ void _source_slot_destroy_cb(gpointer data) static int __alloc_source_slot(webrtc_s *webrtc, int type, webrtc_gst_slot_s **source, gchar **name) { + int i; RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); 
RET_VAL_IF(name == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "name is NULL"); @@ -603,8 +646,10 @@ static int __alloc_source_slot(webrtc_s *webrtc, int type, webrtc_gst_slot_s **s (*source)->bin = GST_BIN(gst_bin_new(*name)); (*source)->type = type; (*source)->webrtc = webrtc; - (*source)->av[AV_IDX_AUDIO].direction = (type == WEBRTC_MEDIA_SOURCE_TYPE_NULL) ? WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY : WEBRTC_TRANSCEIVER_DIRECTION_SENDRECV; - (*source)->av[AV_IDX_VIDEO].direction = (type == WEBRTC_MEDIA_SOURCE_TYPE_NULL) ? WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY : WEBRTC_TRANSCEIVER_DIRECTION_SENDRECV; + for (i = 0; i < AV_IDX_MAX; i++) { + (*source)->av[i].direction = (type == WEBRTC_MEDIA_SOURCE_TYPE_NULL) ? WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY : WEBRTC_TRANSCEIVER_DIRECTION_SENDRECV; + (*source)->av[i].encodings = g_hash_table_new_full(g_str_hash, g_str_equal, g_free, _encoding_slot_destroy_cb); + } LOG_DEBUG("webrtc[%p] source[%p, id:%u, type:%d, name:%s]", webrtc, *source, webrtc->cur_max_source_id, type, *name); diff --git a/src/webrtc_source_file.c b/src/webrtc_source_file.c index 9f0f43b4..a9e1f377 100644 --- a/src/webrtc_source_file.c +++ b/src/webrtc_source_file.c @@ -324,7 +324,7 @@ static GstElement * __prepare_capsfilter_for_filesrc_pipeline(webrtc_gst_slot_s return NULL; } - if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, 0, source, NULL))) { + if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, -1, source, NULL))) { g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL); gst_caps_unref(sink_caps); } @@ -708,7 +708,7 @@ static int __create_rest_of_raw_audio_elements_for_filesrc_pipeline(webrtc_s *we goto exit; APPEND_ELEMENT(_element_list, conv); - if (_create_rest_of_elements(webrtc, source, false, &_element_list, true) != WEBRTC_ERROR_NONE) + if (_create_rest_of_elements(webrtc, source, -1, false, &_element_list, true) != WEBRTC_ERROR_NONE) goto exit; if (!(fakesink = __prepare_fakesink_for_filesrc_pipeline(source, true))) diff --git a/src/webrtc_source_mediapacket.c b/src/webrtc_source_mediapacket.c index e94ab34f..fec84a58 100644 --- a/src/webrtc_source_mediapacket.c +++ b/src/webrtc_source_mediapacket.c @@ -334,7 +334,7 @@ static int __create_rest_of_elements_for_encoded_format(webrtc_s *webrtc, webrtc if (_set_payload_type(webrtc, source, GET_AV_IDX_BY_TYPE(source->media_types), NULL) != WEBRTC_ERROR_NONE) goto error; - if ((sink_caps = _make_rtp_caps(media_type, source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt, 0, source, NULL))) { + if ((sink_caps = _make_rtp_caps(media_type, source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt, -1, source, NULL))) { g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL); gst_caps_unref(sink_caps); } @@ -757,7 +757,7 @@ static int __complete_mediapacketsrc_from_raw_format(webrtc_s *webrtc, webrtc_gs source->zerocopy_enabled = _is_hw_encoder_used(webrtc, source->type, source->media_types); - if ((ret = _create_rest_of_elements(webrtc, source, false, &element_list, (source->media_types == MEDIA_TYPE_AUDIO))) != WEBRTC_ERROR_NONE) + if ((ret = _create_rest_of_elements(webrtc, source, -1, false, &element_list, (source->media_types == MEDIA_TYPE_AUDIO))) != WEBRTC_ERROR_NONE) goto exit; if (!(sink_caps = _make_mediapacketsrc_raw_caps_from_media_format(source))) { @@ -863,7 +863,7 @@ int _update_pt_if_media_packet_source(webrtc_s *webrtc, webrtc_gst_slot_s *sourc idx = GET_AV_IDX_BY_TYPE(source->media_types); media_type = (idx == 
AV_IDX_AUDIO) ? _get_audio_media_type(source->av[idx].codec) : _get_video_media_type(source->av[idx].codec); - if (!(sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, 0, source, NULL))) + if (!(sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, -1, source, NULL))) return WEBRTC_ERROR_INVALID_OPERATION; g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL); diff --git a/src/webrtc_source_private.c b/src/webrtc_source_private.c index 63d76b0d..9971f68b 100644 --- a/src/webrtc_source_private.c +++ b/src/webrtc_source_private.c @@ -19,6 +19,21 @@ #include "webrtc_source_private.h" #include +int _build_src_check_params_and_get_ini_source(webrtc_s *webrtc, webrtc_gst_slot_s *source, const ini_item_media_source_s **ini_source) +{ + RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); + RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); + RET_VAL_IF(ini_source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "ini_source is NULL"); + RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL"); + + if (!(*ini_source = _ini_get_source_by_type(&webrtc->ini, source->type))) { + LOG_ERROR("ini_source is NULL"); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + return WEBRTC_ERROR_NONE; +} + static const char *__get_default_source_element(int type) { const char *element = NULL; @@ -198,7 +213,7 @@ static GstAudioFormat __get_gst_audio_raw_format_from_string(const char *format) return GST_AUDIO_FORMAT_UNKNOWN; } -GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, unsigned int ssrc, webrtc_gst_slot_s *source, GstElement *encoder) +GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, int ssrc, webrtc_gst_slot_s *source, GstElement *encoder) { GstCaps *caps; bool is_audio; @@ -211,9 +226,9 @@ GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, unsi "media", G_TYPE_STRING, GET_MEDIA_TYPE_NAME(is_audio), "payload", G_TYPE_INT, payload_type, NULL); - if (ssrc != 0) + if (ssrc > -1) gst_caps_set_simple(caps, - "ssrc", G_TYPE_UINT, ssrc, + "ssrc", G_TYPE_UINT, (guint)ssrc, NULL); if (!is_audio || !source->av[AV_IDX_AUDIO].inbandfec) @@ -400,12 +415,34 @@ exit: return ret; } -static GstCaps *__make_default_raw_caps(webrtc_gst_slot_s *source, webrtc_ini_s *ini) +webrtc_transceiver_encoding_s *_find_encoding_by_ssrc(GHashTable *encodings, int ssrc) +{ + GHashTableIter iter; + gpointer key, value; + webrtc_transceiver_encoding_s *encoding; + + ASSERT(encodings); + ASSERT(ssrc > -1); + + g_hash_table_iter_init(&iter, encodings); + while (g_hash_table_iter_next(&iter, &key, &value)) { + encoding = (webrtc_transceiver_encoding_s *)value; + if (ssrc == encoding->ssrc) { + LOG_DEBUG("rid[%s] encoding[%p, ssrc:%d]", (gchar *)key, encoding, encoding->ssrc); + return encoding; + } + } + return NULL; +} + +static GstCaps *__make_default_raw_caps(webrtc_gst_slot_s *source, int ssrc, webrtc_ini_s *ini) { GstCaps *caps = NULL; GstAudioInfo info; GstAudioFormat format; int channels, samplerate; + int width, height; + webrtc_transceiver_encoding_s *encoding; RET_VAL_IF(source == NULL, NULL, "source is NULL"); RET_VAL_IF(ini == NULL, NULL, "ini is NULL"); @@ -415,11 +452,20 @@ static GstCaps *__make_default_raw_caps(webrtc_gst_slot_s *source, webrtc_ini_s case WEBRTC_MEDIA_SOURCE_TYPE_CAMERA: case WEBRTC_MEDIA_SOURCE_TYPE_SCREEN: case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO: { + if (ssrc == -1) { + width = source->video_info.width; + height = source->video_info.height; + } else { 
+ encoding = _find_encoding_by_ssrc(source->av[AV_IDX_VIDEO].encodings, ssrc); + ASSERT(encoding); + width = encoding->width; + height = encoding->height; + } caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW, "format", G_TYPE_STRING, _get_raw_format_from_ini(ini, source->type, MEDIA_TYPE_VIDEO), "framerate", GST_TYPE_FRACTION, source->video_info.framerate, 1, - "width", G_TYPE_INT, source->video_info.width, - "height", G_TYPE_INT, source->video_info.height, + "width", G_TYPE_INT, width, + "height", G_TYPE_INT, height, NULL); break; } @@ -452,11 +498,13 @@ static GstCaps *__make_default_raw_caps(webrtc_gst_slot_s *source, webrtc_ini_s } /* Use g_free() to free the media_type parameter. */ -static GstCaps *__make_default_encoded_caps(webrtc_gst_slot_s *source, webrtc_ini_s *ini, gchar **media_type) +GstCaps *_make_default_encoded_caps(webrtc_gst_slot_s *source, int ssrc, webrtc_ini_s *ini, gchar **media_type) { GstCaps *caps; const char *_media_type; int channels, samplerate; + int width, height; + webrtc_transceiver_encoding_s *encoding; RET_VAL_IF(source == NULL, NULL, "source is NULL"); RET_VAL_IF(ini == NULL, NULL, "ini is NULL"); @@ -473,7 +521,16 @@ static GstCaps *__make_default_encoded_caps(webrtc_gst_slot_s *source, webrtc_in case WEBRTC_MEDIA_SOURCE_TYPE_CUSTOM_VIDEO: _media_type = _get_video_media_type(source->av[AV_IDX_VIDEO].codec); RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL"); - caps = _get_caps_from_encoded_video_media_type(_media_type, source->video_info.width, source->video_info.height); + if (ssrc == -1) { + width = source->video_info.width; + height = source->video_info.height; + } else { + encoding = _find_encoding_by_ssrc(source->av[AV_IDX_VIDEO].encodings, ssrc); + ASSERT(encoding); + width = encoding->width; + height = encoding->height; + } + caps = _get_caps_from_encoded_video_media_type(_media_type, width, height); if (source->zerocopy_enabled) caps = _gst_set_caps_additional_fields(caps, _get_v_hw_encoder_caps_ext_fields_from_ini(ini, source->type)); break; @@ -500,7 +557,6 @@ static GstCaps *__make_default_encoded_caps(webrtc_gst_slot_s *source, webrtc_in caps = _get_caps_from_encoded_audio_media_type(_media_type, channels, samplerate); } else if (source->media_types == MEDIA_TYPE_VIDEO) { - int width, height; _media_type = _get_video_media_type(source->av[AV_IDX_VIDEO].codec); RET_VAL_IF(_media_type == NULL, NULL, "_media_type is NULL"); @@ -527,18 +583,21 @@ static GstCaps *__make_default_encoded_caps(webrtc_gst_slot_s *source, webrtc_in } //LCOV_EXCL_START -static GstElement *__get_hw_encoder_element(webrtc_s *webrtc, webrtc_gst_slot_s *source) +static GstElement *__get_hw_encoder_element(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc) { GstElement *encoder = NULL; + g_autofree gchar *element_name = NULL; RET_VAL_IF(webrtc == NULL, NULL, "webrtc is NULL"); RET_VAL_IF(source == NULL, NULL, "source is NULL"); switch (source->media_types) { case MEDIA_TYPE_AUDIO: + element_name = _append_num_to_string(ELEMENT_NAME_AUDIO_ENCODER, ssrc); return _create_element(_get_hw_encoder_from_ini(&webrtc->ini, source->type, MEDIA_TYPE_AUDIO), ELEMENT_NAME_AUDIO_ENCODER); break; case MEDIA_TYPE_VIDEO: + element_name = _append_num_to_string(ELEMENT_NAME_VIDEO_ENCODER, ssrc); encoder = _create_element(_get_hw_encoder_from_ini(&webrtc->ini, source->type, MEDIA_TYPE_VIDEO), ELEMENT_NAME_VIDEO_ENCODER); break; default: @@ -606,12 +665,13 @@ int _get_encoder_element_bitrate(GstElement *encoder, int *target_bitrate) return WEBRTC_ERROR_INVALID_OPERATION; } -static 
GstElement * __prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool is_audio) +GstElement *_prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, bool is_audio) { GstElement *encoder = NULL; element_info_s elem_info; const gchar *encoder_klass_name = NULL; const gchar *encoder_name = NULL; + int av_idx = is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO; RET_VAL_IF(webrtc == NULL, NULL, "webrtc is NULL"); RET_VAL_IF(source == NULL, NULL, "source is NULL"); @@ -619,14 +679,14 @@ static GstElement * __prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *sourc encoder_klass_name = is_audio ? GST_KLASS_NAME_ENCODER_AUDIO : GST_KLASS_NAME_ENCODER_VIDEO; if (source->zerocopy_enabled) - encoder = __get_hw_encoder_element(webrtc, source); + encoder = __get_hw_encoder_element(webrtc, source, ssrc); else CREATE_ELEMENT_FROM_REGISTRY(elem_info, encoder_klass_name, - __make_default_raw_caps(source, &webrtc->ini), - __make_default_encoded_caps(source, &webrtc->ini, NULL), + __make_default_raw_caps(source, ssrc, &webrtc->ini), + _make_default_encoded_caps(source, ssrc, &webrtc->ini, NULL), webrtc->ini.general.gst_excluded_elements, encoder, - -1); + ssrc); RET_VAL_IF(encoder == NULL, NULL, "encoder is NULL"); @@ -660,30 +720,38 @@ static GstElement * __prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *sourc } } - if (source->av[is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO].target_bitrate > 0) - _set_encoder_element_bitrate(encoder, source->av[is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO].target_bitrate); + if (ssrc == -1) { + if (source->av[av_idx].target_bitrate > 0) + _set_encoder_element_bitrate(encoder, source->av[av_idx].target_bitrate); + } else { + webrtc_transceiver_encoding_s *encoding = _find_encoding_by_ssrc(source->av[av_idx].encodings, ssrc); + if (encoding && encoding->target_bitrate > 0) + _set_encoder_element_bitrate(encoder, encoding->target_bitrate); + } return encoder; } -static GstElement *__prepare_first_capsfilter(webrtc_s *webrtc, webrtc_gst_slot_s *source, int av_idx, GList **element_list, bool *encoded_support, GstCaps **caps_for_render) +GstElement *_prepare_first_capsfilter(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, int av_idx, GList **element_list, bool *encoded_support, GstCaps **caps_for_render) { GstElement *capsfilter; GstCaps *sink_caps; + g_autofree gchar *element_name = NULL; RET_VAL_IF(webrtc == NULL, NULL, "webrtc is NULL"); RET_VAL_IF(source == NULL, NULL, "source is NULL"); RET_VAL_IF(encoded_support == NULL, NULL, "encoded_support is NULL"); RET_VAL_IF(caps_for_render == NULL, NULL, "caps_for_render is NULL"); - if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER))) + element_name = _append_num_to_string(ELEMENT_NAME_FIRST_CAPSFILTER, ssrc); + if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, element_name))) return NULL; APPEND_ELEMENT(*element_list, capsfilter); _get_video_encoded_support_from_ini(&webrtc->ini, source->type, encoded_support); if (*encoded_support) { - if ((sink_caps = __make_default_encoded_caps(source, &webrtc->ini, NULL))) { + if ((sink_caps = _make_default_encoded_caps(source, ssrc, &webrtc->ini, NULL))) { PRINT_CAPS(sink_caps, "capsfilter"); g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL); @@ -695,7 +763,7 @@ static GstElement *__prepare_first_capsfilter(webrtc_s *webrtc, webrtc_gst_slot_ return capsfilter; } - if ((sink_caps = __make_default_raw_caps(source, &webrtc->ini))) { + if ((sink_caps = __make_default_raw_caps(source, ssrc, &webrtc->ini))) { 
PRINT_CAPS(sink_caps, "capsfilter"); g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL); @@ -705,7 +773,7 @@ static GstElement *__prepare_first_capsfilter(webrtc_s *webrtc, webrtc_gst_slot_ return capsfilter; } -static bool __is_videoscale_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source) +bool _is_videoscale_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source) { bool drc_support; bool encoded_support; @@ -760,7 +828,7 @@ static GstElement *__prepare_videoscale(webrtc_gst_slot_s *source, GList **eleme return videoscaleCapsfilter; } -static bool __is_videorate_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source) +bool _is_videorate_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source) { bool encoded_support; @@ -780,17 +848,19 @@ static bool __is_videorate_needed(webrtc_s *webrtc, webrtc_gst_slot_s *source) return true; } -static GstElement *__prepare_videorate(webrtc_gst_slot_s *source, GList **element_list) +GstElement *_prepare_videorate(webrtc_gst_slot_s *source, int ssrc, GList **element_list) { GstElement *videorate; GstElement *videorateCapsfilter; GstCaps *caps; + g_autofree gchar *element_name = NULL; if (!(videorate = _create_element(DEFAULT_ELEMENT_VIDEORATE, NULL))) return NULL; APPEND_ELEMENT(*element_list, videorate); - if (!(videorateCapsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_VIDEORATE_CAPSFILTER))) + element_name = _append_num_to_string(ELEMENT_NAME_VIDEORATE_CAPSFILTER, ssrc); + if (!(videorateCapsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, element_name))) return NULL; APPEND_ELEMENT(*element_list, videorateCapsfilter); @@ -798,14 +868,14 @@ static GstElement *__prepare_videorate(webrtc_gst_slot_s *source, GList **elemen "framerate", GST_TYPE_FRACTION, source->video_info.framerate, 1, NULL); - PRINT_CAPS(caps, ELEMENT_NAME_VIDEORATE_CAPSFILTER); + PRINT_CAPS(caps, element_name); g_object_set(G_OBJECT(videorateCapsfilter), "caps", caps, NULL); gst_caps_unref(caps); return videorateCapsfilter; } -int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list, bool is_audio) +int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, int ssrc, bool need_capsfilter, GList **element_list, bool is_audio) { GstElement *encoder = NULL; GstElement *payloader; @@ -815,11 +885,13 @@ int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool n GstElement *videocrop; GstCaps *sink_caps; element_info_s elem_info; - gchar *media_type = NULL; + g_autofree gchar *media_type = NULL; int idx; GstPad *pad_for_render = NULL; GstCaps *caps_for_render = NULL; GstCaps *caps_for_encoder = NULL; + g_autofree gchar *element_name = NULL; + bool is_simulcast = (ssrc > -1); RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL"); RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL"); @@ -831,7 +903,7 @@ int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool n GstElement *capsfilter; bool encoded_support; - if(!(capsfilter = __prepare_first_capsfilter(webrtc, source, idx, element_list, &encoded_support, &caps_for_render))) + if(!(capsfilter = _prepare_first_capsfilter(webrtc, source, ssrc, idx, element_list, &encoded_support, &caps_for_render))) return WEBRTC_ERROR_INVALID_OPERATION; pad_for_render = gst_element_get_static_pad(capsfilter, "src"); @@ -840,7 +912,7 @@ int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool n goto skip_encoder; } - if (__is_videoscale_needed(webrtc, 
source)) { + if (!is_simulcast && _is_videoscale_needed(webrtc, source)) { GstElement *videoscaleCapsfilter; if(!(videoscaleCapsfilter = __prepare_videoscale(source, element_list))) @@ -850,10 +922,10 @@ int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool n pad_for_render = gst_element_get_static_pad(videoscaleCapsfilter, "src"); } - if (__is_videorate_needed(webrtc, source)) { + if (_is_videorate_needed(webrtc, source)) { GstElement *videorateCapsfilter; - if(!(videorateCapsfilter = __prepare_videorate(source, element_list))) + if(!(videorateCapsfilter = _prepare_videorate(source, ssrc, element_list))) goto error; gst_object_unref(pad_for_render); @@ -869,18 +941,19 @@ int _create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool n pad_for_render = gst_element_get_static_pad(videocrop, "src"); } - if (!(encoder = __prepare_encoder(webrtc, source, is_audio))) + if (!(encoder = _prepare_encoder(webrtc, source, ssrc, is_audio))) goto error; APPEND_ELEMENT(*element_list, encoder); _set_need_decoding_for_loopback(source, idx, false); - if (!(encoder_capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_ENCODER_CAPSFILTER))) + element_name = _append_num_to_string(ELEMENT_NAME_ENCODER_CAPSFILTER, ssrc); + if (!(encoder_capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, element_name))) goto error; APPEND_ELEMENT(*element_list, encoder_capsfilter); skip_encoder: - caps_for_encoder = __make_default_encoded_caps(source, &webrtc->ini, &media_type); + caps_for_encoder = _make_default_encoded_caps(source, ssrc, &webrtc->ini, &media_type); if (encoder_capsfilter) g_object_set(G_OBJECT(encoder_capsfilter), "caps", caps_for_encoder, NULL); @@ -889,34 +962,51 @@ skip_encoder: NULL, NULL, payloader, - -1); + ssrc); if (payloader == NULL) goto error; APPEND_ELEMENT(*element_list, payloader); + if (!source->av[idx].payloader_factory_name) { + GstElementFactory *factory = gst_element_get_factory(payloader); + source->av[idx].payloader_factory_name = g_strdup(GST_OBJECT_NAME(factory)); + } - if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL))) - goto error; - APPEND_ELEMENT(*element_list, queue); + if (!is_simulcast) { + /* FIXME: check this queue can be removed */ + if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL))) + goto error; + APPEND_ELEMENT(*element_list, queue); + } - if (!(rtp_capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER))) + g_free(element_name); + element_name = _append_num_to_string(ELEMENT_NAME_RTP_CAPSFILTER, ssrc); + if (!(rtp_capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, element_name))) goto error; APPEND_ELEMENT(*element_list, rtp_capsfilter); - if (_set_payload_type(webrtc, source, idx, media_type) != WEBRTC_ERROR_NONE) - goto error; - - if ((sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, 0, source, encoder))) { + if (!is_simulcast || + (is_simulcast && source->av[idx].pt == 0)) { + if (_set_payload_type(webrtc, source, idx, media_type) != WEBRTC_ERROR_NONE) + goto error; + } + if ((sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, ssrc, source, encoder))) { g_object_set(G_OBJECT(rtp_capsfilter), "caps", sink_caps, NULL); gst_caps_unref(sink_caps); } - g_free(media_type); - - if (caps_for_render) - _set_caps_for_render(source, caps_for_render, idx); + if (caps_for_render) { + if (is_simulcast) + gst_caps_unref(caps_for_render); + else + _set_caps_for_render(source, caps_for_render, idx); + } - if (pad_for_render) - 
_add_probe_to_pad_for_render(source, idx, pad_for_render, _source_data_probe_cb);
+    if (pad_for_render) {
+        if (is_simulcast)
+            gst_object_unref(pad_for_render);
+        else
+            _add_probe_to_pad_for_render(source, idx, pad_for_render, _source_data_probe_cb);
+    }
 
     return WEBRTC_ERROR_NONE;
 
@@ -924,8 +1014,6 @@ error:
         gst_caps_unref(caps_for_render);
         gst_object_unref(pad_for_render);
 
-    g_free(media_type);
-
     return WEBRTC_ERROR_INVALID_OPERATION;
 }
 
diff --git a/src/webrtc_source_simulcast.c b/src/webrtc_source_simulcast.c
new file mode 100644
index 00000000..6591127a
--- /dev/null
+++ b/src/webrtc_source_simulcast.c
@@ -0,0 +1,464 @@
+/*
+ * Copyright (c) 2024 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//LCOV_EXCL_START
+#include "webrtc_private.h"
+#include "webrtc_source_private.h"
+#include <gst/rtp/rtp.h>
+
+#define RTP_HDREXT_BASE "urn:ietf:params:rtp-hdrext:"
+#define RTPHDREXT_MID RTP_HDREXT_BASE "sdes:mid"
+#define RTPHDREXT_STREAM_ID RTP_HDREXT_BASE "sdes:rtp-stream-id"
+#define RTPHDREXT_REPAIRED_STREAM_ID RTP_HDREXT_BASE "sdes:repaired-rtp-stream-id"
+
+static void __add_rtp_header_extensions_for_sdes(GstElement *payloader, const char *mid, const char *rid)
+{
+    GstRTPHeaderExtension *ext;
+    g_autofree gchar *mid_str = NULL;
+
+    ASSERT(payloader);
+    ASSERT(mid);
+    ASSERT(rid);
+
+    g_signal_emit_by_name(payloader, "clear-extensions");
+
+    ext = gst_rtp_header_extension_create_from_uri(RTPHDREXT_MID);
+    RET_IF(ext == NULL, "failed to gst_rtp_header_extension_create_from_uri(%s)", RTPHDREXT_MID);
+
+    gst_rtp_header_extension_set_id(ext, 1);
+    g_object_set(ext, "mid", mid, NULL);
+    g_signal_emit_by_name(payloader, "add-extension", ext);
+    gst_clear_object(&ext);
+
+    ext = gst_rtp_header_extension_create_from_uri(RTPHDREXT_STREAM_ID);
+    RET_IF(ext == NULL, "failed to gst_rtp_header_extension_create_from_uri(%s)", RTPHDREXT_STREAM_ID);
+
+    gst_rtp_header_extension_set_id(ext, 2);
+    g_object_set(ext, "rid", rid, NULL);
+    g_signal_emit_by_name(payloader, "add-extension", ext);
+    gst_clear_object(&ext);
+
+    ext = gst_rtp_header_extension_create_from_uri(RTPHDREXT_REPAIRED_STREAM_ID);
+    RET_IF(ext == NULL, "failed to gst_rtp_header_extension_create_from_uri(%s)", RTPHDREXT_REPAIRED_STREAM_ID);
+
+    gst_rtp_header_extension_set_id(ext, 3);
+    g_object_set(ext, "rid", rid, NULL);
+    g_signal_emit_by_name(payloader, "add-extension", ext);
+    gst_clear_object(&ext);
+
+    LOG_DEBUG("payloader[%s] mid[%s] rid[%s]", GST_ELEMENT_NAME(payloader), mid, rid);
+}
+
+typedef struct {
+    GstElement *funnel;
+    webrtc_gst_slot_s *source;
+    bool is_audio;
+    bool error_occurred;
+} table_foreach_userdata_s;
+
+static int __link_encoding_bin_with_funnel(webrtc_transceiver_encoding_s *encoding, GstElement *funnel)
+{
+    int ret = WEBRTC_ERROR_NONE;
+    GstPad *sinkpad;
+    g_autofree gchar *sinkpad_name = NULL;
+    g_autofree gchar *srcpad_name = NULL;
+
+    ASSERT(encoding);
+    ASSERT(funnel);
+
+    if (!(sinkpad = gst_element_request_pad_simple(funnel, "sink_%u"))) {
+        LOG_ERROR("failed to gst_element_request_pad_simple()");
+        return WEBRTC_ERROR_INVALID_OPERATION;
+    }
+    if (!(sinkpad_name = gst_pad_get_name(sinkpad))) {
+        LOG_ERROR("failed to gst_pad_get_name()");
+        ret = WEBRTC_ERROR_INVALID_OPERATION;
+        goto exit;
+    }
+
+    srcpad_name = g_strdup_printf("src_%u", encoding->ssrc);
+
+    if (!gst_element_link_pads(GST_ELEMENT(encoding->bin), srcpad_name, funnel, sinkpad_name)) {
+        LOG_ERROR("failed to link pads, [%s:%s] - [%s:%s]",
+            GST_ELEMENT_NAME(encoding->bin), srcpad_name, GST_ELEMENT_NAME(funnel), sinkpad_name);
+        ret = WEBRTC_ERROR_INVALID_OPERATION;
+        goto exit;
+    }
+    LOG_DEBUG("link pads successfully, [%s:%s] - [%s:%s]",
+        GST_ELEMENT_NAME(encoding->bin), srcpad_name, GST_ELEMENT_NAME(funnel), sinkpad_name);
+
+exit:
+    if (ret != WEBRTC_ERROR_NONE) {
+        gst_element_release_request_pad(funnel, sinkpad);
+        g_object_unref(sinkpad);
+    }
+    return ret;
+}
+
+static void __create_and_link_elements_foreach_cb(gpointer key, gpointer value, gpointer user_data)
+{
+    webrtc_transceiver_encoding_s *encoding = (webrtc_transceiver_encoding_s *)value;
+    table_foreach_userdata_s *userdata = (table_foreach_userdata_s *)user_data;
+    const char *rid = (const char *)key;
+    GList *element_list = NULL;
+    GstElement *src_element;
+    GstElement *volume;
+    GstElement *capsfilter;
+    gchar *element_name;
+    webrtc_gst_slot_s *source;
+    int av_idx;
+
+    ASSERT(userdata);
+    ASSERT(userdata->funnel);
+    ASSERT(userdata->source);
+
+    source = userdata->source;
+    av_idx = userdata->is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+
+    LOG_INFO("key[%s] encoding[%p, ssrc:%d, active:%d] userdata[is_audio:%d, source:%p]",
+        rid, encoding, encoding->ssrc, encoding->active, userdata->is_audio, userdata->source);
+
+    if (!encoding->active) {
+        LOG_WARNING("skip this inactive encoding option");
+        return;
+    }
+    if (gst_pad_is_linked(encoding->src_pad)) {
+        LOG_WARNING("already linked, skip this");
+        return;
+    }
+
+    if (userdata->is_audio) {
+        element_name = _append_num_to_string(ELEMENT_NAME_VOLUME, encoding->ssrc);
+        volume = _create_element(DEFAULT_ELEMENT_VOLUME, element_name);
+        g_free(element_name);
+        if (!volume)
+            goto exit;
+        if (!g_object_class_find_property(G_OBJECT_GET_CLASS(volume), "mute")) {
+            LOG_ERROR("there is no mute property");
+            goto exit;
+        }
+        g_object_set(volume, "mute", (gboolean)source->av[av_idx].mute, NULL);
+        APPEND_ELEMENT(element_list, volume);
+    }
+
+    if (_create_rest_of_elements(source->webrtc, source, encoding->ssrc, true, &element_list, userdata->is_audio) != WEBRTC_ERROR_NONE)
+        goto exit;
+
+    if (!_add_elements_to_bin(encoding->bin, element_list))
+        goto exit;
+
+    if (userdata->is_audio)
+        element_name = _append_num_to_string(source->type == WEBRTC_MEDIA_SOURCE_TYPE_MIC ?
+            ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC, encoding->ssrc);
+    else
+        element_name = _append_num_to_string(ELEMENT_NAME_VIDEO_SRC, encoding->ssrc);
+
+    src_element = gst_bin_get_by_name(encoding->bin, element_name);
+    g_free(element_name);
+    if (!src_element) {
+        LOG_ERROR("failed to gst_bin_get_by_name() for src_element");
+        goto exit_with_remove_from_bin;
+    }
+
+    PREPEND_ELEMENT(element_list, src_element);
+    if (!_link_elements(element_list))
+        goto exit_with_remove_from_bin;
+
+    element_name = _append_num_to_string(ELEMENT_NAME_RTP_CAPSFILTER, encoding->ssrc);
+    capsfilter = gst_bin_get_by_name(encoding->bin, element_name);
+    g_free(element_name);
+    if (!capsfilter) {
+        LOG_ERROR("failed to gst_bin_get_by_name() for rtp capsfilter");
+        goto exit_with_remove_from_bin;
+    }
+    if (_set_ghost_pad_target(encoding->src_pad, capsfilter, true) != WEBRTC_ERROR_NONE)
+        goto exit_with_remove_from_bin;
+
+    if (!gst_bin_add(source->bin, GST_ELEMENT(encoding->bin))) {
+        LOG_ERROR("failed to gst_bin_add(), encoding->bin");
+        goto exit_with_remove_from_bin;
+    }
+
+    if (__link_encoding_bin_with_funnel(encoding, userdata->funnel) != WEBRTC_ERROR_NONE)
+        goto exit_with_remove_from_bin;
+
+    SAFE_G_LIST_FREE(element_list);
+    return;
+
+exit_with_remove_from_bin:
+    userdata->error_occurred = true;
+    _remove_elements_from_bin(encoding->bin, element_list);
+    SAFE_G_LIST_FREE(element_list);
+    return;
+exit:
+    userdata->error_occurred = true;
+    SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+    return;
+}
+
+static void __add_rtp_header_extensions_foreach_cb(gpointer key, gpointer value, gpointer user_data)
+{
+    webrtc_transceiver_encoding_s *encoding = (webrtc_transceiver_encoding_s *)value;
+    table_foreach_userdata_s *userdata = (table_foreach_userdata_s *)user_data;
+    const char *rid = (const char *)key;
+    webrtc_gst_slot_s *source;
+    int av_idx;
+    g_autofree gchar *element_name = NULL;
+
+    ASSERT(userdata);
+    ASSERT(userdata->source);
+
+    source = userdata->source;
+    av_idx = userdata->is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+
+    LOG_INFO("key[%s] encoding[%p, ssrc:%d, active:%d] userdata[is_audio:%d, source:%p]",
+        rid, encoding, encoding->ssrc, encoding->active, userdata->is_audio, userdata->source);
+
+    if (!encoding->active) {
+        LOG_WARNING("skip this inactive encoding option");
+        return;
+    }
+
+    element_name = _append_num_to_string(source->av[av_idx].payloader_factory_name, encoding->ssrc);
+    __add_rtp_header_extensions_for_sdes(gst_bin_get_by_name(encoding->bin, element_name),
+        source->av[av_idx].mid, rid);
+}
+
+int _add_no_target_ghostpad_to_encoding(webrtc_transceiver_encoding_s *encoding, GstPad **new_pad)
+{
+    g_autofree gchar *pad_name = NULL;
+
+    ASSERT(encoding);
+    ASSERT(encoding->bin);
+    ASSERT(new_pad);
+
+    pad_name = g_strdup_printf("%s_%u", "src", encoding->ssrc);
+
+    if (!(*new_pad = _add_no_target_ghostpad(encoding->bin, pad_name, true))) {
+        LOG_ERROR("failed to add new ghost pad[%s] for bin[%s]", pad_name, GST_ELEMENT_NAME(encoding->bin));
+        return WEBRTC_ERROR_INVALID_OPERATION;
+    }
+
+    return WEBRTC_ERROR_NONE;
+}
+
+void _encoding_slot_destroy_cb(gpointer data)
+{
+    webrtc_transceiver_encoding_s *encoding = (webrtc_transceiver_encoding_s *)data;
+
+    ASSERT(encoding);
+    ASSERT(encoding->source);
+
+    LOG_DEBUG("encoding[%p, ssrc:%d, source:%s, source_id:%u] is removed", encoding, encoding->ssrc,
+        encoding->source->bin ?
GST_ELEMENT_NAME(encoding->source->bin) : "null", encoding->source->id); + + GstElement *parent = GST_ELEMENT(gst_element_get_parent(encoding->bin)); + if (parent) + gst_bin_remove(GST_BIN(parent), GST_ELEMENT(encoding->bin)); + + g_free(encoding); +} + +int _add_source_encoding(webrtc_gst_slot_s *source, int av_idx, const char *rid, int target_bitrate, int width, int height, int *ssrc) +{ + webrtc_transceiver_encoding_s *encoding; + g_autofree gchar *bin_name = NULL; + + ASSERT(source); + ASSERT(rid); + ASSERT(ssrc); + + if (g_hash_table_contains(source->av[av_idx].encodings, rid)) { + LOG_ERROR("it already has the same rid[%s]", rid); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + if (g_hash_table_size(source->av[av_idx].encodings) == MAX_ENCODINGS_NUM) { + LOG_ERROR("table full, max size[%d]", MAX_ENCODINGS_NUM); + return WEBRTC_ERROR_INVALID_OPERATION; + } + + bin_name = g_strdup_printf("encoding_%s", rid); + + encoding = g_new0(webrtc_transceiver_encoding_s, 1); + encoding->bin = GST_BIN(gst_bin_new(bin_name)); + encoding->ssrc = g_random_int_range(0, G_MAXINT32); + encoding->target_bitrate = target_bitrate; + encoding->width = width; + encoding->height = height; + encoding->active = true; + encoding->source = source; + LOG_INFO("source[%p, id:%u, av_idx:%d], rid[%s], encoding[%p, ssrc:%u, target_bitrate:%d, width:%d, height:%d, active:%d]", + source, source->id, av_idx, rid, encoding, encoding->ssrc, encoding->target_bitrate, encoding->width, encoding->height, + encoding->active); + + ASSERT(g_hash_table_insert(source->av[av_idx].encodings, (gpointer)g_strdup(rid), (gpointer)encoding)); + + *ssrc = encoding->ssrc; + + return WEBRTC_ERROR_NONE; +} + +int _remove_source_encoding(webrtc_gst_slot_s *source, int av_idx, const char *rid) +{ + ASSERT(source); + ASSERT(rid); + + if (!g_hash_table_contains(source->av[av_idx].encodings, rid)) { + LOG_ERROR("nothing to remove, rid[%s]", rid); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + if (!g_hash_table_remove(source->av[av_idx].encodings, (gpointer)rid)) { + LOG_ERROR("failed to find encoding by rid[%s]", rid); + return WEBRTC_ERROR_INVALID_PARAMETER; + } + + LOG_INFO("source_id[%u], av_idx[%d], rid[%s]", source->id, av_idx, rid); + + return WEBRTC_ERROR_NONE; +} + +int _complete_rest_of_src_for_simulcast(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool is_audio) +{ + int ret; + GstElement *funnel = NULL; + GstElement *capsfilter = NULL; + const ini_item_media_source_s *ini_source; + GstCaps *caps; + table_foreach_userdata_s tb_userdata = { .source = source, .is_audio = is_audio, .error_occurred = false}; + int av_idx = is_audio ? 
AV_IDX_AUDIO : AV_IDX_VIDEO; + + ASSERT(webrtc); + ASSERT(source); + ASSERT(g_hash_table_size(source->av[av_idx].encodings) > 0); + + ret = _build_src_check_params_and_get_ini_source(webrtc, source, &ini_source); + RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _build_src_check_params_and_get_ini_source()"); + + if (!(funnel = gst_bin_get_by_name(source->bin, "funnel"))) { + if (!(funnel = _create_element(DEFAULT_ELEMENT_RTP_FUNNEL, "funnel"))) + goto exit; + if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, "funnelCapsfilter"))) { + gst_object_unref(funnel); + goto exit; + } + if (!gst_bin_add(source->bin, funnel)) { + LOG_ERROR("failed to gst_bin_add(), funnel"); + goto exit; + } + if (!gst_bin_add(source->bin, capsfilter)) { + LOG_ERROR("failed to gst_bin_add(), capsfilter"); + goto exit; + } + if (!gst_element_link(funnel, capsfilter)) { + LOG_ERROR("failed to gst_element_link()"); + goto exit; + } + } + + tb_userdata.funnel = funnel; + g_hash_table_foreach(source->av[av_idx].encodings, __create_and_link_elements_foreach_cb, (gpointer)(&tb_userdata)); + if (tb_userdata.error_occurred) + goto exit; + + if (!gst_pad_is_linked(source->av[av_idx].src_pad)) { + if (_set_ghost_pad_target(source->av[av_idx].src_pad, capsfilter, true) != WEBRTC_ERROR_NONE) + goto exit; + + if (_link_source_with_webrtcbin(source, webrtc->gst.webrtcbin) != WEBRTC_ERROR_NONE) + goto exit; + } + + if (!capsfilter) + capsfilter = gst_bin_get_by_name(source->bin, "funnelCapsfilter"); + ASSERT(capsfilter); + caps = _create_simulcast_caps(source, true, is_audio); + g_object_set(capsfilter, "caps", caps, NULL); + gst_clear_caps(&caps); + + /* NOTE: 'mid' is set after linking with webrtcbin, therefore setting header extensions are placed here. */ + g_hash_table_foreach(source->av[av_idx].encodings, __add_rtp_header_extensions_foreach_cb, (gpointer)(&tb_userdata)); + + if (source->av[av_idx].src_pad_probe_id == 0) + _add_probe_to_pad_for_pause(source, av_idx, source->av[av_idx].src_pad, _payloaded_data_probe_cb); + + return WEBRTC_ERROR_NONE; + +exit: + SAFE_GST_OBJECT_UNREF(funnel); + SAFE_GST_OBJECT_UNREF(capsfilter); + + return WEBRTC_ERROR_INVALID_OPERATION; +} + +GstCaps *_create_simulcast_caps(webrtc_gst_slot_s *source, bool is_send, bool is_audio) +{ + GstStructure *s; + GstCaps *caps; + GString *simulcast_value; + g_autofree gchar *simulcast_attr = NULL; + gchar *extmap_key; + gchar *rid_key; + unsigned int ext_mid = 1; + unsigned int ext_stream_id = 2; + unsigned int ext_repaired_stream_id = 3; + int av_idx = is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO; + GHashTableIter iter; + gpointer key, value; + int count = 0; + + RET_VAL_IF(source == NULL, NULL, "source is NULL"); + + caps = _make_rtp_caps_with_encoding(source, is_audio); + s = gst_caps_get_structure(caps, 0); + + extmap_key = g_strdup_printf("extmap-%u", ext_mid); + gst_structure_set(s, + "a-mid", G_TYPE_STRING, source->av[av_idx].mid, + extmap_key, G_TYPE_STRING, RTPHDREXT_MID, + NULL); + g_free(extmap_key); + + simulcast_value = g_string_new(is_send ? "send" : "recv"); + g_string_append_c(simulcast_value, ' '); + + g_hash_table_iter_init(&iter, source->av[av_idx].encodings); + while (g_hash_table_iter_next(&iter, &key, &value)) { + rid_key = g_strdup_printf("rid-%s", (gchar *)key); + gst_structure_set(s, rid_key, G_TYPE_STRING, is_send ? 
"send" : "recv", NULL); + if (count++ > 0) + g_string_append_c(simulcast_value, ';'); + g_string_append(simulcast_value, (gchar *)key); + g_free(rid_key); + } + + simulcast_attr = g_string_free(simulcast_value, FALSE); + extmap_key = g_strdup_printf("extmap-%u", ext_stream_id); + gst_structure_set(s, + extmap_key, G_TYPE_STRING, RTPHDREXT_STREAM_ID, + "a-simulcast", G_TYPE_STRING, simulcast_attr, + NULL); + g_free(extmap_key); + + extmap_key = g_strdup_printf("extmap-%u", ext_repaired_stream_id); + gst_structure_set(s, + extmap_key, G_TYPE_STRING, RTPHDREXT_REPAIRED_STREAM_ID, + NULL); + g_free(extmap_key); + + return caps; +} +//LCOV_EXCL_STOP diff --git a/src/webrtc_transceiver.c b/src/webrtc_transceiver.c index cde61595..2b9ac9f5 100644 --- a/src/webrtc_transceiver.c +++ b/src/webrtc_transceiver.c @@ -282,6 +282,22 @@ void _update_transceivers_for_offer(webrtc_s *webrtc) } } +GstCaps *_make_rtp_caps_with_encoding(webrtc_gst_slot_s *source, bool is_audio) +{ + GstCaps *caps = NULL; + rtp_payload_info_s *payload_info = NULL; + int av_idx = is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO; + + RET_VAL_IF(source == NULL, NULL, "source is NULL"); + + if ((payload_info = __get_payload_info_by_encoding_name(source->av[av_idx].codec))) { + caps = __make_transceiver_caps_with_pt(payload_info, source->av[av_idx].pt); + PRINT_CAPS(caps, "caps_with_encoding"); + } + + return caps; +} + static rtp_payload_info_s * __get_payload_info(webrtc_transceiver_codec_e codec) { int i = 0; @@ -722,3 +738,63 @@ int _get_transceiver_codec(webrtc_s *webrtc, unsigned int source_id, webrtc_medi return WEBRTC_ERROR_NONE; } + +int _add_transceiver_encoding(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid, int target_bitrate, int width, int height) +{ + int ret; + webrtc_gst_slot_s *source; + int av_idx = (media_type == WEBRTC_MEDIA_TYPE_AUDIO) ? 
AV_IDX_AUDIO : AV_IDX_VIDEO;
+    int ssrc;
+
+    RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+    RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+    RET_VAL_IF((source->type > WEBRTC_MEDIA_SOURCE_TYPE_MIC), WEBRTC_ERROR_INVALID_PARAMETER, "not supported media source type[%d]", source->type);
+    RET_VAL_IF(rid == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "rid is NULL");
+
+    if ((media_type == WEBRTC_MEDIA_TYPE_AUDIO && !(source->media_types & MEDIA_TYPE_AUDIO)) ||
+        (media_type == WEBRTC_MEDIA_TYPE_VIDEO && !(source->media_types & MEDIA_TYPE_VIDEO))) {
+        LOG_ERROR("invalid media_type[%d]", media_type);
+        return WEBRTC_ERROR_INVALID_PARAMETER;
+    }
+
+    if (media_type == WEBRTC_MEDIA_TYPE_AUDIO)
+        LOG_DEBUG("media_type is AUDIO, skip width[%d] height[%d] parameters", width, height);
+
+    ret = _add_source_encoding(source, av_idx, rid, target_bitrate, width, height, &ssrc);
+    if (ret != WEBRTC_ERROR_NONE)
+        return ret;
+
+    switch (source->type) {
+    case WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST:
+    case WEBRTC_MEDIA_SOURCE_TYPE_MIC:
+        ret = _build_audiosrc(webrtc, source, source->type == WEBRTC_MEDIA_SOURCE_TYPE_MIC, ssrc);
+        break;
+    case WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST:
+        ret = _build_videotestsrc(webrtc, source, ssrc);
+        break;
+    default:
+        /* TODO: support other types */
+        LOG_ERROR_IF_REACHED("type(%d)", source->type);
+        return WEBRTC_ERROR_INVALID_PARAMETER;
+    }
+
+    return (ret != WEBRTC_ERROR_NONE) ? ret : _set_transceiver_direction(webrtc, source_id, media_type, WEBRTC_TRANSCEIVER_DIRECTION_SENDONLY);
+}
+
+int _remove_transceiver_encoding(webrtc_s *webrtc, unsigned int source_id, webrtc_media_type_e media_type, const char *rid)
+{
+    webrtc_gst_slot_s *source;
+    int av_idx = (media_type == WEBRTC_MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+
+    RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+    RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+    RET_VAL_IF(rid == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "rid is NULL");
+
+    if ((media_type == WEBRTC_MEDIA_TYPE_AUDIO && !(source->media_types & MEDIA_TYPE_AUDIO)) ||
+        (media_type == WEBRTC_MEDIA_TYPE_VIDEO && !(source->media_types & MEDIA_TYPE_VIDEO))) {
+        LOG_ERROR("invalid media_type[%d]", media_type);
+        return WEBRTC_ERROR_INVALID_PARAMETER;
+    }
+
+    return _remove_source_encoding(source, av_idx, rid);
+}
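
A usage sketch of the two internal entry points added in webrtc_transceiver.c: each _add_transceiver_encoding() call registers one rid layer, builds the corresponding test/mic source bin for that ssrc and forces the transceiver to send-only, while _remove_transceiver_encoding() drops a single layer by its rid again. The helper name, rid values, bitrates and resolutions below are illustrative only and not taken from this patch; the sketch assumes the headers already included by this module.

static int __setup_three_video_layers_sketch(webrtc_s *webrtc, unsigned int source_id)
{
    int ret;

    /* the per-media encodings table accepts at most MAX_ENCODINGS_NUM (4) entries */
    ret = _add_transceiver_encoding(webrtc, source_id, WEBRTC_MEDIA_TYPE_VIDEO, "f", 2500000, 1280, 720);
    if (ret != WEBRTC_ERROR_NONE)
        return ret;
    ret = _add_transceiver_encoding(webrtc, source_id, WEBRTC_MEDIA_TYPE_VIDEO, "h", 1000000, 640, 360);
    if (ret != WEBRTC_ERROR_NONE)
        return ret;
    return _add_transceiver_encoding(webrtc, source_id, WEBRTC_MEDIA_TYPE_VIDEO, "q", 300000, 320, 180);
}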
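
The creation of the per-media encodings hash table itself is not visible in this hunk. Given that _add_source_encoding() inserts g_strdup()'d rid keys and that _encoding_slot_destroy_cb() matches the GDestroyNotify signature, it is presumably constructed roughly as sketched below; the helper name is made up.

static GHashTable *__create_encodings_table_sketch(void)
{
    /* keys: g_strdup()'d rid strings, released with g_free()
     * values: webrtc_transceiver_encoding_s, released via _encoding_slot_destroy_cb() */
    return g_hash_table_new_full(g_str_hash, g_str_equal, g_free, _encoding_slot_destroy_cb);
}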
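
Conceptually, each active encoding becomes its own bin of ssrc-suffixed elements produced by _create_rest_of_elements(), exposed through a src_<ssrc> ghost pad and merged by the shared funnel (DEFAULT_ELEMENT_RTP_FUNNEL) in front of webrtcbin. The sketch below only illustrates that shape for one video layer; the real encoder and payloader factories are resolved from the registry and ini settings, so videotestsrc, vp8enc and rtpvp8pay are stand-ins, and the ssrc and element names are arbitrary. It assumes <gst/gst.h>.

static GstElement *__make_one_layer_shape_sketch(void)
{
    GError *error = NULL;
    GstElement *bin = gst_parse_bin_from_description(
        "videotestsrc ! vp8enc target-bitrate=1000000 ! rtpvp8pay ssrc=1111 ! capsfilter name=rtpCapsfilter1111",
        TRUE, &error);

    if (!bin)
        g_clear_error(&error);
    return bin;
}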
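
For orientation, this is roughly what the caps assembled by _create_simulcast_caps() are expected to turn into at the SDP level when three encodings keyed "q", "h" and "f" are present. The rid ordering follows hash table iteration and the exact rendering is up to webrtcbin, so treat this as a sketch of the intent rather than captured output.

/*
 * a=extmap:1 urn:ietf:params:rtp-hdrext:sdes:mid
 * a=extmap:2 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id
 * a=extmap:3 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id
 * a=rid:q send
 * a=rid:h send
 * a=rid:f send
 * a=simulcast:send q;h;f
 */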