bool _is_hw_encoder_used(webrtc_s *webrtc, webrtc_media_source_type_e source_type, media_type_e media_type);
GstCaps *_get_caps_from_encoded_audio_media_type(const char *media_type, int channels, int samplerate);
GstCaps *_get_caps_from_encoded_video_media_type(const char *media_type, int width, int height);
-GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, webrtc_gst_slot_s *source, GstElement *encoder);
+GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, unsigned int ssrc, webrtc_gst_slot_s *source, GstElement *encoder);
const char *_get_element_name(int av_idx, gst_element_e element);
GstPadProbeReturn _payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
void _add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb);
Name: capi-media-webrtc
Summary: A WebRTC library for the Tizen Native API
-Version: 0.4.26
+Version: 0.4.27
Release: 0
Group: Multimedia/API
License: Apache-2.0
return NULL;
}
- if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, source, NULL))) {
+ if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, 0, source, NULL))) {
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
if (_set_payload_type(webrtc, source, GET_AV_IDX_BY_TYPE(source->media_types), NULL) != WEBRTC_ERROR_NONE)
goto error;
- if ((sink_caps = _make_rtp_caps(media_type, source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt, source, NULL))) {
+ if ((sink_caps = _make_rtp_caps(media_type, source->av[GET_AV_IDX_BY_TYPE(source->media_types)].pt, 0, source, NULL))) {
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
idx = GET_AV_IDX_BY_TYPE(source->media_types);
media_type = (idx == AV_IDX_AUDIO) ? _get_audio_media_type(source->av[idx].codec) : _get_video_media_type(source->av[idx].codec);
- if (!(sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, source, NULL)))
+ if (!(sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, 0, source, NULL)))
return WEBRTC_ERROR_INVALID_OPERATION;
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
return GST_AUDIO_FORMAT_UNKNOWN;
}
-GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, webrtc_gst_slot_s *source, GstElement *encoder)
+GstCaps *_make_rtp_caps(const gchar *media_type, unsigned int payload_type, unsigned int ssrc, webrtc_gst_slot_s *source, GstElement *encoder)
{
GstCaps *caps;
bool is_audio;
"media", G_TYPE_STRING, GET_MEDIA_TYPE_NAME(is_audio),
"payload", G_TYPE_INT, payload_type,
NULL);
+ if (ssrc != 0)
+ gst_caps_set_simple(caps,
+ "ssrc", G_TYPE_UINT, ssrc,
+ NULL);
if (!is_audio || !source->av[AV_IDX_AUDIO].inbandfec)
goto out;
if (_set_payload_type(webrtc, source, idx, media_type) != WEBRTC_ERROR_NONE)
goto error;
- if ((sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, source, encoder))) {
+ if ((sink_caps = _make_rtp_caps(media_type, source->av[idx].pt, 0, source, encoder))) {
g_object_set(G_OBJECT(rtp_capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
return caps;
}
-//LCOV_EXCL_STOP
\ No newline at end of file
+//LCOV_EXCL_STOP