#define MIN_DYNAMIC_PAYLOAD_TYPE 96
#define MAX_DYNAMIC_PAYLOAD_TYPE 127
-typedef struct {
- int av_idx;
- webrtc_gst_slot_s *source;
-} probe_userdata_s;
-
static direction_info_s __direction_info[] = {
[WEBRTC_TRANSCEIVER_DIRECTION_SENDONLY] = { "SENDONLY", GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDONLY },
[WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY] = { "RECVONLY", GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_RECVONLY },
webrtc->payload_types ^= bitmask;
}
-static GstPadProbeReturn __source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
-{
- probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
- GstBuffer *buffer;
- GstElement *appsrc;
- GstFlowReturn gst_ret = GST_FLOW_OK;
-
- RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
- RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
- RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
- RET_VAL_IF(probe_data->source == NULL, GST_PAD_PROBE_REMOVE, "probe_data->source is NULL");
-
- switch (probe_data->av_idx) {
- case AV_IDX_AUDIO:
- if (!probe_data->source->sound_stream_info.type)
- return GST_PAD_PROBE_OK;
- break;
- case AV_IDX_VIDEO:
- if (!probe_data->source->display)
- return GST_PAD_PROBE_OK;
- break;
- default:
- LOG_ERROR_IF_REACHED("av_idx(%d)", probe_data->av_idx);
- return GST_PAD_PROBE_OK;
- }
-
- appsrc = probe_data->source->av[probe_data->av_idx].render.appsrc;
- if (appsrc) {
- buffer = gst_pad_probe_info_get_buffer(info);
- LOG_VERBOSE("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
- g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK)
- LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
- }
-
- return GST_PAD_PROBE_OK;
-}
-
-static void __add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
-{
- probe_userdata_s *probe_userdata;
-
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
- RET_IF(pad == NULL, "pad is NULL");
- RET_IF(probe_cb == NULL, "probe_cb is NULL");
-
- probe_userdata = g_new0(probe_userdata_s, 1);
- probe_userdata->source = source;
- probe_userdata->av_idx = idx;
- source->av[idx].render.src_pad = pad;
- source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
- probe_cb, probe_userdata, g_free);
-
- LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
- source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
-}
-
-static void __remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
-{
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
-
- if (source->av[idx].render.src_pad_probe_id == 0)
- return;
-
- LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
- source->id, idx, source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
- gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
- source->av[idx].render.src_pad_probe_id = 0;
- gst_object_unref(source->av[idx].render.src_pad);
- source->av[idx].render.src_pad = NULL;
- if (source->av[idx].render.appsrc_caps) {
- gst_caps_unref(source->av[idx].render.appsrc_caps);
- source->av[idx].render.appsrc_caps = NULL;
- }
-}
-
static GstElement * __prepare_encoder(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool is_audio)
{
GstElement *encoder = NULL;
return encoder;
}
-static GstPadProbeReturn __payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
-{
- probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
- GstBuffer *buffer;
- static unsigned int counts[AV_IDX_MAX] = { 0 };
-
- RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
- RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
- RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
-
- buffer = gst_pad_probe_info_get_buffer(info);
-
- if (probe_data->source->av[probe_data->av_idx].pause) {
- if (counts[probe_data->av_idx]++ % 10 == 0)
- LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
- GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
- buffer, pad, probe_data->source, counts[probe_data->av_idx]);
- return GST_PAD_PROBE_DROP;
- }
-
- if (counts[probe_data->av_idx] > 0) {
- counts[probe_data->av_idx] = 0;
- LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
- GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
- }
-
- return GST_PAD_PROBE_OK;
-}
-
-static void __add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
-{
- probe_userdata_s *probe_userdata;
-
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
- RET_IF(pad == NULL, "pad is NULL");
- RET_IF(probe_cb == NULL, "probe_cb is NULL");
-
- probe_userdata = g_new0(probe_userdata_s, 1);
- probe_userdata->source = source;
- probe_userdata->av_idx = idx;
- source->av[idx].src_pad = pad;
- source->av[idx].src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
- probe_cb, probe_userdata, g_free);
-
- LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
- source->id, idx, pad, source->av[idx].src_pad_probe_id, probe_cb);
-}
-
-static void __remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx)
-{
- RET_IF(source == NULL, "source is NULL");
- RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
-
- if (source->av[idx].src_pad_probe_id == 0)
- return;
-
- LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
- source->id, idx, source->av[idx].src_pad, source->av[idx].src_pad_probe_id);
- gst_pad_remove_probe(source->av[idx].src_pad, source->av[idx].src_pad_probe_id);
- gst_element_remove_pad(GST_ELEMENT(source->bin), source->av[idx].src_pad);
- source->av[idx].src_pad_probe_id = 0;
- source->av[idx].src_pad = NULL;
-}
-
static bool __link_switch_srcs(GstElement *switch_element, GList *switch_src_list)
{
GstElement *element;
}
source->av[idx].render.need_decoding = true;
- __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+ _add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), _source_data_probe_cb);
goto skip_encoder;
}
g_object_set(G_OBJECT(source->av[idx].render.appsrc), "caps", sink_caps, NULL);
}
- __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+ _add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), _source_data_probe_cb);
}
if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_SCREEN && !source->zerocopy_enabled) {
encoder = __prepare_encoder(webrtc, source, is_audio);
if (encoder == NULL) {
- __remove_probe_from_pad_for_render(source, idx);
+ _remove_probe_from_pad_for_render(source, idx);
return WEBRTC_ERROR_INVALID_OPERATION;
}
APPEND_ELEMENT(*element_list, encoder);
return WEBRTC_ERROR_NONE;
error:
- __remove_probe_from_pad_for_render(source, idx);
+ _remove_probe_from_pad_for_render(source, idx);
g_free(media_type);
return WEBRTC_ERROR_INVALID_OPERATION;
if (__link_source_with_webrtcbin(source, webrtc->gst.webrtcbin) != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, source->av[AV_IDX_VIDEO].src_pad, __payloaded_data_probe_cb);
+ _add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, source->av[AV_IDX_VIDEO].src_pad, _payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (__link_source_with_webrtcbin(source, webrtc->gst.webrtcbin) != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, source->av[AV_IDX_AUDIO].src_pad, __payloaded_data_probe_cb);
+ _add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, source->av[AV_IDX_AUDIO].src_pad, _payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (_set_ghost_pad_target(src_pad, capsfilter, true) != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad_for_pause(source, av_idx, src_pad, __payloaded_data_probe_cb);
+ _add_probe_to_pad_for_pause(source, av_idx, src_pad, _payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
source->av[av_idx].render.need_decoding = true;
source->av[av_idx].render.appsrc_caps = gst_pad_get_current_caps(pad);
- __add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), __source_data_probe_cb);
+ _add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), _source_data_probe_cb);
GENERATE_DOT(source->webrtc, source->filesrc_pipeline, "%s.%s-%s",
GST_ELEMENT_NAME(source->filesrc_pipeline), GST_ELEMENT_NAME(element), GST_PAD_NAME(pad));
gst_element_foreach_src_pad(GST_ELEMENT(source->bin), __foreach_src_pad_cb, source);
for (i = 0; i < AV_IDX_MAX; i++) {
- __remove_probe_from_pad_for_pause(source, i);
- __remove_probe_from_pad_for_render(source, i);
+ _remove_probe_from_pad_for_pause(source, i);
+ _remove_probe_from_pad_for_render(source, i);
if (source->av[i].pt > 0)
__return_payload_type(source->webrtc, source->av[i].pt);
if (source->av[av_idx].src_pad_probe_id == 0)
continue;
- __remove_probe_from_pad_for_pause(source, av_idx);
- __remove_probe_from_pad_for_render(source, av_idx);
+ _remove_probe_from_pad_for_pause(source, av_idx);
+ _remove_probe_from_pad_for_render(source, av_idx);
if (source->av[av_idx].pt > 0)
__return_payload_type(source->webrtc, source->av[av_idx].pt);
#include "webrtc_private.h"
#include "webrtc_source_private.h"
+typedef struct {
+ int av_idx;
+ webrtc_gst_slot_s *source;
+} probe_userdata_s;
+
const char *_get_audio_format_name(media_format_mimetype_e mime_type)
{
switch (mime_type) {
return WEBRTC_ERROR_NONE;
}
+
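+/* Buffer probe for the payloaded stream of a media source: while the source is paused,
+ * buffers are dropped and the drop is logged only every 10th time via a per-index counter. */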
+GstPadProbeReturn _payloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ GstBuffer *buffer;
+ static unsigned int counts[AV_IDX_MAX] = { 0 };
+
+ RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+ RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+
+ buffer = gst_pad_probe_info_get_buffer(info);
+
+ if (probe_data->source->av[probe_data->av_idx].pause) {
+ if (counts[probe_data->av_idx]++ % 10 == 0)
+ LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
+ buffer, pad, probe_data->source, counts[probe_data->av_idx]);
+ return GST_PAD_PROBE_DROP;
+ }
+
+ if (counts[probe_data->av_idx] > 0) {
+ counts[probe_data->av_idx] = 0;
+ LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
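+/* Installs a GST_PAD_PROBE_TYPE_BUFFER probe used to drop buffers while the source is
+ * paused; the pad and probe id are stored in source->av[idx]. */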
+void _add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+ probe_userdata_s *probe_userdata;
+
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+ RET_IF(pad == NULL, "pad is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->source = source;
+ probe_userdata->av_idx = idx;
+ source->av[idx].src_pad = pad;
+ source->av[idx].src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
+ source->id, idx, pad, source->av[idx].src_pad_probe_id, probe_cb);
+}
+
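+/* Removes the pause probe, if any, and detaches the pad from the source bin. */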
+void _remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+
+ if (source->av[idx].src_pad_probe_id == 0)
+ return;
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
+ source->id, idx, source->av[idx].src_pad, source->av[idx].src_pad_probe_id);
+ gst_pad_remove_probe(source->av[idx].src_pad, source->av[idx].src_pad_probe_id);
+ gst_element_remove_pad(GST_ELEMENT(source->bin), source->av[idx].src_pad);
+ source->av[idx].src_pad_probe_id = 0;
+ source->av[idx].src_pad = NULL;
+}
+
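+/* Buffer probe that feeds the render pipeline of a media source: if a render target is
+ * set (sound stream info for audio, display for video), each buffer is also pushed to
+ * the render pipeline's appsrc. */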
+GstPadProbeReturn _source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ GstBuffer *buffer;
+ GstElement *appsrc;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+ RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+ RET_VAL_IF(probe_data->source == NULL, GST_PAD_PROBE_REMOVE, "probe_data->source is NULL");
+
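+ /* Forward to the render pipeline only when a render target is set for this media type. */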
+ switch (probe_data->av_idx) {
+ case AV_IDX_AUDIO:
+ if (!probe_data->source->sound_stream_info.type)
+ return GST_PAD_PROBE_OK;
+ break;
+ case AV_IDX_VIDEO:
+ if (!probe_data->source->display)
+ return GST_PAD_PROBE_OK;
+ break;
+ default:
+ LOG_ERROR_IF_REACHED("av_idx(%d)", probe_data->av_idx);
+ return GST_PAD_PROBE_OK;
+ }
+
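+ /* Push the buffer to the render pipeline's appsrc; the probe always returns
+ * GST_PAD_PROBE_OK, so the original stream keeps flowing unchanged. */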
+ appsrc = probe_data->source->av[probe_data->av_idx].render.appsrc;
+ if (appsrc) {
+ buffer = gst_pad_probe_info_get_buffer(info);
+ LOG_VERBOSE("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ LOG_ERROR("failed to 'push-buffer', gst_ret[%d]", gst_ret);
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
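+/* Installs a GST_PAD_PROBE_TYPE_BUFFER probe that feeds the render pipeline; the pad and
+ * probe id are stored in source->av[idx].render. */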
+void _add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+ probe_userdata_s *probe_userdata;
+
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+ RET_IF(pad == NULL, "pad is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->source = source;
+ probe_userdata->av_idx = idx;
+ source->av[idx].render.src_pad = pad;
+ source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
+ source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
+}
+
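+/* Removes the render probe, if any, drops the pad reference and clears the cached appsrc caps. */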
+void _remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+
+ if (source->av[idx].render.src_pad_probe_id == 0)
+ return;
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
+ source->id, idx, source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+ gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+ source->av[idx].render.src_pad_probe_id = 0;
+ gst_object_unref(source->av[idx].render.src_pad);
+ source->av[idx].render.src_pad = NULL;
+ if (source->av[idx].render.appsrc_caps) {
+ gst_caps_unref(source->av[idx].render.appsrc_caps);
+ source->av[idx].render.appsrc_caps = NULL;
+ }
+}