} webrtc_resource_s;
#endif
typedef struct _ini_item_general_s {
	bool generate_dot;
	const char *dot_path;
+	bool dump_incoming_streams;
+	const char *dump_path;
	int stats_log_period;
} filesrc_av[AV_IDX_MAX];
webrtc_display_s *display;
+
+ struct {
+ gchar *path;
+ FILE *fp;
+ } dump;
} webrtc_gst_slot_s;
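+/* user data shared by the buffer probe callbacks of source and sink slots */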
+typedef struct {
+ int av_idx;
+ webrtc_gst_slot_s *slot;
+} probe_userdata_s;
+
typedef struct _webrtc_data_channel_s {
webrtc_s *webrtc;
GMutex mutex;
int _set_display_visible_to_sink(webrtc_s *webrtc, unsigned int track_id, bool visible);
int _get_display_visible_from_sink(webrtc_s *webrtc, unsigned int track_id, bool *visible);
+/* sink dump */
+GstPadProbeReturn _depayloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data);
+void _add_probe_to_pad_for_dump(webrtc_gst_slot_s *sink, GstElement *element, void *probe_cb);
+void _remove_probe_from_pad_for_dump(webrtc_gst_slot_s *sink);
+
/* display */
void _video_stream_decoded_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data);
int _apply_display(webrtc_display_s *display);
Name: capi-media-webrtc
Summary: A WebRTC library in Tizen Native API
-Version: 0.3.198
+Version: 0.3.199
Release: 0
Group: Multimedia/API
License: Apache-2.0
}
}
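+/* returns true if the element factory's klass metadata classifies it as an RTP depayloader */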
+static bool __is_rtp_depayloader(GstElementFactory *factory)
+{
+ const gchar *klass;
+
+ RET_VAL_IF(factory == NULL, false, "factory is NULL");
+
+ klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS);
+
+	return g_strrstr(klass, "Codec/Depayloader/Network/RTP") != NULL;
+}
+
static bool __is_factory_name_for_hw(gchar *factory_name)
{
RET_VAL_IF(factory_name == NULL, FALSE, "factory_name is NULL");
"use-inband-fec", TRUE,
NULL);
}
+
} else if (g_strrstr(factory_name, "h264parse")) {
/* send SPS and PPS Insertion Interval every second */
g_object_set(G_OBJECT(element), "config-interval", 1, NULL);
+ _add_probe_to_pad_for_dump(sink, element, _depayloaded_data_probe_cb);
+ }
+
+	/* for other codecs, add the dump probe right after the RTP depayloader (H.264 is handled above after h264parse) */
+ if (__is_rtp_depayloader(factory) && !g_strrstr(factory_name, "h264")) {
+ _add_probe_to_pad_for_dump(sink, element, _depayloaded_data_probe_cb);
}
sink->av[AV_IDX_AUDIO].render.hw_decoder_used = __is_hw_decoder_element(factory, true);
if (sink->sound_stream_info.type)
free(sink->sound_stream_info.type);
+ _remove_probe_from_pad_for_dump(sink);
+
g_free(sink);
}
--- /dev/null
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc.h"
+#include "webrtc_private.h"
+
+//LCOV_EXCL_START
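+/* maps the buffer for reading and appends its contents to the opened dump file */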
+static void __dump_buffer_data_to_file(FILE *fp, GstBuffer *buffer)
+{
+ GstMapInfo map_info = GST_MAP_INFO_INIT;
+
+ RET_IF(fp == NULL, "fp is NULL");
+ RET_IF(buffer == NULL, "buffer is NULL");
+ RET_IF(gst_buffer_get_size(buffer) == 0, "empty buffer");
+
+	if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ)) {
+		LOG_ERROR("failed to gst_buffer_map()");
+		return;
+	}
+
+	fwrite(map_info.data, map_info.size, 1, fp);
+	gst_buffer_unmap(buffer, &map_info);
+}
+
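+/* buffer probe attached to a depayloader (or h264parse) src pad: on the first buffer it builds
+ * the dump file path under ini's dump_path and opens it, then appends every buffer to that file */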
+GstPadProbeReturn _depayloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+	webrtc_gst_slot_s *sink;
+
+	RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+
+	sink = probe_data->slot;
+
+	if (!sink->webrtc->ini.general.dump_incoming_streams)
+		return GST_PAD_PROBE_OK;
+
+	if (!sink->dump.path) {
+		/* first buffer: build the dump file path from the pad's media type and open the file */
+		g_autofree gchar *media_type = _get_mime_type_from_pad(pad);
+		g_auto(GStrv) str_arr = g_strsplit((const gchar *)media_type, "/", 2);
+
+		sink->dump.path = g_strdup_printf("%s/webrtc-%p.sink%u.%s.%s.dump",
+			sink->webrtc->ini.general.dump_path, sink->webrtc, sink->id,
+			probe_data->av_idx == AV_IDX_AUDIO ? "audio" : "video", str_arr[1]);
+		sink->dump.fp = fopen(sink->dump.path, "w");
+		if (!sink->dump.fp) {
+			LOG_ERROR("failed to fopen() for %s, error:%s", sink->dump.path, g_strerror(errno));
+			g_free(sink->dump.path);
+			sink->dump.path = NULL;
+			return GST_PAD_PROBE_OK;
+		}
+		LOG_WARNING("dump[path:%s, fp:%p]", sink->dump.path, sink->dump.fp);
+	}
+
+	__dump_buffer_data_to_file(sink->dump.fp, gst_pad_probe_info_get_buffer(info));
+
+ return GST_PAD_PROBE_OK;
+}
+
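+/* attaches a dump probe to the element's src pad when incoming stream dump is enabled in the ini */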
+void _add_probe_to_pad_for_dump(webrtc_gst_slot_s *sink, GstElement *element, void *probe_cb)
+{
+ GstPad *pad;
+ probe_userdata_s *probe_userdata;
+ g_autofree gchar *media_type = NULL;
+ unsigned int idx;
+
+ RET_IF(sink == NULL, "sink is NULL");
+ RET_IF(element == NULL, "element is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ if (!sink->webrtc->ini.general.dump_incoming_streams)
+ return;
+
+ pad = gst_element_get_static_pad(element, "src");
+ media_type = _get_mime_type_from_pad(pad);
+
+ if (!_is_supported_media_type(media_type)) {
+ gst_object_unref(pad);
+ return;
+ }
+
+ idx = _is_audio_media_type(media_type) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->slot = sink;
+ probe_userdata->av_idx = idx;
+ sink->av[idx].src_pad = pad;
+ sink->av[idx].src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe[id:%lu, callback:%p]",
+ sink->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), pad, sink->av[idx].src_pad_probe_id, probe_cb);
+}
+
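+/* detaches the dump probes added by _add_probe_to_pad_for_dump() and closes the dump file */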
+void _remove_probe_from_pad_for_dump(webrtc_gst_slot_s *sink)
+{
+ int idx;
+
+ RET_IF(sink == NULL, "sink is NULL");
+
+ for (idx = AV_IDX_AUDIO; idx < AV_IDX_MAX; idx++) {
+ if (sink->av[idx].src_pad_probe_id == 0)
+ continue;
+
+ LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe_id[%lu]",
+ sink->id, GET_MEDIA_TYPE_NAME(idx == AV_IDX_AUDIO), sink->av[idx].src_pad, sink->av[idx].src_pad_probe_id);
+
+ gst_pad_remove_probe(sink->av[idx].src_pad, sink->av[idx].src_pad_probe_id);
+ gst_object_unref(sink->av[idx].src_pad);
+
+ sink->av[idx].src_pad_probe_id = 0;
+ sink->av[idx].src_pad = NULL;
+
+ if (sink->dump.fp) {
+ LOG_WARNING("dump[path:%s, fp:%p] is closed", sink->dump.path, sink->dump.fp);
+ fclose(sink->dump.fp);
+ sink->dump.fp = NULL;
+ }
+ if (sink->dump.path) {
+ g_free(sink->dump.path);
+ sink->dump.path = NULL;
+ }
+ }
+}
+//LCOV_EXCL_STOP
\ No newline at end of file
#include "webrtc_private.h"
#include "webrtc_source_private.h"
-typedef struct {
- int av_idx;
- webrtc_gst_slot_s *source;
-} probe_userdata_s;
-
static rtp_payload_info_s __payload_info[] = {
{ WEBRTC_TRANSCEIVER_CODEC_PCMU, MEDIA_TYPE_AUDIO_MULAW, "audio", "PCMU", 8000 },
{ WEBRTC_TRANSCEIVER_CODEC_PCMA, MEDIA_TYPE_AUDIO_ALAW, "audio", "PCMA", 8000 },
buffer = gst_pad_probe_info_get_buffer(info);
- if (probe_data->source->av[probe_data->av_idx].pause) {
+ if (probe_data->slot->av[probe_data->av_idx].pause) {
if (counts[probe_data->av_idx]++ % 10 == 0)
LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
- buffer, pad, probe_data->source, counts[probe_data->av_idx]);
+ buffer, pad, probe_data->slot, counts[probe_data->av_idx]);
return GST_PAD_PROBE_DROP;
}
if (counts[probe_data->av_idx] > 0) {
counts[probe_data->av_idx] = 0;
LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
- GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->slot);
}
return GST_PAD_PROBE_OK;
RET_IF(probe_cb == NULL, "probe_cb is NULL");
probe_userdata = g_new0(probe_userdata_s, 1);
- probe_userdata->source = source;
+ probe_userdata->slot = source;
probe_userdata->av_idx = idx;
source->av[idx].src_pad = pad;
source->av[idx].src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
- RET_VAL_IF(probe_data->source == NULL, GST_PAD_PROBE_REMOVE, "probe_data->source is NULL");
+ RET_VAL_IF(probe_data->slot == NULL, GST_PAD_PROBE_REMOVE, "probe_data->slot is NULL");
switch (probe_data->av_idx) {
case AV_IDX_AUDIO:
- if (!probe_data->source->sound_stream_info.type)
+ if (!probe_data->slot->sound_stream_info.type)
return GST_PAD_PROBE_OK;
break;
case AV_IDX_VIDEO:
- if (!probe_data->source->display)
+ if (!probe_data->slot->display)
return GST_PAD_PROBE_OK;
break;
default:
return GST_PAD_PROBE_OK;
}
- appsrc = probe_data->source->av[probe_data->av_idx].render.appsrc;
+ appsrc = probe_data->slot->av[probe_data->av_idx].render.appsrc;
if (appsrc) {
buffer = gst_pad_probe_info_get_buffer(info);
LOG_VERBOSE("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
RET_IF(probe_cb == NULL, "probe_cb is NULL");
probe_userdata = g_new0(probe_userdata_s, 1);
- probe_userdata->source = source;
+ probe_userdata->slot = source;
probe_userdata->av_idx = idx;
source->av[idx].render.src_pad = pad;
source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,