LOG_DEBUG("%s is prepended", GST_ELEMENT_NAME(x_element)); \
} while (0)
+/* Map a boolean "is audio?" flag to a media-type string / AV index.
+ * Expansions and parameter uses are fully parenthesized so the macros
+ * stay correct inside larger expressions (e.g. `_av_tbl[GET_AV_IDX(f)]`,
+ * `GET_AV_IDX(a ? b : c)`) — unparenthesized ternaries mis-parse there. */
+#define GET_MEDIA_TYPE_NAME(x_is_audio) ((x_is_audio) ? "audio" : "video")
+#define GET_AV_IDX(x_is_audio) ((x_is_audio) ? AV_IDX_AUDIO : AV_IDX_VIDEO)
+#define GET_AV_IDX_BY_TYPE(x_media_type) GET_AV_IDX((x_media_type) == MEDIA_TYPE_AUDIO)
+
typedef struct {
media_packet_h packet;
GstBuffer *buffer;
[WEBRTC_TRANSCEIVER_DIRECTION_SENDRECV] = { "SENDRECV", GST_WEBRTC_RTP_TRANSCEIVER_DIRECTION_SENDRECV }
};
+/* Per-media GStreamer element names for the filesrc/appsrc pipelines.
+ * Indexed by AV_IDX_AUDIO / AV_IDX_VIDEO (via GET_AV_IDX*), replacing the
+ * repeated `is_audio ? DEFAULT_NAME_AUDIO_X : DEFAULT_NAME_VIDEO_X`
+ * ternaries scattered through this file. */
+typedef struct {
+ const char *appsrc_name;
+ const char *queue_name;
+ const char *payload_name;
+ const char *capsfilter_name;
+ const char *fakesink_name;
+} av_mapping_table_s;
+
+/* NOTE(review): entry order must match the AV_IDX_* enum (audio first,
+ * then video) — confirm against the enum declaration, which is outside
+ * this hunk. */
+static av_mapping_table_s _av_tbl[AV_IDX_MAX] = {
+ { /* AV_IDX_AUDIO */
+ DEFAULT_NAME_AUDIO_APPSRC,
+ DEFAULT_NAME_AUDIO_QUEUE,
+ DEFAULT_NAME_AUDIO_PAYLOAD,
+ DEFAULT_NAME_AUDIO_CAPSFILTER,
+ DEFAULT_NAME_AUDIO_FAKESINK,
+ },
+ { /* AV_IDX_VIDEO */
+ DEFAULT_NAME_VIDEO_APPSRC,
+ DEFAULT_NAME_VIDEO_QUEUE,
+ DEFAULT_NAME_VIDEO_PAYLOAD,
+ DEFAULT_NAME_VIDEO_CAPSFILTER,
+ DEFAULT_NAME_VIDEO_FAKESINK,
+ }
+};
+
static int __link_source_with_webrtcbin(webrtc_gst_slot_s *source, GstElement *webrtcbin);
static const char * __get_audio_media_type(const char *codec_name)
static GstCaps *__make_rtp_caps(const gchar *media_type, unsigned int payload_id)
{
GstCaps *caps;
- bool is_video;
+ bool is_audio;
RET_VAL_IF(media_type == NULL, NULL, "media_type is NULL");
- is_video = (g_strrstr(media_type, "video") || g_strrstr(media_type, "image")) ? TRUE : FALSE;
+ is_audio = !(g_strrstr(media_type, "video") || g_strrstr(media_type, "image"));
caps = gst_caps_new_simple("application/x-rtp",
- "media", G_TYPE_STRING, is_video ? "video" : "audio",
+ "media", G_TYPE_STRING, GET_MEDIA_TYPE_NAME(is_audio),
"payload", G_TYPE_INT, payload_id, NULL);
PRINT_CAPS(caps, "RTP");
LOG_ERROR("not ready for this media_types[0x%x]", source->media_types);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- idx = (source->media_types == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+ idx = GET_AV_IDX_BY_TYPE(source->media_types);
if (need_capsfilter) {
if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER)))
g_free(media_type);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- source->av[source->media_types == MEDIA_TYPE_VIDEO ? AV_IDX_VIDEO : AV_IDX_AUDIO].payload_id = payload_id;
+ source->av[GET_AV_IDX_BY_TYPE(source->media_types)].payload_id = payload_id;
if ((sink_caps = __make_rtp_caps(media_type, payload_id))) {
g_object_set(G_OBJECT(*capsfilter), "caps", sink_caps, NULL);
if (probe_data->source->av[probe_data->av_idx].pause) {
if (counts[probe_data->av_idx]++ % 10 == 0)
LOG_DEBUG("paused, drop [%s] buffer[%p] of pad[%p], source[%p], count[%u]",
- probe_data->av_idx == AV_IDX_AUDIO ? "AUDIO" : "VIDEO",
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO),
buffer, pad, probe_data->source, counts[probe_data->av_idx]);
return GST_PAD_PROBE_DROP;
}
if (counts[probe_data->av_idx] > 0) {
counts[probe_data->av_idx] = 0;
LOG_DEBUG("play again, [%s] buffer[%p] of pad[%p], source[%p]",
- probe_data->av_idx == AV_IDX_AUDIO ? "AUDIO" : "VIDEO", buffer, pad, probe_data->source);
+ GET_MEDIA_TYPE_NAME(probe_data->av_idx == AV_IDX_AUDIO), buffer, pad, probe_data->source);
}
return GST_PAD_PROBE_OK;
static int __build_filesrc_bin(webrtc_gst_slot_s *source, media_type_e media_type)
{
int ret = WEBRTC_ERROR_NONE;
- gboolean is_audio;
GstPad *src_pad = NULL;
GstElement *appsrc = NULL;
GstElement *queue = NULL;
GstElement *capsfilter = NULL;
+ int idx = GET_AV_IDX_BY_TYPE(media_type);
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- is_audio = (media_type == MEDIA_TYPE_AUDIO) ? TRUE : FALSE;
-
source->media_types |= media_type;
ret = _add_no_target_ghostpad_to_slot(source, true, &src_pad);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
- if (!(appsrc = _create_element("appsrc", is_audio ? DEFAULT_NAME_AUDIO_APPSRC : DEFAULT_NAME_VIDEO_APPSRC)))
+ if (!(appsrc = _create_element("appsrc", _av_tbl[idx].appsrc_name)))
return WEBRTC_ERROR_INVALID_OPERATION;
g_object_set(G_OBJECT(appsrc),
"format", GST_FORMAT_TIME,
NULL);
- if (!(queue = _create_element("queue", is_audio ? DEFAULT_NAME_AUDIO_QUEUE : DEFAULT_NAME_VIDEO_QUEUE))) {
+ if (!(queue = _create_element("queue", _av_tbl[idx].queue_name))) {
SAFE_GST_OBJECT_UNREF(appsrc);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!(capsfilter = _create_element("capsfilter", is_audio ? DEFAULT_NAME_AUDIO_CAPSFILTER : DEFAULT_NAME_VIDEO_CAPSFILTER))) {
+ if (!(capsfilter = _create_element("capsfilter", _av_tbl[idx].capsfilter_name))) {
SAFE_GST_OBJECT_UNREF(appsrc);
SAFE_GST_OBJECT_UNREF(queue);
return WEBRTC_ERROR_INVALID_OPERATION;
if (ret != WEBRTC_ERROR_NONE)
goto error;
- __add_probe_to_pad_for_pause(source, is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, idx, src_pad, __payloaded_data_probe_cb);
return WEBRTC_ERROR_NONE;
static void __remove_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
{
GstBin *bin = NULL;
-
GstElement *payload = NULL;
GstElement *capsfilter = NULL;
GstElement *fakesink = NULL;
+ int idx = GET_AV_IDX(is_audio);
RET_IF(source == NULL, "pad is NULL");
bin = GST_BIN(source->filesrc_pipeline);
- payload = gst_bin_get_by_name(bin, is_audio ? DEFAULT_NAME_AUDIO_PAYLOAD: DEFAULT_NAME_VIDEO_PAYLOAD);
+ payload = gst_bin_get_by_name(bin, _av_tbl[idx].payload_name);
RET_IF(payload == NULL, "payload is NULL");
- capsfilter = gst_bin_get_by_name(bin, is_audio ? DEFAULT_NAME_AUDIO_CAPSFILTER : DEFAULT_NAME_VIDEO_CAPSFILTER);
+ capsfilter = gst_bin_get_by_name(bin, _av_tbl[idx].capsfilter_name);
RET_IF(capsfilter == NULL, "capsfilter is NULL");
- fakesink = gst_bin_get_by_name(bin, is_audio ? DEFAULT_NAME_AUDIO_FAKESINK: DEFAULT_NAME_VIDEO_FAKESINK);
+ fakesink = gst_bin_get_by_name(bin, _av_tbl[idx].fakesink_name);
RET_IF(fakesink == NULL, "fakesink is NULL");
gst_bin_remove_many(bin, payload, capsfilter, fakesink, NULL);
RET_VAL_IF(payload == NULL, NULL, "payload is NULL");
- gst_element_set_name(payload, is_audio ? DEFAULT_NAME_AUDIO_PAYLOAD : DEFAULT_NAME_VIDEO_PAYLOAD);
+ gst_element_set_name(payload, _av_tbl[GET_AV_IDX(is_audio)].payload_name);
return payload;
}
RET_VAL_IF(source == NULL, NULL, "source is NULL");
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, is_audio ? DEFAULT_NAME_AUDIO_CAPSFILTER : DEFAULT_NAME_VIDEO_CAPSFILTER)))
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _av_tbl[GET_AV_IDX(is_audio)].capsfilter_name)))
return NULL;
payload_id = __get_available_payload_id(source->webrtc);
return NULL;
}
- source->av[is_audio ? AV_IDX_AUDIO : AV_IDX_VIDEO].payload_id = payload_id;
+ source->av[GET_AV_IDX(is_audio)].payload_id = payload_id;
- if ((sink_caps = __make_rtp_caps(is_audio ? "audio" : "video", payload_id))) {
+ if ((sink_caps = __make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), payload_id))) {
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
RET_VAL_IF(source == NULL, NULL, "source is NULL");
- if (!(fakesink = _create_element("fakesink", is_audio ? DEFAULT_NAME_AUDIO_FAKESINK : DEFAULT_NAME_VIDEO_FAKESINK)))
+ if (!(fakesink = _create_element("fakesink", _av_tbl[GET_AV_IDX(is_audio)].fakesink_name)))
return NULL;
sink_pad = gst_element_get_static_pad(fakesink, "sink");
"signal-handoffs", TRUE,
NULL);
- g_signal_connect(fakesink, "handoff", is_audio ? G_CALLBACK(__filesrc_pipeline_audio_stream_handoff_cb) : G_CALLBACK(__filesrc_pipeline_video_stream_handoff_cb), (gpointer)source);
+ g_signal_connect(fakesink,
+ "handoff",
+ G_CALLBACK(is_audio ? __filesrc_pipeline_audio_stream_handoff_cb : __filesrc_pipeline_video_stream_handoff_cb),
+ (gpointer)source);
return fakesink;
}
RET_VAL_IF(pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "pad is NULL");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- payload = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), is_audio ? DEFAULT_NAME_AUDIO_PAYLOAD : DEFAULT_NAME_VIDEO_PAYLOAD);
+ payload = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _av_tbl[GET_AV_IDX(is_audio)].payload_name);
if (!payload) {
- LOG_ERROR("failed to get element [%s]", is_audio ? DEFAULT_NAME_AUDIO_PAYLOAD : DEFAULT_NAME_VIDEO_PAYLOAD);
+ LOG_ERROR("failed to get element [%s]", _av_tbl[GET_AV_IDX(is_audio)].payload_name);
return WEBRTC_ERROR_INVALID_OPERATION;
}
}
is_audio = (g_strrstr(media_type, "audio")) ? TRUE : FALSE;
- av_idx = (is_audio) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+ av_idx = GET_AV_IDX(is_audio);
if (source->av[av_idx].src_pad_probe_id > 0) {
LOG_INFO("Pipeline already built");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
- src_pad = (source->media_types == MEDIA_TYPE_AUDIO) ? &source->av[AV_IDX_AUDIO].src_pad : &source->av[AV_IDX_VIDEO].src_pad;
+ src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
RET_VAL_IF(source->media_format == NULL, WEBRTC_ERROR_INVALID_OPERATION, "media_format is NULL");
- src_pad = (source->media_types == MEDIA_TYPE_AUDIO) ? &source->av[AV_IDX_AUDIO].src_pad : &source->av[AV_IDX_VIDEO].src_pad;
+ src_pad = &source->av[GET_AV_IDX_BY_TYPE(source->media_types)].src_pad;
ret = _add_no_target_ghostpad_to_slot(source, true, src_pad);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
}
if (ret == WEBRTC_ERROR_NONE)
- source->video_muted = mute ? true : false;
+ source->video_muted = mute;
return ret;
}
static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source, media_type_e type, unsigned int *track_id)
{
- int idx = (type == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
GstElement *appsrc;
gchar *pipeline_name;
+ int idx = GET_AV_IDX_BY_TYPE(type);
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->display->surface == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->surface is NULL");
}
- pipeline_name = g_strdup_printf("webrtc-source_%u-%s-render-pipeline", source->id, type == MEDIA_TYPE_AUDIO ? "audio" : "video");
+ pipeline_name = g_strdup_printf("webrtc-source_%u-%s-render-pipeline", source->id, GET_MEDIA_TYPE_NAME(type == MEDIA_TYPE_AUDIO));
source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
g_free(pipeline_name);
RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");