int _set_media_path(webrtc_s *webrtc, unsigned int source_id, const char *path);
int _set_screen_source_crop(webrtc_s *webrtc, unsigned int source_id, int x, int y, int w, int h, bool portrait_mode, int *width, int *height);
int _unset_screen_source_crop(webrtc_s *webrtc, unsigned int source_id);
+
+/* file source */
int _gst_filesrc_pipeline_set_state(webrtc_s *webrtc, GstState state);
int _set_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool looping);
int _get_filesrc_looping(webrtc_s *webrtc, unsigned int source_id, bool *looping);
int _remove_filesrc_pad_block_probe(webrtc_s *webrtc);
void _set_filesrc_media_types(webrtc_gst_slot_s *source, const char *path);
+int _build_filesrc_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source);
+void _destroy_filesrc_pipeline(webrtc_gst_slot_s *source);
+void _remove_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio);
+
+/* media packet src */
int _build_mediapacketsrc(webrtc_s *webrtc, webrtc_gst_slot_s *source);
int _complete_rest_of_mediapacketsrc(webrtc_gst_slot_s *source, GstPad **src_pad, GstElement *appsrc, GList *element_list);
int _complete_mediapacketsrc_from_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source);
GstCaps *_make_mediapacketsrc_raw_caps_from_media_format(webrtc_gst_slot_s *source);
int _push_media_packet(webrtc_s *webrtc, unsigned int source_id, media_packet_h packet);
+
bool _check_if_path_is_set_to_file_sources(webrtc_s *webrtc);
int _set_rtp_packet_drop_probability(webrtc_s *webrtc, unsigned int source_id, float probability);
int _get_rtp_packet_drop_probability(webrtc_s *webrtc, unsigned int source_id, float *probability);
Name: capi-media-webrtc
Summary: A WebRTC library in Tizen Native API
-Version: 0.3.169
+Version: 0.3.170
Release: 0
Group: Multimedia/API
License: Apache-2.0
return WEBRTC_ERROR_NONE;
}
-
-static int __build_filesrc_bin(webrtc_gst_slot_s *source, media_type_e media_type)
-{
- int ret = WEBRTC_ERROR_NONE;
- GstPad *src_pad = NULL;
- GstElement *appsrc = NULL;
- GstElement *queue = NULL;
- GstElement *capsfilter = NULL;
- GList *element_list = NULL;
- const int av_idx = GET_AV_IDX_BY_TYPE(media_type);
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
-
- source->media_types |= media_type;
-
- ret = _add_no_target_ghostpad_to_slot(source, true, &src_pad);
- RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
-
- if (!(appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, _get_element_name(av_idx, ELEMENT_APPSRC))))
- return WEBRTC_ERROR_INVALID_OPERATION;
- APPEND_ELEMENT(element_list, appsrc);
-
- g_object_set(G_OBJECT(appsrc),
- "is-live", TRUE,
- "format", GST_FORMAT_TIME,
- NULL);
-
- if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _get_element_name(av_idx, ELEMENT_QUEUE))))
- goto exit;
- APPEND_ELEMENT(element_list, queue);
-
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _get_element_name(av_idx, ELEMENT_CAPSFILTER))))
- goto exit;
- APPEND_ELEMENT(element_list, capsfilter);
-
- if (!_add_elements_to_bin(source->bin, element_list)) {
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (!_link_elements(element_list))
- goto exit_with_remove_from_bin;
-
- if (!_sync_elements_state_with_parent(element_list))
- goto exit_with_remove_from_bin;
-
- if (_set_ghost_pad_target(src_pad, capsfilter, true) != WEBRTC_ERROR_NONE)
- goto exit_with_remove_from_bin;
-
- _add_probe_to_pad_for_pause(source, av_idx, src_pad, _payloaded_data_probe_cb);
-
- SAFE_G_LIST_FREE(element_list);
-
- return WEBRTC_ERROR_NONE;
-
-exit_with_remove_from_bin:
- _remove_elements_from_bin(source->bin, element_list);
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-static void __remove_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
-{
- GstBin *bin;
- GstElement *queue;
- GstElement *payloader;
- GstElement *capsfilter;
- GstElement *fakesink;
- GList *element_list = NULL;
- int av_idx = GET_AV_IDX(is_audio);
-
- RET_IF(source == NULL, "pad is NULL");
- RET_IF(source->webrtc == NULL, "webrtc is NULL");
- RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
-
- bin = GST_BIN(source->filesrc_pipeline);
-
- if ((queue = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_QUEUE))))
- APPEND_ELEMENT(element_list, queue);
- else
- LOG_ERROR("queue is NULL");
-
- if ((payloader = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_PAYLOADER))))
- APPEND_ELEMENT(element_list, payloader);
- else
- LOG_ERROR("payloader is NULL");
-
- if ((capsfilter = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_CAPSFILTER))))
- APPEND_ELEMENT(element_list, capsfilter);
- else
- LOG_ERROR("capsfilter is NULL");
-
- if ((fakesink = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_FAKESINK))))
- APPEND_ELEMENT(element_list, fakesink);
- else
- LOG_ERROR("fakesink is NULL");
-
- _remove_elements_from_bin(bin, element_list);
-
- SAFE_G_LIST_FREE(element_list);
-}
-
-static void __filesrc_pipeline_audio_stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
-{
- webrtc_gst_slot_s *source = data;
- GstFlowReturn gst_ret = GST_FLOW_OK;
-
- g_signal_emit_by_name(gst_bin_get_by_name(source->bin, _get_element_name(AV_IDX_AUDIO, ELEMENT_APPSRC)), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK)
- LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
-}
-
-static void __filesrc_pipeline_video_stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
-{
- webrtc_gst_slot_s *source = data;
- GstFlowReturn gst_ret = GST_FLOW_OK;
-
- g_signal_emit_by_name(gst_bin_get_by_name(source->bin, _get_element_name(AV_IDX_VIDEO, ELEMENT_APPSRC)), "push-buffer", buffer, &gst_ret, NULL);
- if (gst_ret != GST_FLOW_OK)
- LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
-}
-
-static GstPadProbeReturn __fakesink_block_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
-{
- webrtc_gst_slot_s *source = u_data;
- gchar *media_type = NULL;
-
- media_type = _get_media_type_from_pad(pad);
- RET_VAL_IF(media_type == NULL, GST_PAD_PROBE_OK, "media_type is NULL");
-
- LOG_DEBUG("source[%p, id:%u] fakesink pad[%p] for [%s] is blocked", source, source->id, pad, media_type);
- g_free(media_type);
-
- return GST_PAD_PROBE_OK;
-}
-
-static GstPadProbeReturn __fakesink_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
-{
- webrtc_gst_slot_s *source = u_data;
- GstCaps *caps = NULL;
- GstElement *appsrc = NULL;
- gchar *media_type = NULL;
- int av_idx;
-
- media_type = _get_media_type_from_pad(pad);
- RET_VAL_IF(media_type == NULL, GST_PAD_PROBE_OK, "media_type is NULL");
-
- if (!_is_supported_media_type(media_type)) {
- g_free(media_type);
- return GST_PAD_PROBE_OK;
- }
-
- av_idx = GET_AV_IDX(_is_audio_media_type(media_type));
- g_free(media_type);
-
- appsrc = gst_bin_get_by_name(source->bin, _get_element_name(av_idx, ELEMENT_APPSRC));
- RET_VAL_IF(appsrc == NULL, GST_PAD_PROBE_OK, "There is no appsrc for [%s]", (av_idx == AV_IDX_AUDIO) ? "audio" : "video");
-
- caps = gst_pad_get_current_caps(pad);
- g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
-
- LOG_INFO("setting caps for [%s appsrc] successfully", (av_idx == AV_IDX_AUDIO) ? "audio" : "video");
- PRINT_CAPS(caps, "appsrc");
-
- source->filesrc_av[av_idx].sink_pad = pad;
- source->filesrc_av[av_idx].sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCK,
- __fakesink_block_probe_cb, source, NULL);
- source->av[av_idx].codec = gst_structure_get_string(gst_caps_get_structure(caps, 0), "encoding-name");
-
- if (source->av[av_idx].direction == WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY) {
- rtp_payload_info_s payload_info = {
- .media_type = _get_media_type_from_pad(pad),
- .encoding_name = source->av[av_idx].codec,
- };
- gst_structure_get_int(gst_caps_get_structure(caps, 0), "clock-rate", &payload_info.clock_rate);
- _add_transceiver(source, (av_idx == AV_IDX_AUDIO) ? WEBRTC_MEDIA_TYPE_AUDIO : WEBRTC_MEDIA_TYPE_VIDEO, &payload_info);
- g_free((gchar *)payload_info.media_type);
- }
- gst_caps_unref(caps);
-
- return GST_PAD_PROBE_REMOVE;
-}
-
-static GstElement * __create_payloader_for_filesrc_pipeline(GstPad *pad, bool is_audio)
-{
- element_info_s elem_info;
- GstElement *payloader = NULL;
-
- RET_VAL_IF(pad == NULL, NULL, "pad is NULL");
-
- CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
- gst_pad_get_current_caps(pad),
- NULL,
- NULL,
- payloader);
- RET_VAL_IF(payloader == NULL, NULL, "payloader is NULL");
-
- gst_element_set_name(payloader, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_PAYLOADER));
-
- return payloader;
-}
-
-static GstElement * __prepare_capsfilter_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
-{
- GstElement *capsfilter = NULL;
- GstCaps *sink_caps = NULL;
-
- RET_VAL_IF(source == NULL, NULL, "source is NULL");
-
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_CAPSFILTER))))
- return NULL;
-
- if(_set_payload_type(source->webrtc, source, GET_AV_IDX(is_audio), NULL) != WEBRTC_ERROR_NONE) {
- SAFE_GST_OBJECT_UNREF(capsfilter);
- return NULL;
- }
-
- if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, source))) {
- g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
- gst_caps_unref(sink_caps);
- }
-
- return capsfilter;
-}
-
-static GstElement * __prepare_fakesink_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
-{
- GstElement *fakesink = NULL;
- GstPad *sink_pad = NULL;
-
- RET_VAL_IF(source == NULL, NULL, "source is NULL");
-
- if (!(fakesink = _create_element(DEFAULT_ELEMENT_FAKESINK, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_FAKESINK))))
- return NULL;
-
- sink_pad = gst_element_get_static_pad(fakesink, "sink");
- gst_pad_add_probe(sink_pad, GST_PAD_PROBE_TYPE_BUFFER, __fakesink_probe_cb, source, NULL);
- gst_object_unref(sink_pad);
-
- g_object_set(G_OBJECT(fakesink),
- "sync", TRUE,
- "signal-handoffs", TRUE,
- NULL);
-
- g_signal_connect(fakesink,
- "handoff",
- G_CALLBACK(is_audio ? __filesrc_pipeline_audio_stream_handoff_cb : __filesrc_pipeline_video_stream_handoff_cb),
- (gpointer)source);
-
- return fakesink;
-}
-
-static int __create_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, GstPad *pad, bool is_audio)
-{
- GstBin *bin;
- GstElement *queue;
- GstElement *payloader;
- GstElement *capsfilter;
- GstElement *fakesink;
- GList *element_list = NULL;
-
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "filesrc_pipeline is NULL");
-
- bin = GST_BIN(source->filesrc_pipeline);
-
- if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE))))
- return WEBRTC_ERROR_INVALID_OPERATION;
- APPEND_ELEMENT(element_list, queue);
-
- if (!(payloader = __create_payloader_for_filesrc_pipeline(pad, is_audio)))
- goto exit;
- APPEND_ELEMENT(element_list, payloader);
-
- if (!(capsfilter = __prepare_capsfilter_for_filesrc_pipeline(source, is_audio)))
- goto exit;
- APPEND_ELEMENT(element_list, capsfilter);
-
- if (!(fakesink = __prepare_fakesink_for_filesrc_pipeline(source, is_audio)))
- goto exit;
- APPEND_ELEMENT(element_list, fakesink);
-
- if (!_add_elements_to_bin(bin, element_list)) {
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (!_link_elements(element_list))
- goto exit_with_remove_from_bin;
-
- if (!_sync_elements_state_with_parent(element_list))
- goto exit_with_remove_from_bin;
-
- SAFE_G_LIST_FREE(element_list);
-
- return WEBRTC_ERROR_NONE;
-
-exit_with_remove_from_bin:
- _remove_elements_from_bin(bin, element_list);
- SAFE_G_LIST_FREE(element_list);
- return WEBRTC_ERROR_INVALID_OPERATION;
-exit:
- SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
- return WEBRTC_ERROR_INVALID_OPERATION;
-}
-
-static int __link_decodebin_with_queue(GstPad *pad, webrtc_gst_slot_s *source, bool is_audio)
-{
- GstElement *queue = NULL;
- GstPad *sink_pad = NULL;
-
- RET_VAL_IF(pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "pad is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
-
- queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
- if (!queue) {
- LOG_ERROR("failed to get element [%s]", _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- sink_pad = gst_element_get_static_pad(queue, "sink");
- if (!sink_pad) {
- LOG_ERROR("sink_pad is NULL for [%s]", GST_ELEMENT_NAME(queue));
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- if (gst_pad_link(pad, sink_pad) != GST_PAD_LINK_OK) {
- LOG_ERROR("failed to gst_pad_link()");
- g_object_unref(sink_pad);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
-
- g_object_unref(sink_pad);
-
- LOG_INFO("decodebin is linked to [%s]", GST_ELEMENT_NAME(queue));
-
- return WEBRTC_ERROR_NONE;
-}
-
-static void __filesrc_pipeline_decodebin_pad_added_cb(GstElement *element, GstPad *pad, gpointer data)
-{
- int ret = WEBRTC_ERROR_NONE;
- webrtc_gst_slot_s *source = data;
- gchar *media_type = NULL;
- bool is_audio;
- int av_idx;
- GstElement *queue = NULL;
-
- RET_IF(source == NULL, "source is NULL");
- RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
-
- media_type = _get_mime_type_from_pad(pad);
- RET_IF(media_type == NULL, "media_type is NULL");
-
- if (!_is_supported_media_type(media_type)) {
- g_free(media_type);
- return;
- }
-
- LOG_INFO("[%s] new_pad[%s] media_type[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad), media_type);
-
- is_audio = _is_audio_media_type(media_type);
- av_idx = GET_AV_IDX(is_audio);
-
- g_free(media_type);
-
- if (source->av[av_idx].src_pad_probe_id > 0) {
- LOG_INFO("Pipeline already built");
- ret = __link_decodebin_with_queue(pad, source, is_audio);
- if (ret != WEBRTC_ERROR_NONE)
- LOG_ERROR("failed to __link_decodebin_with_queue()");
- return;
- }
-
- ret = __create_rest_of_elements_for_filesrc_pipeline(source, pad, is_audio);
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to __create_rest_of_elements_for_filesrc_pipeline()");
- return;
- }
-
- queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
- RET_IF(queue == NULL, "queue is NULL");
-
- ret = __link_decodebin_with_queue(pad, source, is_audio);
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to __link_decodebin_with_queue()");
- __remove_rest_of_elements_for_filesrc_pipeline(source, is_audio);
- return;
- }
-
- ret = __build_filesrc_bin(source, is_audio ? MEDIA_TYPE_AUDIO : MEDIA_TYPE_VIDEO);
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to __build_filesrc_bin()");
- __remove_rest_of_elements_for_filesrc_pipeline(source, is_audio);
- return;
- }
-
- ret = _link_source_with_webrtcbin(source, source->webrtc->gst.webrtcbin);
- if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to _link_source_with_webrtcbin()");
- __remove_rest_of_elements_for_filesrc_pipeline(source, is_audio);
- g_hash_table_remove(source->webrtc->gst.source_slots, GST_ELEMENT_NAME(source->bin));
- return;
- }
-
- source->av[av_idx].render.need_decoding = true;
- source->av[av_idx].render.appsrc_caps = gst_pad_get_current_caps(pad);
- _add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), _source_data_probe_cb);
-
- GENERATE_DOT(source->webrtc, source->filesrc_pipeline, "%s.%s-%s",
- GST_ELEMENT_NAME(source->filesrc_pipeline), GST_ELEMENT_NAME(element), GST_PAD_NAME(pad));
-}
-
-static GstAutoplugSelectResult __filesrc_pipeline_decodebin_autoplug_select_cb(GstElement *bin, GstPad *pad, GstCaps *caps, GstElementFactory* factory, gpointer udata)
-{
- const gchar *klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS);
-
- if (g_strrstr(klass, "Codec/Decoder")) {
- LOG_INFO("expose [%s]", klass);
- return GST_AUTOPLUG_SELECT_EXPOSE;
- }
-
- return GST_AUTOPLUG_SELECT_TRY;
-}
-
-static gboolean __filesrc_pipeline_bus_watch_cb(GstBus *bus, GstMessage *message, gpointer user_data)
-{
- webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
- GError *err = NULL;
- GstState gst_state_old = GST_STATE_VOID_PENDING;
- GstState gst_state_new = GST_STATE_VOID_PENDING;
- GstState gst_state_pending = GST_STATE_VOID_PENDING;
-
- RET_VAL_IF(source == NULL, FALSE, "source is NULL");
- RET_VAL_IF(source->filesrc_pipeline == NULL, FALSE, "pipeline is NULL");
-
- if (message == NULL) {
- LOG_DEBUG("message is null");
- return TRUE;
- }
-
- switch (GST_MESSAGE_TYPE(message)) {
- case GST_MESSAGE_ERROR:
- gst_message_parse_error(message, &err, NULL);
-
- LOG_ERROR("Error[from %s]: message[%s], code[%d]",
- GST_OBJECT_NAME(GST_OBJECT_CAST(GST_ELEMENT(GST_MESSAGE_SRC(message)))), err->message, err->code);
-
- g_error_free(err);
- break;
-
- case GST_MESSAGE_STATE_CHANGED:
- if (GST_MESSAGE_SRC(message) != GST_OBJECT(source->filesrc_pipeline))
- return TRUE;
-
- gst_message_parse_state_changed(message, &gst_state_old, &gst_state_new, &gst_state_pending);
-
- LOG_INFO("GST_MESSAGE_STATE_CHANGED: Old[GST_STATE_%s] New[GST_STATE_%s] Pending[GST_STATE_%s]",
- gst_element_state_get_name(gst_state_old), gst_element_state_get_name(gst_state_new),
- gst_element_state_get_name(gst_state_pending));
- break;
-
- case GST_MESSAGE_ASYNC_DONE:
- if (GST_MESSAGE_SRC(message) != GST_OBJECT(source->filesrc_pipeline))
- return TRUE;
-
- LOG_INFO("GST_MESSAGE_ASYNC_DONE");
- break;
-
- case GST_MESSAGE_EOS:
- LOG_INFO("GST_MESSAGE_EOS end-of-stream");
-
- if (source->filesrc_loop) {
- gst_element_seek(source->filesrc_pipeline,
- 1.0,
- GST_FORMAT_TIME,
- GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
- GST_SEEK_TYPE_SET, 0,
- GST_SEEK_TYPE_NONE, 0);
- }
- break;
-
- default:
- break;
- }
-
- return TRUE;
-}
//LCOV_EXCL_STOP
-static void __destroy_filesrc_pipeline(webrtc_gst_slot_s *source)
-{
- RET_IF(source == NULL, "source is NULL");
-
- if (source->filesrc_bus_watcher > 0) {
- gst_bus_remove_watch(source->filesrc_bus);
- source->filesrc_bus_watcher = 0;
- }
-
- if (source->filesrc_bus) {
- gst_object_unref(source->filesrc_bus);
- source->filesrc_bus = NULL;
- }
-
- if (source->filesrc_pipeline) {
- gst_object_unref(source->filesrc_pipeline);
- source->filesrc_pipeline = NULL;
- }
-}
-
-static int __build_filesrc_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source)
-{
- GstElement *filesrc = NULL;
- GstElement *decodebin = NULL;
- gchar *pipeline_name;
-
- RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
- RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
-
- pipeline_name = g_strdup_printf("filesrc-pipeline-for-source_%u", source->id);
- source->filesrc_pipeline = gst_pipeline_new(pipeline_name);
- g_free(pipeline_name);
- RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "pipeline is NULL");
-
- if (!(source->filesrc_bus = gst_pipeline_get_bus(GST_PIPELINE(source->filesrc_pipeline)))) {
- LOG_ERROR("failed to gst_pipeline_get_bus()");
- goto error;
- }
-
- if ((source->filesrc_bus_watcher = gst_bus_add_watch(source->filesrc_bus, (GstBusFunc)__filesrc_pipeline_bus_watch_cb, source)) == 0) {
- LOG_ERROR("failed to gst_bus_add_watch()");
- goto error;
- }
-
- if (!(filesrc = _create_element(DEFAULT_ELEMENT_FILESRC, ELEMENT_NAME_FILE_SRC)))
- goto error;
-
- if (!(decodebin = _create_element("decodebin", NULL))) {
- SAFE_GST_OBJECT_UNREF(filesrc);
- goto error;
- }
-
- gst_bin_add_many(GST_BIN(source->filesrc_pipeline), filesrc, decodebin, NULL);
-
- if (!gst_element_link(filesrc, decodebin)) {
- gst_bin_remove_many(GST_BIN(source->filesrc_pipeline), filesrc, decodebin, NULL);
- LOG_ERROR("failed to gst_element_link()");
- goto error;
- }
-
- g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(__filesrc_pipeline_decodebin_autoplug_select_cb), NULL);
- g_signal_connect(decodebin, "pad-added", G_CALLBACK(__filesrc_pipeline_decodebin_pad_added_cb), (gpointer)source);
-
- return WEBRTC_ERROR_NONE;
-
-error:
- __destroy_filesrc_pipeline(source);
- return WEBRTC_ERROR_INVALID_OPERATION;
-
-}
-
static int __build_source_bin(webrtc_s *webrtc, webrtc_gst_slot_s *source)
{
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
return __build_screensrc(webrtc, source);
case WEBRTC_MEDIA_SOURCE_TYPE_FILE:
- return __build_filesrc_pipeline(webrtc, source);
+ return _build_filesrc_pipeline(webrtc, source);
case WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET:
return _build_mediapacketsrc(webrtc, source);
free(source->sound_stream_info.type);
if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_FILE)
- __destroy_filesrc_pipeline(source);
+ _destroy_filesrc_pipeline(source);
g_free(source);
}
SAFE_GST_OBJECT_UNREF(source->av[av_idx].render.pipeline);
}
- __remove_rest_of_elements_for_filesrc_pipeline(source, (av_idx == AV_IDX_AUDIO));
+ _remove_rest_of_elements_for_filesrc_pipeline(source, (av_idx == AV_IDX_AUDIO));
if ((appsrc = gst_bin_get_by_name(source->bin, _get_element_name(av_idx, ELEMENT_APPSRC))))
APPEND_ELEMENT(element_list, appsrc);
if (video_tracks_num)
source->media_types |= MEDIA_TYPE_VIDEO;
}
+
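+/* Pick an RTP payloader from the registry that accepts the pad's current caps and give it the per-media element name. */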
+static GstElement * __create_payloader_for_filesrc_pipeline(GstPad *pad, bool is_audio)
+{
+ element_info_s elem_info;
+ GstElement *payloader = NULL;
+
+ RET_VAL_IF(pad == NULL, NULL, "pad is NULL");
+
+ CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
+ gst_pad_get_current_caps(pad),
+ NULL,
+ NULL,
+ payloader);
+ RET_VAL_IF(payloader == NULL, NULL, "payloader is NULL");
+
+ gst_element_set_name(payloader, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_PAYLOADER));
+
+ return payloader;
+}
+
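+/* Create a capsfilter carrying the RTP caps for the payloaded stream, using the payload type decided by _set_payload_type(). */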
+static GstElement * __prepare_capsfilter_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
+{
+ GstElement *capsfilter = NULL;
+ GstCaps *sink_caps = NULL;
+
+ RET_VAL_IF(source == NULL, NULL, "source is NULL");
+
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_CAPSFILTER))))
+ return NULL;
+
+	if (_set_payload_type(source->webrtc, source, GET_AV_IDX(is_audio), NULL) != WEBRTC_ERROR_NONE) {
+ SAFE_GST_OBJECT_UNREF(capsfilter);
+ return NULL;
+ }
+
+ if ((sink_caps = _make_rtp_caps(GET_MEDIA_TYPE_NAME(is_audio), source->av[GET_AV_IDX(is_audio)].pt, source))) {
+ g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
+ gst_caps_unref(sink_caps);
+ }
+
+ return capsfilter;
+}
+
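+/* Handoff callbacks: forward each buffer arriving at the filesrc pipeline's fakesink to the matching appsrc in the source bin. */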
+static void __filesrc_pipeline_audio_stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
+{
+ webrtc_gst_slot_s *source = data;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ g_signal_emit_by_name(gst_bin_get_by_name(source->bin, _get_element_name(AV_IDX_AUDIO, ELEMENT_APPSRC)), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
+}
+
+static void __filesrc_pipeline_video_stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
+{
+ webrtc_gst_slot_s *source = data;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ g_signal_emit_by_name(gst_bin_get_by_name(source->bin, _get_element_name(AV_IDX_VIDEO, ELEMENT_APPSRC)), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
+}
+
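+/* Blocking buffer probe on the fakesink pad; it only logs and keeps the pad blocked until the probe is removed. */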
+static GstPadProbeReturn __fakesink_block_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
+{
+ webrtc_gst_slot_s *source = u_data;
+ gchar *media_type = NULL;
+
+ media_type = _get_media_type_from_pad(pad);
+ RET_VAL_IF(media_type == NULL, GST_PAD_PROBE_OK, "media_type is NULL");
+
+ LOG_DEBUG("source[%p, id:%u] fakesink pad[%p] for [%s] is blocked", source, source->id, pad, media_type);
+ g_free(media_type);
+
+ return GST_PAD_PROBE_OK;
+}
+
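+/* One-shot buffer probe on the fakesink pad: copy the negotiated caps to the appsrc, install the blocking probe,
+ * add a transceiver for the RECVONLY direction, then remove itself. */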
+static GstPadProbeReturn __fakesink_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
+{
+ webrtc_gst_slot_s *source = u_data;
+ GstCaps *caps = NULL;
+ GstElement *appsrc = NULL;
+ gchar *media_type = NULL;
+ int av_idx;
+
+ media_type = _get_media_type_from_pad(pad);
+ RET_VAL_IF(media_type == NULL, GST_PAD_PROBE_OK, "media_type is NULL");
+
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
+ return GST_PAD_PROBE_OK;
+ }
+
+ av_idx = GET_AV_IDX(_is_audio_media_type(media_type));
+ g_free(media_type);
+
+ appsrc = gst_bin_get_by_name(source->bin, _get_element_name(av_idx, ELEMENT_APPSRC));
+ RET_VAL_IF(appsrc == NULL, GST_PAD_PROBE_OK, "There is no appsrc for [%s]", (av_idx == AV_IDX_AUDIO) ? "audio" : "video");
+
+ caps = gst_pad_get_current_caps(pad);
+ g_object_set(G_OBJECT(appsrc), "caps", caps, NULL);
+
+ LOG_INFO("setting caps for [%s appsrc] successfully", (av_idx == AV_IDX_AUDIO) ? "audio" : "video");
+ PRINT_CAPS(caps, "appsrc");
+
+ source->filesrc_av[av_idx].sink_pad = pad;
+ source->filesrc_av[av_idx].sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER | GST_PAD_PROBE_TYPE_BLOCK,
+ __fakesink_block_probe_cb, source, NULL);
+ source->av[av_idx].codec = gst_structure_get_string(gst_caps_get_structure(caps, 0), "encoding-name");
+
+ if (source->av[av_idx].direction == WEBRTC_TRANSCEIVER_DIRECTION_RECVONLY) {
+ rtp_payload_info_s payload_info = {
+ .media_type = _get_media_type_from_pad(pad),
+ .encoding_name = source->av[av_idx].codec,
+ };
+ gst_structure_get_int(gst_caps_get_structure(caps, 0), "clock-rate", &payload_info.clock_rate);
+ _add_transceiver(source, (av_idx == AV_IDX_AUDIO) ? WEBRTC_MEDIA_TYPE_AUDIO : WEBRTC_MEDIA_TYPE_VIDEO, &payload_info);
+ g_free((gchar *)payload_info.media_type);
+ }
+ gst_caps_unref(caps);
+
+ return GST_PAD_PROBE_REMOVE;
+}
+
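+/* Create a fakesink with sync and signal-handoffs enabled, attach the caps probe to its sink pad and connect the per-media handoff callback. */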
+static GstElement * __prepare_fakesink_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
+{
+ GstElement *fakesink = NULL;
+ GstPad *sink_pad = NULL;
+
+ RET_VAL_IF(source == NULL, NULL, "source is NULL");
+
+ if (!(fakesink = _create_element(DEFAULT_ELEMENT_FAKESINK, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_FAKESINK))))
+ return NULL;
+
+ sink_pad = gst_element_get_static_pad(fakesink, "sink");
+ gst_pad_add_probe(sink_pad, GST_PAD_PROBE_TYPE_BUFFER, __fakesink_probe_cb, source, NULL);
+ gst_object_unref(sink_pad);
+
+ g_object_set(G_OBJECT(fakesink),
+ "sync", TRUE,
+ "signal-handoffs", TRUE,
+ NULL);
+
+ g_signal_connect(fakesink,
+ "handoff",
+ G_CALLBACK(is_audio ? __filesrc_pipeline_audio_stream_handoff_cb : __filesrc_pipeline_video_stream_handoff_cb),
+ (gpointer)source);
+
+ return fakesink;
+}
+
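+/* Append queue -> payloader -> capsfilter -> fakesink to the filesrc pipeline for the given media type and sync their states. */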
+static int __create_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, GstPad *pad, bool is_audio)
+{
+ GstBin *bin;
+ GstElement *queue;
+ GstElement *payloader;
+ GstElement *capsfilter;
+ GstElement *fakesink;
+ GList *element_list = NULL;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "filesrc_pipeline is NULL");
+
+ bin = GST_BIN(source->filesrc_pipeline);
+
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE))))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(element_list, queue);
+
+ if (!(payloader = __create_payloader_for_filesrc_pipeline(pad, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, payloader);
+
+ if (!(capsfilter = __prepare_capsfilter_for_filesrc_pipeline(source, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, capsfilter);
+
+ if (!(fakesink = __prepare_fakesink_for_filesrc_pipeline(source, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, fakesink);
+
+ if (!_add_elements_to_bin(bin, element_list)) {
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!_link_elements(element_list))
+ goto exit_with_remove_from_bin;
+
+ if (!_sync_elements_state_with_parent(element_list))
+ goto exit_with_remove_from_bin;
+
+ SAFE_G_LIST_FREE(element_list);
+
+ return WEBRTC_ERROR_NONE;
+
+exit_with_remove_from_bin:
+ _remove_elements_from_bin(bin, element_list);
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
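+/* Remove the queue/payloader/capsfilter/fakesink chain of the given media type from the filesrc pipeline. */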
+void _remove_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
+{
+ GstBin *bin;
+ GstElement *queue;
+ GstElement *payloader;
+ GstElement *capsfilter;
+ GstElement *fakesink;
+ GList *element_list = NULL;
+ int av_idx = GET_AV_IDX(is_audio);
+
+	RET_IF(source == NULL, "source is NULL");
+ RET_IF(source->webrtc == NULL, "webrtc is NULL");
+ RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
+
+ bin = GST_BIN(source->filesrc_pipeline);
+
+ if ((queue = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_QUEUE))))
+ APPEND_ELEMENT(element_list, queue);
+ else
+ LOG_ERROR("queue is NULL");
+
+ if ((payloader = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_PAYLOADER))))
+ APPEND_ELEMENT(element_list, payloader);
+ else
+ LOG_ERROR("payloader is NULL");
+
+ if ((capsfilter = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_CAPSFILTER))))
+ APPEND_ELEMENT(element_list, capsfilter);
+ else
+ LOG_ERROR("capsfilter is NULL");
+
+ if ((fakesink = gst_bin_get_by_name(bin, _get_element_name(av_idx, ELEMENT_FAKESINK))))
+ APPEND_ELEMENT(element_list, fakesink);
+ else
+ LOG_ERROR("fakesink is NULL");
+
+ _remove_elements_from_bin(bin, element_list);
+
+ SAFE_G_LIST_FREE(element_list);
+}
+
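+/* Bus watch for the filesrc pipeline: log errors and state changes, and seek back to 0 on EOS when looping is enabled. */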
+static gboolean __filesrc_pipeline_bus_watch_cb(GstBus *bus, GstMessage *message, gpointer user_data)
+{
+ webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
+ GError *err = NULL;
+ GstState gst_state_old = GST_STATE_VOID_PENDING;
+ GstState gst_state_new = GST_STATE_VOID_PENDING;
+ GstState gst_state_pending = GST_STATE_VOID_PENDING;
+
+ RET_VAL_IF(source == NULL, FALSE, "source is NULL");
+ RET_VAL_IF(source->filesrc_pipeline == NULL, FALSE, "pipeline is NULL");
+
+ if (message == NULL) {
+ LOG_DEBUG("message is null");
+ return TRUE;
+ }
+
+ switch (GST_MESSAGE_TYPE(message)) {
+ case GST_MESSAGE_ERROR:
+ gst_message_parse_error(message, &err, NULL);
+
+ LOG_ERROR("Error[from %s]: message[%s], code[%d]",
+ GST_OBJECT_NAME(GST_OBJECT_CAST(GST_ELEMENT(GST_MESSAGE_SRC(message)))), err->message, err->code);
+
+ g_error_free(err);
+ break;
+
+ case GST_MESSAGE_STATE_CHANGED:
+ if (GST_MESSAGE_SRC(message) != GST_OBJECT(source->filesrc_pipeline))
+ return TRUE;
+
+ gst_message_parse_state_changed(message, &gst_state_old, &gst_state_new, &gst_state_pending);
+
+ LOG_INFO("GST_MESSAGE_STATE_CHANGED: Old[GST_STATE_%s] New[GST_STATE_%s] Pending[GST_STATE_%s]",
+ gst_element_state_get_name(gst_state_old), gst_element_state_get_name(gst_state_new),
+ gst_element_state_get_name(gst_state_pending));
+ break;
+
+ case GST_MESSAGE_ASYNC_DONE:
+ if (GST_MESSAGE_SRC(message) != GST_OBJECT(source->filesrc_pipeline))
+ return TRUE;
+
+ LOG_INFO("GST_MESSAGE_ASYNC_DONE");
+ break;
+
+ case GST_MESSAGE_EOS:
+ LOG_INFO("GST_MESSAGE_EOS end-of-stream");
+
+ if (source->filesrc_loop) {
+ gst_element_seek(source->filesrc_pipeline,
+ 1.0,
+ GST_FORMAT_TIME,
+ GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
+ GST_SEEK_TYPE_SET, 0,
+ GST_SEEK_TYPE_NONE, 0);
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ return TRUE;
+}
+
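+/* Expose pads instead of plugging decoders so the encoded stream can be payloaded as it is. */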
+static GstAutoplugSelectResult __filesrc_pipeline_decodebin_autoplug_select_cb(GstElement *bin, GstPad *pad, GstCaps *caps, GstElementFactory* factory, gpointer udata)
+{
+ const gchar *klass = gst_element_factory_get_metadata(factory, GST_ELEMENT_METADATA_KLASS);
+
+ if (g_strrstr(klass, "Codec/Decoder")) {
+ LOG_INFO("expose [%s]", klass);
+ return GST_AUTOPLUG_SELECT_EXPOSE;
+ }
+
+ return GST_AUTOPLUG_SELECT_TRY;
+}
+
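+/* Build appsrc -> queue -> capsfilter inside the source bin and expose it through a ghost src pad toward webrtcbin. */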
+static int __build_filesrc_bin(webrtc_gst_slot_s *source, media_type_e media_type)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ GstPad *src_pad = NULL;
+ GstElement *appsrc = NULL;
+ GstElement *queue = NULL;
+ GstElement *capsfilter = NULL;
+ GList *element_list = NULL;
+ const int av_idx = GET_AV_IDX_BY_TYPE(media_type);
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+
+ source->media_types |= media_type;
+
+ ret = _add_no_target_ghostpad_to_slot(source, true, &src_pad);
+ RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
+
+ if (!(appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, _get_element_name(av_idx, ELEMENT_APPSRC))))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(element_list, appsrc);
+
+ g_object_set(G_OBJECT(appsrc),
+ "is-live", TRUE,
+ "format", GST_FORMAT_TIME,
+ NULL);
+
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _get_element_name(av_idx, ELEMENT_QUEUE))))
+ goto exit;
+ APPEND_ELEMENT(element_list, queue);
+
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _get_element_name(av_idx, ELEMENT_CAPSFILTER))))
+ goto exit;
+ APPEND_ELEMENT(element_list, capsfilter);
+
+ if (!_add_elements_to_bin(source->bin, element_list)) {
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!_link_elements(element_list))
+ goto exit_with_remove_from_bin;
+
+ if (!_sync_elements_state_with_parent(element_list))
+ goto exit_with_remove_from_bin;
+
+ if (_set_ghost_pad_target(src_pad, capsfilter, true) != WEBRTC_ERROR_NONE)
+ goto exit_with_remove_from_bin;
+
+ _add_probe_to_pad_for_pause(source, av_idx, src_pad, _payloaded_data_probe_cb);
+
+ SAFE_G_LIST_FREE(element_list);
+
+ return WEBRTC_ERROR_NONE;
+
+exit_with_remove_from_bin:
+ _remove_elements_from_bin(source->bin, element_list);
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+exit:
+ SAFE_G_LIST_FREE_FULL(element_list, gst_object_unref);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
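+/* Link the new decodebin pad to the sink pad of the per-media queue in the filesrc pipeline. */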
+static int __link_decodebin_with_queue(GstPad *pad, webrtc_gst_slot_s *source, bool is_audio)
+{
+ GstElement *queue = NULL;
+ GstPad *sink_pad = NULL;
+
+ RET_VAL_IF(pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "pad is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+
+ queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
+ if (!queue) {
+ LOG_ERROR("failed to get element [%s]", _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ sink_pad = gst_element_get_static_pad(queue, "sink");
+ if (!sink_pad) {
+ LOG_ERROR("sink_pad is NULL for [%s]", GST_ELEMENT_NAME(queue));
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (gst_pad_link(pad, sink_pad) != GST_PAD_LINK_OK) {
+ LOG_ERROR("failed to gst_pad_link()");
+ g_object_unref(sink_pad);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ g_object_unref(sink_pad);
+
+ LOG_INFO("decodebin is linked to [%s]", GST_ELEMENT_NAME(queue));
+
+ return WEBRTC_ERROR_NONE;
+}
+
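+/* pad-added handler of decodebin: complete the filesrc pipeline and the source bin for the detected media type,
+ * link the source with webrtcbin, and roll back the added elements on failure. */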
+static void __filesrc_pipeline_decodebin_pad_added_cb(GstElement *element, GstPad *pad, gpointer data)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source = data;
+ gchar *media_type = NULL;
+ bool is_audio;
+ int av_idx;
+ GstElement *queue = NULL;
+
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
+
+ media_type = _get_mime_type_from_pad(pad);
+ RET_IF(media_type == NULL, "media_type is NULL");
+
+ if (!_is_supported_media_type(media_type)) {
+ g_free(media_type);
+ return;
+ }
+
+ LOG_INFO("[%s] new_pad[%s] media_type[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad), media_type);
+
+ is_audio = _is_audio_media_type(media_type);
+ av_idx = GET_AV_IDX(is_audio);
+
+ g_free(media_type);
+
+ if (source->av[av_idx].src_pad_probe_id > 0) {
+ LOG_INFO("Pipeline already built");
+ ret = __link_decodebin_with_queue(pad, source, is_audio);
+ if (ret != WEBRTC_ERROR_NONE)
+ LOG_ERROR("failed to __link_decodebin_with_queue()");
+ return;
+ }
+
+ ret = __create_rest_of_elements_for_filesrc_pipeline(source, pad, is_audio);
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to __create_rest_of_elements_for_filesrc_pipeline()");
+ return;
+ }
+
+ queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _get_element_name(GET_AV_IDX(is_audio), ELEMENT_QUEUE));
+ RET_IF(queue == NULL, "queue is NULL");
+
+ ret = __link_decodebin_with_queue(pad, source, is_audio);
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to __link_decodebin_with_queue()");
+ goto exit;
+ }
+
+ ret = __build_filesrc_bin(source, is_audio ? MEDIA_TYPE_AUDIO : MEDIA_TYPE_VIDEO);
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to __build_filesrc_bin()");
+ goto exit;
+ }
+
+ ret = _link_source_with_webrtcbin(source, source->webrtc->gst.webrtcbin);
+ if (ret != WEBRTC_ERROR_NONE) {
+ LOG_ERROR("failed to _link_source_with_webrtcbin()");
+ goto exit;
+ }
+
+ source->av[av_idx].render.need_decoding = true;
+ source->av[av_idx].render.appsrc_caps = gst_pad_get_current_caps(pad);
+ _add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), _source_data_probe_cb);
+
+ GENERATE_DOT(source->webrtc, source->filesrc_pipeline, "%s.%s-%s",
+ GST_ELEMENT_NAME(source->filesrc_pipeline), GST_ELEMENT_NAME(element), GST_PAD_NAME(pad));
+
+ return;
+
+exit:
+ _remove_rest_of_elements_for_filesrc_pipeline(source, is_audio);
+ return;
+}
+
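+/* Build the separate filesrc -> decodebin pipeline with its own bus watch; the rest is completed in the pad-added callback. */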
+int _build_filesrc_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source)
+{
+ GstElement *filesrc = NULL;
+ GstElement *decodebin = NULL;
+ gchar *pipeline_name;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+
+ pipeline_name = g_strdup_printf("filesrc-pipeline-for-source_%u", source->id);
+ source->filesrc_pipeline = gst_pipeline_new(pipeline_name);
+ g_free(pipeline_name);
+ RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "pipeline is NULL");
+
+ if (!(source->filesrc_bus = gst_pipeline_get_bus(GST_PIPELINE(source->filesrc_pipeline)))) {
+ LOG_ERROR("failed to gst_pipeline_get_bus()");
+ goto error;
+ }
+
+ if ((source->filesrc_bus_watcher = gst_bus_add_watch(source->filesrc_bus, (GstBusFunc)__filesrc_pipeline_bus_watch_cb, source)) == 0) {
+ LOG_ERROR("failed to gst_bus_add_watch()");
+ goto error;
+ }
+
+ if (!(filesrc = _create_element(DEFAULT_ELEMENT_FILESRC, ELEMENT_NAME_FILE_SRC)))
+ goto error;
+
+ if (!(decodebin = _create_element("decodebin", NULL))) {
+ SAFE_GST_OBJECT_UNREF(filesrc);
+ goto error;
+ }
+
+ gst_bin_add_many(GST_BIN(source->filesrc_pipeline), filesrc, decodebin, NULL);
+
+ if (!gst_element_link(filesrc, decodebin)) {
+ gst_bin_remove_many(GST_BIN(source->filesrc_pipeline), filesrc, decodebin, NULL);
+ LOG_ERROR("failed to gst_element_link()");
+ goto error;
+ }
+
+ g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(__filesrc_pipeline_decodebin_autoplug_select_cb), NULL);
+ g_signal_connect(decodebin, "pad-added", G_CALLBACK(__filesrc_pipeline_decodebin_pad_added_cb), (gpointer)source);
+
+ return WEBRTC_ERROR_NONE;
+
+error:
+ _destroy_filesrc_pipeline(source);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
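+/* Release the bus watch, the bus and the filesrc pipeline of the source. */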
+void _destroy_filesrc_pipeline(webrtc_gst_slot_s *source)
+{
+ RET_IF(source == NULL, "source is NULL");
+
+ if (source->filesrc_bus_watcher > 0) {
+ gst_bus_remove_watch(source->filesrc_bus);
+ source->filesrc_bus_watcher = 0;
+ }
+
+ if (source->filesrc_bus) {
+ gst_object_unref(source->filesrc_bus);
+ source->filesrc_bus = NULL;
+ }
+
+ if (source->filesrc_pipeline) {
+ gst_object_unref(source->filesrc_pipeline);
+ source->filesrc_pipeline = NULL;
+ }
+}