#define DEFAULT_ELEMENT_VOLUME "volume"
#define DEFAULT_ELEMENT_INPUT_SELECTOR "input-selector"
#define DEFAULT_ELEMENT_VIDEOCROP "videocrop"
+#define DEFAULT_ELEMENT_FILESRC "filesrc"
+
#define ELEMENT_NAME_FIRST_CAPSFILTER "firstCapsfilter"
#define ELEMENT_NAME_RTP_CAPSFILTER "rtpCapsfilter"
return true;
}
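+/* sync the state of every element in the list with its parent; returns false on the first failure */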
+static bool __sync_elements_state_with_parent(GList *element_list)
+{
+ GList *list;
+
+ RET_VAL_IF(element_list == NULL, false, "element_list is NULL");
+
+ for (list = element_list; list; list = g_list_next(list))
+ RET_VAL_IF(!gst_element_sync_state_with_parent((GstElement *)list->data), false,
+ "failed to gst_element_sync_state_with_parent(), [%s]", GST_ELEMENT_NAME((GstElement *)list->data));
+
+ return true;
+}
+
static bool __link_switch_srcs(GstElement *switch_element, GList *switch_src_list)
{
GstElement *element;
GstElement *appsrc = NULL;
GstElement *queue = NULL;
GstElement *capsfilter = NULL;
- int idx = GET_AV_IDX_BY_TYPE(media_type);
+ int av_idx = GET_AV_IDX_BY_TYPE(media_type);
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
ret = _add_no_target_ghostpad_to_slot(source, true, &src_pad);
RET_VAL_IF(ret != WEBRTC_ERROR_NONE, ret, "failed to _add_no_target_ghostpad_to_slot()");
- if (!(appsrc = _create_element("appsrc", _av_tbl[idx].appsrc_name)))
+ if (!(appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, _av_tbl[av_idx].appsrc_name)))
return WEBRTC_ERROR_INVALID_OPERATION;
g_object_set(G_OBJECT(appsrc),
"format", GST_FORMAT_TIME,
NULL);
- if (!(queue = _create_element("queue", _av_tbl[idx].queue_name))) {
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _av_tbl[av_idx].queue_name))) {
SAFE_GST_OBJECT_UNREF(appsrc);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- if (!(capsfilter = _create_element("capsfilter", _av_tbl[idx].capsfilter_name))) {
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, _av_tbl[av_idx].capsfilter_name))) {
SAFE_GST_OBJECT_UNREF(appsrc);
SAFE_GST_OBJECT_UNREF(queue);
return WEBRTC_ERROR_INVALID_OPERATION;
if (ret != WEBRTC_ERROR_NONE)
goto error;
- __add_probe_to_pad_for_pause(source, idx, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, av_idx, src_pad, __payloaded_data_probe_cb);
return WEBRTC_ERROR_NONE;
static void __remove_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, bool is_audio)
{
GstBin *bin = NULL;
+ GstElement *queue = NULL;
GstElement *payload = NULL;
GstElement *capsfilter = NULL;
GstElement *fakesink = NULL;
- int idx = GET_AV_IDX(is_audio);
+ int av_idx = GET_AV_IDX(is_audio);
- RET_IF(source == NULL, "pad is NULL");
+ RET_IF(source == NULL, "source is NULL");
bin = GST_BIN(source->filesrc_pipeline);
- payload = gst_bin_get_by_name(bin, _av_tbl[idx].payload_name);
+ queue = gst_bin_get_by_name(bin, _av_tbl[av_idx].queue_name);
+ RET_IF(queue == NULL, "queue is NULL");
+
+ payload = gst_bin_get_by_name(bin, _av_tbl[av_idx].payload_name);
RET_IF(payload == NULL, "payload is NULL");
- capsfilter = gst_bin_get_by_name(bin, _av_tbl[idx].capsfilter_name);
+ capsfilter = gst_bin_get_by_name(bin, _av_tbl[av_idx].capsfilter_name);
RET_IF(capsfilter == NULL, "capsfilter is NULL");
- fakesink = gst_bin_get_by_name(bin, _av_tbl[idx].fakesink_name);
+ fakesink = gst_bin_get_by_name(bin, _av_tbl[av_idx].fakesink_name);
RET_IF(fakesink == NULL, "fakesink is NULL");
- gst_bin_remove_many(bin, payload, capsfilter, fakesink, NULL);
+ gst_bin_remove_many(bin, queue, payload, capsfilter, fakesink, NULL);
}
static void __filesrc_pipeline_audio_stream_handoff_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data)
RET_VAL_IF(source == NULL, NULL, "source is NULL");
- if (!(fakesink = _create_element("fakesink", _av_tbl[GET_AV_IDX(is_audio)].fakesink_name)))
+ if (!(fakesink = _create_element(DEFAULT_ELEMENT_FAKESINK, _av_tbl[GET_AV_IDX(is_audio)].fakesink_name)))
return NULL;
sink_pad = gst_element_get_static_pad(fakesink, "sink");
static int __create_rest_of_elements_for_filesrc_pipeline(webrtc_gst_slot_s *source, GstPad *pad, bool is_audio)
{
GstBin *bin = NULL;
+ GstElement *queue = NULL;
GstElement *payload = NULL;
GstElement *capsfilter = NULL;
GstElement *fakesink = NULL;
+ GList *element_list = NULL;
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->filesrc_pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "filesrc_pipeline is NULL");
bin = GST_BIN(source->filesrc_pipeline);
- if (!(payload = __create_payload_for_filesrc_pipeline(pad, is_audio)))
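+ /* a queue is created in front of the payloader; the decodebin src pad is linked to this queue in __link_decodebin_with_queue() */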
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, _av_tbl[GET_AV_IDX(is_audio)].queue_name)))
return WEBRTC_ERROR_INVALID_OPERATION;
+ APPEND_ELEMENT(element_list, queue);
- if (!(capsfilter = __prepare_capsfilter_for_filesrc_pipeline(source, is_audio))) {
- SAFE_GST_OBJECT_UNREF(payload);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- if (!(fakesink = __prepare_fakesink_for_filesrc_pipeline(source, is_audio))) {
- SAFE_GST_OBJECT_UNREF(payload);
- SAFE_GST_OBJECT_UNREF(capsfilter);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if (!(payload = __create_payload_for_filesrc_pipeline(pad, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, payload);
- gst_bin_add_many(bin, payload, capsfilter, fakesink, NULL);
+ if (!(capsfilter = __prepare_capsfilter_for_filesrc_pipeline(source, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, capsfilter);
- if (!gst_element_link_many(payload, capsfilter, fakesink, NULL)) {
- LOG_ERROR("failed to gst_element_link_many()");
- goto error;
- }
+ if (!(fakesink = __prepare_fakesink_for_filesrc_pipeline(source, is_audio)))
+ goto exit;
+ APPEND_ELEMENT(element_list, fakesink);
- if (!gst_element_sync_state_with_parent(payload)) {
- LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(payload));
- goto error;
- }
+ if (!__add_elements_to_bin(bin, element_list))
+ goto exit;
- if (!gst_element_sync_state_with_parent(capsfilter)) {
- LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(capsfilter));
- goto error;
- }
+ if (!__link_elements(element_list))
+ goto exit_with_remove_from_bin;
- if (!gst_element_sync_state_with_parent(fakesink)) {
- LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(fakesink));
- goto error;
- }
+ if (!__sync_elements_state_with_parent(element_list))
+ goto exit_with_remove_from_bin;
+
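+ /* on success the bin owns the elements, so only the list container needs to be freed */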
+ SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_NONE;
-error:
- gst_bin_remove_many(bin, payload, capsfilter, fakesink, NULL);
+exit_with_remove_from_bin:
+ /* removing the elements from the bin unrefs them, so no explicit unref is needed here */
+ __remove_elements_from_bin(bin, element_list);
+ SAFE_G_LIST_FREE(element_list);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+exit:
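+ /* release the elements created so far; they were not linked into the pipeline */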
+ __unref_elements(element_list);
+ SAFE_G_LIST_FREE(element_list);
return WEBRTC_ERROR_INVALID_OPERATION;
}
-static int __link_decodebin_with_payload(GstPad *pad, webrtc_gst_slot_s *source, bool is_audio)
+static int __link_decodebin_with_queue(GstPad *pad, webrtc_gst_slot_s *source, bool is_audio)
{
- GstElement *payload = NULL;
+ GstElement *queue = NULL;
GstPad *sink_pad = NULL;
RET_VAL_IF(pad == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "pad is NULL");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
- payload = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _av_tbl[GET_AV_IDX(is_audio)].payload_name);
- if (!payload) {
- LOG_ERROR("failed to get element [%s]", _av_tbl[GET_AV_IDX(is_audio)].payload_name);
+ queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _av_tbl[GET_AV_IDX(is_audio)].queue_name);
+ if (!queue) {
+ LOG_ERROR("failed to get element [%s]", _av_tbl[GET_AV_IDX(is_audio)].queue_name);
return WEBRTC_ERROR_INVALID_OPERATION;
}
- sink_pad = gst_element_get_static_pad(payload, "sink");
+ sink_pad = gst_element_get_static_pad(queue, "sink");
if (!sink_pad) {
- LOG_ERROR("sink_pad is NULL for [%s]", GST_ELEMENT_NAME(payload));
+ LOG_ERROR("sink_pad is NULL for [%s]", GST_ELEMENT_NAME(queue));
return WEBRTC_ERROR_INVALID_OPERATION;
}
g_object_unref(sink_pad);
- LOG_INFO("decodebin is linked to [%s]", GST_ELEMENT_NAME(payload));
+ LOG_INFO("decodebin is linked to [%s]", GST_ELEMENT_NAME(queue));
return WEBRTC_ERROR_NONE;
}
const gchar *media_type = NULL;
gboolean is_audio;
int av_idx;
+ GstElement *queue = NULL;
RET_IF(source == NULL, "source is NULL");
RET_IF(source->filesrc_pipeline == NULL, "filesrc_pipeline is NULL");
if (source->av[av_idx].src_pad_probe_id > 0) {
LOG_INFO("Pipeline already built");
- __link_decodebin_with_payload(pad, source, is_audio);
+ ret = __link_decodebin_with_queue(pad, source, is_audio);
+ if (ret != WEBRTC_ERROR_NONE)
+ LOG_ERROR("failed to __link_decodebin_with_queue()");
return;
}
return;
}
- ret = __link_decodebin_with_payload(pad, source, is_audio);
+ queue = gst_bin_get_by_name(GST_BIN(source->filesrc_pipeline), _av_tbl[GET_AV_IDX(is_audio)].queue_name);
+ RET_IF(queue == NULL, "queue is NULL");
+
+ ret = __link_decodebin_with_queue(pad, source, is_audio);
if (ret != WEBRTC_ERROR_NONE) {
- LOG_ERROR("failed to __link_decodebin_with_payload()");
+ LOG_ERROR("failed to __link_decodebin_with_queue()");
__remove_rest_of_elements_for_filesrc_pipeline(source, is_audio);
return;
}
source->av[av_idx].render.need_decoding = true;
source->av[av_idx].render.appsrc_caps = gst_pad_get_current_caps(pad);
- __add_probe_to_pad_for_render(source, av_idx, pad, __source_data_probe_cb);
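+ /* the render probe is now attached to the queue's src pad instead of the decodebin src pad */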
+ __add_probe_to_pad_for_render(source, av_idx, gst_element_get_static_pad(queue, "src"), __source_data_probe_cb);
+ gst_object_unref(queue);
}
static GstAutoplugSelectResult __filesrc_pipeline_decodebin_autoplug_select_cb(GstElement *bin, GstPad *pad, GstCaps *caps, GstElementFactory* factory, gpointer udata)
return GST_AUTOPLUG_SELECT_TRY;
}
-static void __filesrc_pipeline_decodebin_pad_removed_cb(GstElement *element, GstPad *pad, gpointer data)
-{
- webrtc_gst_slot_s *source = data;
- int idx = 0;
-
- RET_IF(source == NULL, "source is NULL");
-
- if (GST_PAD_DIRECTION(pad) != GST_PAD_SRC)
- return;
-
- LOG_INFO("[%s] removed_pad[%s]", GST_ELEMENT_NAME(element), GST_PAD_NAME(pad));
-
- for (idx = 0; idx < AV_IDX_MAX; idx++) {
- if (source->av[idx].render.src_pad_probe_id > 0) {
- source->av[idx].render.src_pad_probe_id = 0;
- source->av[idx].render.src_pad = NULL;
-
- gst_caps_unref(source->av[idx].render.appsrc_caps);
- source->av[idx].render.appsrc_caps = NULL;
- }
- }
-}
-
static gboolean __filesrc_pipeline_bus_watch_cb(GstBus *bus, GstMessage *message, gpointer user_data)
{
webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
goto error;
}
- if (!(filesrc = _create_element("filesrc", DEFAULT_NAME_FILE_SRC)))
+ if (!(filesrc = _create_element(DEFAULT_ELEMENT_FILESRC, DEFAULT_NAME_FILE_SRC)))
goto error;
if (!(decodebin = _create_element("decodebin", NULL))) {
g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(__filesrc_pipeline_decodebin_autoplug_select_cb), NULL);
g_signal_connect(decodebin, "pad-added", G_CALLBACK(__filesrc_pipeline_decodebin_pad_added_cb), (gpointer)source);
- g_signal_connect(decodebin, "pad-removed", G_CALLBACK(__filesrc_pipeline_decodebin_pad_removed_cb), (gpointer)source);
return WEBRTC_ERROR_NONE;
g_free(pipeline_name);
RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
- appsrc = _create_element("appsrc", NULL);
+ appsrc = _create_element(DEFAULT_ELEMENT_APPSRC, NULL);
if (!appsrc)
goto error;
return;
}
- LOG_INFO("change pipeline state to [%s]", gst_element_state_get_name(state));
+ LOG_INFO("change filesrc pipeline state to [%s]", gst_element_state_get_name(state));
}
int _gst_filesrc_pipeline_set_state(webrtc_s *webrtc, GstState state)