webrtc->payload_ids ^= bitmask;
}
+/* Buffer probe callback attached by __add_probe_to_pad_for_render().
+ * Mirrors each buffer flowing through the source bin into the loopback
+ * render pipeline's appsrc (when one exists), then lets the original
+ * buffer continue downstream by returning GST_PAD_PROBE_OK.
+ * Returns GST_PAD_PROBE_REMOVE only on invalid arguments. */
+static GstPadProbeReturn __source_data_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ GstBuffer *buffer;
+ GstElement *appsrc;
+ GstFlowReturn gst_ret = GST_FLOW_OK;
+
+ RET_VAL_IF(info == NULL, GST_PAD_PROBE_REMOVE, "info is NULL");
+ RET_VAL_IF(info->data == NULL, GST_PAD_PROBE_REMOVE, "info->data is NULL");
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+ RET_VAL_IF(probe_data->source == NULL, GST_PAD_PROBE_REMOVE, "probe_data->source is NULL");
+
+ switch (probe_data->av_idx) {
+ case AV_IDX_AUDIO:
+ /* TODO: implementation */
+ break;
+ case AV_IDX_VIDEO:
+ /* No display set for loopback -> nothing to render, pass the buffer through. */
+ if (!probe_data->source->display)
+ return GST_PAD_PROBE_OK;
+ appsrc = probe_data->source->av[probe_data->av_idx].render.appsrc;
+ if (appsrc) {
+ buffer = gst_pad_probe_info_get_buffer(info);
+ LOG_DEBUG("push buffer[%p] to the render pipeline, appsrc[%p]", buffer, appsrc);
+ /* The "push-buffer" action signal does not take ownership of the
+  * buffer (unlike gst_app_src_push_buffer()), so no extra ref is needed. */
+ g_signal_emit_by_name(G_OBJECT(appsrc), "push-buffer", buffer, &gst_ret, NULL);
+ if (gst_ret != GST_FLOW_OK)
+ LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return GST_PAD_PROBE_OK;
+}
+
+/* Attaches a BUFFER probe (probe_cb, normally __source_data_probe_cb) to @pad
+ * so media can be forwarded to the loopback render pipeline.
+ * Takes over the caller's reference to @pad: it is stored in
+ * source->av[idx].render.src_pad and released by
+ * __remove_probe_from_pad_for_render(). The probe user data is freed
+ * automatically (g_free destroy notify) when the probe is removed. */
+static void __add_probe_to_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+{
+ probe_userdata_s *probe_userdata;
+
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+ RET_IF(pad == NULL, "pad is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->source = source;
+ probe_userdata->av_idx = idx;
+ source->av[idx].render.src_pad = pad;
+ source->av[idx].render.src_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe[id:%lu, callback:%p]",
+ source->id, idx, pad, source->av[idx].render.src_pad_probe_id, probe_cb);
+}
+
+/* Counterpart of __add_probe_to_pad_for_render(): removes the render probe,
+ * drops the stored pad reference and releases the cached appsrc caps.
+ * Safe to call when no probe was installed (probe_id == 0). */
+static void __remove_probe_from_pad_for_render(webrtc_gst_slot_s *source, unsigned int idx)
+{
+ RET_IF(source == NULL, "source is NULL");
+ RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
+
+ if (source->av[idx].render.src_pad_probe_id == 0)
+ return;
+
+ LOG_DEBUG("source[id:%u, av_idx:%u] pad[%p] probe_id[%lu]",
+ source->id, idx, source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+ gst_pad_remove_probe(source->av[idx].render.src_pad, source->av[idx].render.src_pad_probe_id);
+ source->av[idx].render.src_pad_probe_id = 0;
+ /* Release the pad reference taken over in __add_probe_to_pad_for_render(). */
+ gst_object_unref(source->av[idx].render.src_pad);
+ source->av[idx].render.src_pad = NULL;
+ if (source->av[idx].render.appsrc_caps) {
+ gst_caps_unref(source->av[idx].render.appsrc_caps);
+ source->av[idx].render.appsrc_caps = NULL;
+ }
+}
+
static int __create_rest_of_elements(webrtc_s *webrtc, webrtc_gst_slot_s *source, bool need_capsfilter, GList **element_list)
{
GstElement *capsfilter = NULL;
gchar *media_type = NULL;
gchar *encoder_name = NULL;
unsigned int payload_id;
+ int idx;
RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
LOG_ERROR("not ready for this media_types[0x%x]", source->media_types);
return WEBRTC_ERROR_INVALID_OPERATION;
}
+ idx = (source->media_types == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
if (need_capsfilter) {
if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_FIRST_CAPSFILTER)))
g_object_set(G_OBJECT(capsfilter), "caps", sink_caps, NULL);
gst_caps_unref(sink_caps);
}
+
+ source->av[idx].render.need_decoding = true;
+ __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+
goto skip_encoder;
}
__make_default_raw_caps(source, &webrtc->ini),
__make_default_encoded_caps(source, &webrtc->ini, NULL),
encoder);
- if (encoder == NULL)
+ if (encoder == NULL) {
+ __remove_probe_from_pad_for_render(source, idx);
return WEBRTC_ERROR_INVALID_OPERATION;
+ }
APPEND_ELEMENT(*element_list, encoder);
encoder_name = gst_element_get_name(encoder);
}
g_free(encoder_name);
+ source->av[idx].render.need_decoding = false;
+ __add_probe_to_pad_for_render(source, idx, gst_element_get_static_pad(capsfilter, "src"), __source_data_probe_cb);
+
skip_encoder:
CREATE_ELEMENT_FROM_REGISTRY(elem_info, GST_KLASS_NAME_PAYLOADER_RTP,
__make_default_encoded_caps(source, &webrtc->ini, &media_type),
NULL,
payloader);
- if (payloader == NULL) {
- g_free(media_type);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if (payloader == NULL)
+ goto error;
APPEND_ELEMENT(*element_list, payloader);
- if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL))) {
- g_free(media_type);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if (!(queue = _create_element(DEFAULT_ELEMENT_QUEUE, NULL)))
+ goto error;
APPEND_ELEMENT(*element_list, queue);
- if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER))) {
- g_free(media_type);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
+ if (!(capsfilter2 = _create_element(DEFAULT_ELEMENT_CAPSFILTER, ELEMENT_NAME_RTP_CAPSFILTER)))
+ goto error;
APPEND_ELEMENT(*element_list, capsfilter2);
- payload_id = __get_available_payload_id(webrtc);
- if (payload_id == 0) {
- g_free(media_type);
- return WEBRTC_ERROR_INVALID_OPERATION;
- }
- source->av[source->media_types == MEDIA_TYPE_VIDEO ? AV_IDX_VIDEO : AV_IDX_AUDIO].payload_id = payload_id;
+ if ((payload_id = __get_available_payload_id(webrtc)) == 0)
+ goto error;
+
+ source->av[idx].payload_id = payload_id;
if ((sink_caps = __make_rtp_caps(media_type, payload_id))) {
g_object_set(G_OBJECT(capsfilter2), "caps", sink_caps, NULL);
g_free(media_type);
+ g_object_get(G_OBJECT(capsfilter), "caps", &sink_caps, NULL);
+ if (sink_caps)
+ source->av[idx].render.appsrc_caps = sink_caps;
+
return WEBRTC_ERROR_NONE;
+
+error:
+ __remove_probe_from_pad_for_render(source, idx);
+ g_free(media_type);
+
+ return WEBRTC_ERROR_INVALID_OPERATION;
}
static int __create_rest_of_elements_for_encoded_format(webrtc_s *webrtc, webrtc_gst_slot_s *source,
return GST_PAD_PROBE_OK;
}
-static void __add_probe_to_pad(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
+static void __add_probe_to_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx, GstPad *pad, void *probe_cb)
{
probe_userdata_s *probe_userdata;
source->id, idx, pad, source->av[idx].src_pad_probe_id, probe_cb);
}
-static void __remove_probe_from_pad(webrtc_gst_slot_s *source, unsigned int idx)
+static void __remove_probe_from_pad_for_pause(webrtc_gst_slot_s *source, unsigned int idx)
{
RET_IF(source == NULL, "source is NULL");
RET_IF(idx >= AV_IDX_MAX, "invalid idx(%u)", idx);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(switch_src_list);
SAFE_G_LIST_FREE(element_list);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_VIDEO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
if (ret != WEBRTC_ERROR_NONE)
goto exit_with_remove_from_bin;
- __add_probe_to_pad(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
+ __add_probe_to_pad_for_pause(source, AV_IDX_AUDIO, src_pad, __payloaded_data_probe_cb);
SAFE_G_LIST_FREE(element_list);
gst_element_foreach_src_pad(GST_ELEMENT(source->bin), __foreach_src_pad_cb, source);
for (i = 0; i < AV_IDX_MAX; i++) {
- __remove_probe_from_pad(source, i);
+ __remove_probe_from_pad_for_pause(source, i);
+ __remove_probe_from_pad_for_render(source, i);
if (source->av[i].payload_id > 0)
__return_payload_id(source->webrtc, source->av[i].payload_id);
+
+ if (source->av[i].render.pipeline) {
+ gst_element_set_state(source->av[i].render.pipeline, GST_STATE_NULL);
+ SAFE_GST_OBJECT_UNREF(source->av[i].render.pipeline);
+ }
}
gst_bin_remove(GST_BIN(gst_element_get_parent(source->bin)), GST_ELEMENT(source->bin));
if (source->allocator)
gst_object_unref(source->allocator);
+ if (source->display)
+ _release_display(source->display);
+
g_free(source);
}
return WEBRTC_ERROR_NONE;
}
+
+/* Appends "videoconvert ! <videosink>" after @link_with inside the loopback
+ * render pipeline (source->av[AV_IDX_VIDEO].render.pipeline).
+ * The sink factory depends on the display type: the ini-configured sink for
+ * OVERLAY/ECORE_WL, a fakesink with a "handoff" callback for EVAS rendering.
+ * On failure the whole render pipeline is unreffed and
+ * WEBRTC_ERROR_INVALID_OPERATION is returned. */
+static int __build_loopback_videosink(webrtc_gst_slot_s *source, GstElement *link_with)
+{
+ webrtc_s *webrtc;
+ const char *videosink_factory_name;
+ GstElement *videosink;
+ GstElement *videoconvert;
+ int ret = WEBRTC_ERROR_NONE;
+
+ RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+ RET_VAL_IF(link_with == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "link_with is NULL");
+ RET_VAL_IF(source->webrtc == NULL, WEBRTC_ERROR_INVALID_OPERATION, "webrtc is NULL");
+ RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+ RET_VAL_IF(source->display->object == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->object is NULL");
+
+ webrtc = source->webrtc;
+
+ switch (source->display->type) {
+ case WEBRTC_DISPLAY_TYPE_OVERLAY:
+ case WEBRTC_DISPLAY_TYPE_ECORE_WL:
+ videosink_factory_name = webrtc->ini.rendering_sink.v_sink_element;
+ break;
+ case WEBRTC_DISPLAY_TYPE_EVAS:
+ /* EVAS rendering consumes raw frames via the handoff signal below. */
+ videosink_factory_name = DEFAULT_ELEMENT_FAKESINK;
+ break;
+ default:
+ LOG_ERROR_IF_REACHED("invalid display type(%d)", source->display->type);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ if (!(videosink = _create_element(videosink_factory_name, NULL)))
+ return WEBRTC_ERROR_INVALID_OPERATION;
+
+ if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL))) {
+ SAFE_GST_OBJECT_UNREF(videosink);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+#ifndef TIZEN_TV
+ /* Non-TV profile may require acquiring the video overlay HW resource first. */
+ if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY && webrtc->ini.resource_acquisition.video_overlay) {
+ if ((ret = _acquire_resource_for_type(webrtc, MM_RESOURCE_MANAGER_RES_TYPE_VIDEO_OVERLAY))) {
+ SAFE_GST_OBJECT_UNREF(videosink);
+ SAFE_GST_OBJECT_UNREF(videoconvert);
+ return ret;
+ }
+ }
+#endif
+ if (source->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY ||
+ source->display->type == WEBRTC_DISPLAY_TYPE_ECORE_WL) {
+ gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), source->display->overlay_surface_id);
+
+ } else if (source->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
+ g_object_set(videosink, "signal-handoffs", TRUE, NULL);
+ _connect_and_append_signal(&source->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), source->display);
+ }
+
+ gst_bin_add_many(GST_BIN(source->av[AV_IDX_VIDEO].render.pipeline), videoconvert, videosink, NULL);
+
+ if (!gst_element_sync_state_with_parent(videoconvert)) {
+ LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(videoconvert));
+ goto error;
+ }
+
+ if (!gst_element_sync_state_with_parent(videosink)) {
+ LOG_ERROR("failed to gst_element_sync_state_with_parent() for [%s]", GST_ELEMENT_NAME(videosink));
+ goto error;
+ }
+
+ if (!gst_element_link_many(link_with, videoconvert, videosink, NULL)) {
+ LOG_ERROR("failed to gst_element_link_many()");
+ goto error;
+ }
+
+ return WEBRTC_ERROR_NONE;
+
+error:
+ /* The pipeline owns videoconvert/videosink at this point; unreffing the
+  * pipeline disposes them as well. */
+ SAFE_GST_OBJECT_UNREF(source->av[AV_IDX_VIDEO].render.pipeline);
+ return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
+/* "pad-added" callback for the decodebin of the loopback render pipeline.
+ * Routes a newly exposed decoded src pad to the matching sink chain:
+ * video pads get videoconvert+videosink via __build_loopback_videosink();
+ * audio is not implemented yet. Posts an error to the application via
+ * _post_error_cb_in_idle() if building the sink fails. */
+static void __loopback_decodebin_pad_added_cb(GstElement *decodebin, GstPad *new_pad, gpointer user_data)
+{
+	int ret = WEBRTC_ERROR_NONE;
+	webrtc_gst_slot_s *source = (webrtc_gst_slot_s *)user_data;
+	GstCaps *caps;
+	const gchar *media_type;
+
+	RET_IF(source == NULL, "source is NULL");
+
+	/* Only src pads exposed by decodebin are of interest. */
+	if (GST_PAD_DIRECTION(new_pad) != GST_PAD_SRC)
+		return;
+
+	/* gst_pad_get_current_caps() is transfer-full and may return NULL if the
+	 * pad has no negotiated caps yet; check it and unref it when done. */
+	caps = gst_pad_get_current_caps(new_pad);
+	RET_IF(caps == NULL, "caps is NULL");
+
+	/* media_type points into caps; keep caps alive while it is used. */
+	media_type = gst_structure_get_name(gst_caps_get_structure(caps, 0));
+	LOG_INFO("source_id[%u], media_type[%s], new_pad[%s]", source->id, media_type, GST_PAD_NAME(new_pad));
+
+	if (g_strrstr(media_type, "video")) {
+		ret = __build_loopback_videosink(source, decodebin);
+
+	} else if (g_strrstr(media_type, "audio")) {
+		/* TODO : Implementation */
+
+	} else {
+		LOG_ERROR("not supported media type[%s]", media_type);
+		gst_caps_unref(caps);
+		return;
+	}
+
+	gst_caps_unref(caps);
+
+	if (ret != WEBRTC_ERROR_NONE) {
+		LOG_ERROR("failed to build loopback rendering sink, ret[0x%x]", ret);
+		_post_error_cb_in_idle(source->webrtc, ret);
+	}
+}
+
+/* Builds and starts the loopback render pipeline for @type:
+ *   appsrc [-> decodebin] -> (sink chain built by __build_loopback_videosink
+ *   or by __loopback_decodebin_pad_added_cb once pads appear).
+ * decodebin is inserted only when render.need_decoding was set while the
+ * source elements were created. The appsrc is fed from the source bin by
+ * __source_data_probe_cb(). The pipeline is stored in
+ * source->av[idx].render.pipeline and set to PLAYING before returning.
+ * Returns WEBRTC_ERROR_NONE on success. */
+static int __build_loopback_render_pipeline(webrtc_s *webrtc, webrtc_gst_slot_s *source, media_type_e type)
+{
+	int idx = (type == MEDIA_TYPE_AUDIO) ? AV_IDX_AUDIO : AV_IDX_VIDEO;
+	GstElement *appsrc;
+	gchar *pipeline_name;
+
+	RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+	RET_VAL_IF(source == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
+	RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display is NULL");
+	RET_VAL_IF(source->display->object == NULL, WEBRTC_ERROR_INVALID_OPERATION, "display->object is NULL");
+
+	pipeline_name = g_strdup_printf("webrtc-source_%u-%s-render-pipeline", source->id, type == MEDIA_TYPE_AUDIO ? "audio" : "video");
+	source->av[idx].render.pipeline = gst_pipeline_new(pipeline_name);
+	g_free(pipeline_name);
+	RET_VAL_IF(source->av[idx].render.pipeline == NULL, WEBRTC_ERROR_INVALID_OPERATION, "render_pipeline is NULL");
+
+	appsrc = _create_element("appsrc", NULL);
+	if (!appsrc)
+		goto error;
+
+	g_object_set(G_OBJECT(appsrc),
+		"is-live", TRUE,
+		"format", GST_FORMAT_TIME,
+		"caps", source->av[idx].render.appsrc_caps,
+		NULL);
+
+	if (source->av[idx].render.need_decoding) {
+		GstElement *decodebin = _create_element("decodebin", NULL);
+		if (!decodebin) {
+			/* appsrc is not in any bin yet, release its floating ref here. */
+			SAFE_GST_OBJECT_UNREF(appsrc);
+			goto error;
+		}
+
+		gst_bin_add_many(GST_BIN(source->av[idx].render.pipeline), appsrc, decodebin, NULL);
+		if (!gst_element_link(appsrc, decodebin)) {
+			LOG_ERROR("failed to gst_element_link()");
+			/* The pipeline owns both elements now; the unref below frees them. */
+			goto error;
+		}
+
+		g_signal_connect(decodebin, "pad-added", G_CALLBACK(__loopback_decodebin_pad_added_cb), source);
+		g_signal_connect(decodebin, "autoplug-select", G_CALLBACK(_decodebin_autoplug_select_cb), webrtc);
+
+	} else {
+		gst_bin_add(GST_BIN(source->av[idx].render.pipeline), appsrc);
+		if (__build_loopback_videosink(source, appsrc) != WEBRTC_ERROR_NONE) {
+			/* Do NOT unref appsrc here: gst_bin_add() sank its floating ref
+			 * and the bin owns it, so an extra unref would over-release it
+			 * when the pipeline is disposed below. */
+			goto error;
+		}
+	}
+
+	source->av[idx].render.appsrc = appsrc;
+
+	LOG_INFO("source_id[%u] pipeline[%p, %s] appsrc[%p]", source->id, source->av[idx].render.pipeline,
+		GST_ELEMENT_NAME(source->av[idx].render.pipeline), source->av[idx].render.appsrc);
+
+	gst_element_set_state(source->av[idx].render.pipeline, GST_STATE_PLAYING);
+
+	return WEBRTC_ERROR_NONE;
+
+error:
+	SAFE_GST_OBJECT_UNREF(source->av[idx].render.pipeline);
+	return WEBRTC_ERROR_INVALID_OPERATION;
+}
+
+/* Public entry point: renders the local video of @source_id to @display
+ * (loopback preview). Allocates/updates the slot's display, applies it and
+ * builds the loopback render pipeline. Not supported for media packet
+ * sources. On failure the display is released and set to NULL again. */
+int _set_video_loopback(webrtc_s *webrtc, unsigned int source_id, unsigned int type, void *display)
+{
+ int ret = WEBRTC_ERROR_NONE;
+ webrtc_gst_slot_s *source;
+
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+ RET_VAL_IF(source_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "source_id is 0");
+ RET_VAL_IF(display == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "display is NULL");
+ RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL,
+ WEBRTC_ERROR_INVALID_PARAMETER, "could not find source");
+ RET_VAL_IF((source->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_PARAMETER,
+ "invalid media_type for source[media_types:0x%x, id:%u]", source->media_types, source_id);
+ RET_VAL_IF((source->type == WEBRTC_MEDIA_SOURCE_TYPE_MEDIA_PACKET), WEBRTC_ERROR_INVALID_PARAMETER,
+ "this API does not support the media packet source");
+
+ /* Lazily allocate the display holder on first call for this source. */
+ if (source->display == NULL) {
+ source->display = _alloc_display();
+ RET_VAL_IF(source->display == NULL, WEBRTC_ERROR_INVALID_OPERATION, "source->display is NULL");
+ }
+
+ LOG_INFO("source_id[%u] type[%d] display[%p]", source_id, type, display);
+
+ source->display->type = type;
+ source->display->object = display;
+
+ ret = _apply_display(source->display);
+ if (ret != WEBRTC_ERROR_NONE)
+ goto error;
+
+ ret = __build_loopback_render_pipeline(webrtc, source, MEDIA_TYPE_VIDEO);
+ if (ret != WEBRTC_ERROR_NONE)
+ goto error;
+
+ return WEBRTC_ERROR_NONE;
+
+error:
+ /* Roll back: release the (possibly just-allocated) display. */
+ _release_display(source->display);
+ source->display = NULL;
+ return ret;
+}