source->media_types = MEDIA_TYPE_VIDEO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
if (!(screensrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_SCREEN), ELEMENT_NAME_SCREENSRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
RET_VAL_IF(src_pad == NULL, WEBRTC_ERROR_INVALID_OPERATION, "src_pad is NULL");
if (mute && source->camerasrc_probe_id == 0) {
- source->camerasrc_probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __camerasrc_probe_cb, NULL, NULL);
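+ /* pass the source slot as user data so the probe can read the cached raw format */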
+ source->camerasrc_probe_id = gst_pad_add_probe(src_pad, GST_PAD_PROBE_TYPE_BUFFER, __camerasrc_probe_cb, source, NULL);
if (source->camerasrc_probe_id == 0) {
LOG_ERROR("failed to gst_pad_add_probe()");
return WEBRTC_ERROR_INVALID_OPERATION;
source->media_types = MEDIA_TYPE_VIDEO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
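+ /* keep the raw video format from the ini so format-aware code (e.g. the camerasrc mute probe) can use it later */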
+ source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
+
#ifndef TIZEN_TV
if (webrtc->ini.resource_acquisition.camera)
webrtc->resource.need_to_acquire[MM_RESOURCE_MANAGER_RES_TYPE_CAMERA] = true;
source->media_types = MEDIA_TYPE_AUDIO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->av[AV_IDX_AUDIO].raw_format = g_strdup(ini_source->a_raw_format);
source_factory_name = __get_source_element(webrtc, use_mic ? WEBRTC_MEDIA_SOURCE_TYPE_MIC : WEBRTC_MEDIA_SOURCE_TYPE_AUDIOTEST);
if (!(audiosrc = _create_element(source_factory_name, use_mic ? ELEMENT_NAME_MIC_SRC : ELEMENT_NAME_AUDIO_SRC)))
source->media_types = MEDIA_TYPE_VIDEO;
source->zerocopy_enabled = __is_hw_encoder_used(webrtc, source->type, source->media_types);
+ source->av[AV_IDX_VIDEO].raw_format = g_strdup(ini_source->v_raw_format);
if (!(videotestsrc = _create_element(__get_source_element(webrtc, WEBRTC_MEDIA_SOURCE_TYPE_VIDEOTEST), ELEMENT_NAME_VIDEO_SRC)))
return WEBRTC_ERROR_INVALID_OPERATION;
LOG_ERROR("failed to 'push-buffer', gst_ret[0x%x]", gst_ret);
}
static GstPadProbeReturn __fakesink_block_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
webrtc_gst_slot_s *source = u_data;
gchar *media_type = NULL;
return GST_PAD_PROBE_OK;
}
static GstPadProbeReturn __fakesink_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
webrtc_gst_slot_s *source = u_data;
GstCaps *caps = NULL;
if (source->av[i].transceiver)
gst_object_unref(source->av[i].transceiver);
+
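+ /* free the raw format string duplicated when the source was created */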
+ g_free(source->av[i].raw_format);
}
gst_bin_remove(GST_BIN(gst_element_get_parent(source->bin)), GST_ELEMENT(source->bin));
}
if (source->type == WEBRTC_MEDIA_SOURCE_TYPE_FILE) {
if ((source->media_types & MEDIA_TYPE_AUDIO) && !__is_linked_pad(source, "audio"))
srcpad_name = g_strdup_printf("audio_src_%u", source->id);
else if ((source->media_types & MEDIA_TYPE_VIDEO) && !__is_linked_pad(source, "video"))
srcpad_name = g_strdup_printf("video_src_%u", source->id);
if (!srcpad_name) {
if (access(path, R_OK) < 0) {
if (errno == EACCES || errno == EPERM) {
LOG_ERROR("Fail to open path: Permission Denied [%s]", path);
return WEBRTC_ERROR_PERMISSION_DENIED;
} else {
LOG_ERROR("Fail to open path: Invalid Path [%s] errno[%d]", path, errno);
return WEBRTC_ERROR_INVALID_PARAMETER;
}
//LCOV_EXCL_START
static GstPadProbeReturn __camerasrc_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer u_data)
{
+ webrtc_gst_slot_s *source = u_data;
GstBuffer *buffer = gst_pad_probe_info_get_buffer(info);
GstMemory *mem = NULL;
- tbm_surface_h src_tsurface;
- tbm_surface_info_s ts_info;
- int ret = TBM_SURFACE_ERROR_NONE;
- unsigned int i = 0;
+ RET_VAL_IF(source == NULL, GST_PAD_PROBE_OK, "source is NULL");
RET_VAL_IF(buffer == NULL, GST_PAD_PROBE_OK, "buffer is NULL");
RET_VAL_IF(gst_buffer_get_size(buffer) == 0, GST_PAD_PROBE_OK, "empty buffer");
mem = gst_buffer_peek_memory(buffer, 0);
RET_VAL_IF(mem == NULL, GST_PAD_PROBE_OK, "mem is NULL");
- src_tsurface = (tbm_surface_h)gst_tizen_memory_get_surface(mem);
- if (!src_tsurface) {
- LOG_ERROR("failed to gst_tizen_memory_get_surface()");
- return GST_PAD_PROBE_OK;
- }
+ if (gst_is_tizen_memory(mem)) {
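+ /* zero-copy path: the buffer wraps a TBM surface, so clear each plane in place */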
+ tbm_surface_h src_tsurface = NULL;
+ tbm_surface_info_s ts_info;
+ unsigned int i = 0;
- ret = tbm_surface_get_info(src_tsurface, &ts_info);
- if (ret != TBM_SURFACE_ERROR_NONE) {
- LOG_ERROR("failed to tbm_surface_get_info() [%d]", ret);
- return GST_PAD_PROBE_OK;
- }
+ src_tsurface = (tbm_surface_h)gst_tizen_memory_get_surface(mem);
+ RET_VAL_IF(src_tsurface == NULL, GST_PAD_PROBE_OK, "failed to gst_tizen_memory_get_surface()");
- /* fill the buffer with black (NV12, YUV, RGB) */
- for (i = 0 ; i < ts_info.num_planes ; i++) {
- if (i == 0)
- memset(ts_info.planes[i].ptr, 0x00, ts_info.planes[i].size);
- else
- memset(ts_info.planes[i].ptr, 0x80, ts_info.planes[i].size);
+ if (tbm_surface_get_info(src_tsurface, &ts_info) != TBM_SURFACE_ERROR_NONE) {
+ LOG_ERROR("failed to tbm_surface_get_info()");
+ return GST_PAD_PROBE_OK;
+ }
+
+ /* fill the surface with black: 0x00 for RGB (SR32); video-range black (Y = 0x10, U/V = 0x80) for YUV */
+ if (g_strrstr(source->av[AV_IDX_VIDEO].raw_format, "SR32")) { /*RGB*/
+ for (i = 0 ; i < ts_info.num_planes ; i++)
+ memset(ts_info.planes[i].ptr, 0x00, ts_info.planes[i].size);
+ } else { /*YUV*/
+ for (i = 0 ; i < ts_info.num_planes ; i++)
+ memset(ts_info.planes[i].ptr, (i == 0) ? 0x10 : 0x80, ts_info.planes[i].size);
+ }
+ } else {
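+ /* system-memory path: map the GstMemory and overwrite its contents */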
+ GstMapInfo map_info;
+ memset(&map_info, 0x0, sizeof(GstMapInfo));
+ gsize mem_size = gst_memory_get_sizes(mem, NULL, NULL);
+
+ if (!gst_memory_map(mem, &map_info, GST_MAP_READWRITE)) {
+ LOG_ERROR("failed to gst_memory_map()");
+ return GST_PAD_PROBE_OK;
+ }
+
+ /* fill the buffer with black */
+ if (g_strrstr(source->av[AV_IDX_VIDEO].raw_format, "I420") || (g_strrstr(source->av[AV_IDX_VIDEO].raw_format, "NV12"))) {
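+ /* for 4:2:0 layouts the first two thirds of the buffer is the Y plane, the rest is chroma (assumes tightly packed planes) */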
+ memset(map_info.data, 0x10, mem_size * 2 / 3);
+ memset(map_info.data + mem_size * 2 / 3, 0x80, mem_size / 3);
+ } else {
+ memset(map_info.data, 0x00, mem_size);
+ }
+
+ gst_memory_unmap(mem, &map_info);
}
return GST_PAD_PROBE_OK;
RET_VAL_IF(width == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "width is NULL");
RET_VAL_IF(height == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "height is NULL");
screen_source = gst_bin_get_by_name(source->bin, ELEMENT_NAME_SCREENSRC);
RET_VAL_IF(screen_source == NULL, WEBRTC_ERROR_INVALID_OPERATION, "screen_source is NULL");
videocrop = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEOCROP);
RET_VAL_IF(videocrop == NULL, WEBRTC_ERROR_INVALID_OPERATION, "videocrop is NULL");
LOG_INFO("set source crop x:%d, y:%d, width:%d, height:%d, mode:%s", x, y, w, h, (portrait_mode) ? "portrait" : "landscape");
RET_VAL_IF((source = _get_slot_by_id(webrtc->gst.source_slots, source_id)) == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "source is NULL");
RET_VAL_IF(source->type != WEBRTC_MEDIA_SOURCE_TYPE_SCREEN, WEBRTC_ERROR_INVALID_PARAMETER, "source type is not screen");
screen_source = gst_bin_get_by_name(source->bin, ELEMENT_NAME_SCREENSRC);
RET_VAL_IF(screen_source == NULL, WEBRTC_ERROR_INVALID_OPERATION, "screen_source is NULL");
videocrop = gst_bin_get_by_name(source->bin, ELEMENT_NAME_VIDEOCROP);
RET_VAL_IF(videocrop == NULL, WEBRTC_ERROR_INVALID_OPERATION, "videocrop is NULL");
g_object_get(G_OBJECT(videocrop),