#include <tbm_surface_internal.h>
#include <media_packet.h>
#include <media_format.h>
+#include <gst/allocators/gsttizenmemory.h>
#define BUFFER_MAX_PLANE_NUM 4
int height; /**< height of video buffer */
unsigned int timestamp; /**< timestamp of buffer (msec)*/
unsigned int length_total; /**< total length of buffer (in byte)*/
+ void *internal_buffer;
void *data[BUFFER_MAX_PLANE_NUM];
void *bo[BUFFER_MAX_PLANE_NUM]; /**< TBM buffer object */
int stride[BUFFER_MAX_PLANE_NUM]; /**< stride of plane */
return info;
}
+/* Fill info->stride[] and info->elevation[] for a zero-copy decoded frame by
+ * querying the per-plane layout of the TBM surface backing the GstMemory.
+ * NOTE(review): assumes the caller has already verified gst_is_tizen_memory(mem)
+ * and populated info->plane_num / info->height — confirm at the call site. */
+static void
+__mmplayer_zerocopy_set_stride_elevation_bo(video_decoded_data_info_s *info, GstMemory *mem)
+{
+	unsigned int pitch = 0;
+	unsigned int size = 0;
+	int index = 0;
+	tbm_surface_h surface = gst_tizen_memory_get_surface(mem);
+
+	for (index = 0; index < info->plane_num; index++) {
+		/* Only size and pitch are needed; offset out-param is skipped with NULL.
+		 * NOTE(review): verify tbm_surface_internal_get_plane_data accepts a NULL
+		 * offset argument — its return value is also ignored here, so a failed
+		 * query would silently leave size/pitch at their previous values. */
+		tbm_surface_internal_get_plane_data(surface, index, &size, NULL, &pitch);
+		info->stride[index] = pitch;
+		if (pitch)
+			info->elevation[index] = size / pitch; /* rows = plane bytes / bytes-per-row */
+		else
+			info->elevation[index] = info->height; /* pitch unknown: fall back to frame height (avoids div-by-zero) */
+	}
+}
+
static bool __swcodec_set_stride_elevation(video_decoded_data_info_s *info)
{
RET_VAL_IF(info == NULL, false, "info is NULL");
tbm_surface_destroy(ts);
}
- if (finalize_data->info->bo[0])
+ if (finalize_data->info->internal_buffer) {
+ gst_buffer_unref((GstBuffer *)finalize_data->info->internal_buffer);
+ } else if (finalize_data->info->bo[0]) { /* sw codec */
_release_tbm_bo(finalize_data->tbm, finalize_data->info->bo[0]);
+ }
g_free(finalize_data->info);
g_free(finalize_data);
info->length_total = (unsigned int)gst_memory_get_sizes(mem, NULL, NULL);
info->timestamp = (unsigned int)(GST_TIME_AS_MSECONDS(GST_BUFFER_PTS(buffer))); /* nano sec -> milli sec */
- if (!__swcodec_set_stride_elevation(info) || !__swcodec_set_bo(display, info, mem))
- goto ERROR;
-
- ts = __get_tbm_surface(info);
- if (ts == NULL) {
- LOG_ERROR("failed to __get_tbm_surface()");
- goto ERROR;
+ if (display->zerocopy &&
+ gst_is_tizen_memory(mem)) {
+ __mmplayer_zerocopy_set_stride_elevation_bo(info, mem);
+ info->internal_buffer = gst_buffer_ref(buffer);
+ ts = (tbm_surface_h) gst_tizen_memory_get_surface(mem);
+ } else { /* sw codec */
+ if (!__swcodec_set_stride_elevation(info) || !__swcodec_set_bo(display, info, mem))
+ goto ERROR;
+
+ ts = __get_tbm_surface(info);
+ if (ts == NULL) {
+ LOG_ERROR("failed to __get_tbm_surface()");
+ goto ERROR;
+ }
}
m_packet = __get_media_packet(display->tbm, info, ts);
ERROR:
LOG_ERROR("release video data resource");
-
- if (info->bo[0])
+ if (info->internal_buffer) {
+ gst_buffer_unref (info->internal_buffer);
+ } else if (info->bo[0]) {
_release_tbm_bo(display->tbm, info->bo[0]);
+ }
g_free(info);
return NULL;
factory_name = ini->rendering_sink.v_sink_element;
break;
case WEBRTC_DISPLAY_TYPE_EVAS:
- factory_name = DEFAULT_ELEMENT_FAKESINK;
+ factory_name = display->zerocopy ? "mesonvideosink" : DEFAULT_ELEMENT_FAKESINK;
break;
default:
LOG_ERROR_IF_REACHED("invalid type(%d)", display->type);
{
webrtc_gst_slot_s *sink;
GstElement *videoconvert = NULL;
- GstElement *capsfilter;
GstElement *videosink;
GstElement *link_to;
GstPad *sink_pad;
"qos", (gboolean)sink->display->qos,
NULL);
+ if (sink->display->zerocopy) {
+ g_object_set(G_OBJECT(videosink),
+ "use-drm", TRUE,
+ "use-tizen-video-object", FALSE,
+ NULL);
+ }
+
/* FIXME: The order of setting property and display is important when 'use-tbm' is true. The reverse does not work */
gst_video_overlay_set_wl_window_wl_surface_id(GST_VIDEO_OVERLAY(videosink), sink->display->overlay_surface_id);
LOG_DEBUG("overlay_surface_id[%d]", sink->display->overlay_surface_id);
}
} else if (sink->display->type == WEBRTC_DISPLAY_TYPE_EVAS) {
- GstCaps *caps;
g_object_set(G_OBJECT(videosink),
"sync", TRUE,
"qos", (gboolean)sink->display->qos,
NULL);
_connect_and_append_signal(&sink->signals, (GObject *)videosink, "handoff", G_CALLBACK(_video_stream_decoded_cb), sink->display);
- if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, NULL)))
- goto exit;
- PREPEND_ELEMENT(element_list, capsfilter);
-
- caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
- "format", G_TYPE_STRING, webrtc->ini.rendering_sink.evas_native_surface_tbm_format,
- NULL);
- g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
- gst_caps_unref(caps);
+ if (!sink->display->zerocopy) {
+ GstCaps *caps;
+ GstElement *capsfilter;
+ if (!(capsfilter = _create_element(DEFAULT_ELEMENT_CAPSFILTER, NULL)))
+ goto exit;
+ PREPEND_ELEMENT(element_list, capsfilter);
+
+ caps = gst_caps_new_simple(MEDIA_TYPE_VIDEO_RAW,
+ "format", G_TYPE_STRING, webrtc->ini.rendering_sink.evas_native_surface_tbm_format,
+ NULL);
+ g_object_set(G_OBJECT(capsfilter), "caps", caps, NULL);
+ gst_caps_unref(caps);
+ } else {
+ g_object_set(G_OBJECT(videosink),
+ "convert-mode", 1,
+ NULL);
+ }
}
- if (sink->av[AV_IDX_VIDEO].render.hw_decoder_used &&
- (sink->display->type == WEBRTC_DISPLAY_TYPE_OVERLAY ||
- sink->display->type == WEBRTC_DISPLAY_TYPE_ECORE_WL)) {
+ if (sink->av[AV_IDX_VIDEO].render.hw_decoder_used) {
link_to = videosink;
-
} else {
if (!(videoconvert = _create_element(DEFAULT_ELEMENT_VIDEOCONVERT, NULL)))
goto exit;
/* send SPS and PPS Insertion Interval every second */
g_object_set(G_OBJECT(element), "config-interval", 1, NULL);
_add_probe_to_pad_for_dump(sink, element, _depayloaded_data_probe_cb);
+ } else if (g_strrstr(factory_name, "v4l2")) {
+ LOG_INFO("[%s] set capture-io-mode to dmabuf-import", factory_name);
+ g_object_set(G_OBJECT(element), "capture-io-mode", 5, NULL);
}
/* all other depayloaders except h264 */
sink->media_types |= MEDIA_TYPE_VIDEO;
__invoke_track_added_cb(webrtc, GST_ELEMENT_NAME(decodebin), false, true);
_apply_display(sink->display);
+ sink->display->zerocopy = sink->av[AV_IDX_VIDEO].render.hw_decoder_used;
ret = __build_videosink(webrtc, decodebin, new_pad);
}