--- /dev/null
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc.h"
+#include "webrtc_private.h"
+#include <gst/video/video-info.h>
+#include <mm_error.h>
+#include <mm_util_type.h>
+#include <mm_util_image.h>
+#include <mm_util_imgp.h>
+
/* One converted video frame for snapshot delivery.
 * The data buffer is heap-allocated; the holder releases it with g_free(). */
typedef struct {
	unsigned char *data;   /* raw pixel data (owned; released with g_free()) */
	size_t size;           /* byte length of data */
	unsigned int width;    /* frame width in pixels */
	unsigned int height;   /* frame height in pixels */
} webrtc_video_frame_s;
+
+//LCOV_EXCL_START
+static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement *videosink, void *probe_cb)
+{
+ GstPad *pad;
+ probe_userdata_s *probe_userdata;
+ g_autofree gchar *media_type = NULL;
+
+ RET_IF(sink == NULL, "sink is NULL");
+ RET_IF(videosink == NULL, "videosink is NULL");
+ RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+ pad = gst_element_get_static_pad(videosink, "sink");
+ media_type = _get_mime_type_from_pad(pad);
+
+ LOG_DEBUG("media_type[%s]", media_type);
+
+ probe_userdata = g_new0(probe_userdata_s, 1);
+ probe_userdata->slot = sink;
+ sink->snapshot.sink_pad = pad;
+ sink->snapshot.sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+ probe_cb, probe_userdata, g_free);
+
+ LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe[id:%lu, callback:%p]",
+ sink->id, "snapshot", pad, sink->snapshot.sink_pad_probe_id, probe_cb);
+}
+
+static void __remove_probe_from_pad_for_snapshot(webrtc_gst_slot_s *sink)
+{
+ RET_IF(sink == NULL, "sink is NULL");
+
+ if (sink->snapshot.sink_pad_probe_id == 0)
+ return;
+
+ LOG_DEBUG("sink[id:%u] for SNAPSHOT pad[%p] probe_id[%lu]",
+ sink->id, sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
+
+ gst_pad_remove_probe(sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
+ gst_object_unref(sink->snapshot.sink_pad);
+
+ sink->snapshot.sink_pad_probe_id = 0;
+ sink->snapshot.sink_pad = NULL;
+}
+
+static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad, mm_util_color_format_e *color_format)
+{
+ GstCaps *caps;
+ GstVideoFrame vframe;
+ GstVideoInfo vinfo;
+ MMVideoBuffer *vbuffer = NULL;
+ int i;
+ guint plane_size;
+
+ RET_VAL_IF(buffer == NULL, NULL, "buffer is NULL");
+ RET_VAL_IF(pad == NULL, NULL, "pad is NULL");
+ RET_VAL_IF(color_format == NULL, NULL, "color_format is NULL");
+
+ caps = gst_pad_get_current_caps(pad);
+ gst_video_info_from_caps(&vinfo, caps);
+ gst_caps_unref(caps);
+
+ RET_VAL_IF(!gst_video_frame_map(&vframe, &vinfo, buffer, GST_MAP_READ), NULL, "failed to gst_video_frame_map()");
+
+ LOG_INFO("format[%s], resolution[%dx%d], plane_num[%d]",
+ gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(&vinfo)),
+ GST_VIDEO_INFO_WIDTH(&vinfo), GST_VIDEO_INFO_HEIGHT(&vinfo),
+ GST_VIDEO_FRAME_N_PLANES(&vframe));
+
+ vbuffer = (MMVideoBuffer *)g_malloc0(sizeof(MMVideoBuffer));
+ vbuffer->width[0] = GST_VIDEO_INFO_WIDTH(&vinfo);
+ vbuffer->height[0] = GST_VIDEO_INFO_HEIGHT(&vinfo);
+ vbuffer->plane_num = GST_VIDEO_FRAME_N_PLANES(&vframe);
+
+ switch (GST_VIDEO_INFO_FORMAT(&vinfo)) {
+ case GST_VIDEO_FORMAT_NV12:
+ case GST_VIDEO_FORMAT_SN12:
+ vbuffer->width[1] = vbuffer->width[0];
+ vbuffer->height[1] = vbuffer->height[0] >> 1;
+ *color_format = MM_UTIL_COLOR_NV12;
+ break;
+ case GST_VIDEO_FORMAT_I420:
+ case GST_VIDEO_FORMAT_S420:
+ vbuffer->width[1] = vbuffer->width[2] = vbuffer->width[0] >> 1;
+ vbuffer->height[1] = vbuffer->height[2] = vbuffer->height[0] >> 1;
+ *color_format = MM_UTIL_COLOR_I420;
+ break;
+ case GST_VIDEO_FORMAT_BGRx:
+ *color_format = MM_UTIL_COLOR_BGRX;
+ break;
+ default:
+ LOG_ERROR_IF_REACHED("type(%d)", GST_VIDEO_INFO_FORMAT(&vinfo));
+ g_free(vbuffer);
+ return NULL;
+ }
+
+ for (i = 0; i < vbuffer->plane_num; i++) {
+ vbuffer->stride_width[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&vframe, i);
+ vbuffer->stride_height[i] = vbuffer->height[i];
+ plane_size = vbuffer->stride_width[i] * vbuffer->stride_height[i];
+ if (plane_size == 0) {
+ LOG_ERROR("plane size is 0");
+ gst_video_frame_unmap(&vframe);
+ g_free(vbuffer);
+ return NULL;
+ }
+ LOG_DEBUG("plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
+ i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
+ vbuffer->data[i] = g_malloc(plane_size);
+ memcpy(vbuffer->data[i], GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), plane_size);
+ }
+
+ gst_video_frame_unmap(&vframe);
+
+ return vbuffer;
+}
+
+static int __convert_colorspace(unsigned char *src_data, size_t src_size, int src_w, int src_h, mm_util_color_format_e src_fmt, mm_util_color_format_e dst_fmt, webrtc_video_frame_s *result)
+{
+ int ret;
+ mm_util_image_h src_image;
+ mm_util_image_h dst_image;
+
+ RET_VAL_IF(src_data == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_data is NULL");
+ RET_VAL_IF(result == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "result is NULL");
+
+ LOG_DEBUG_ENTER();
+
+ ret = mm_image_create_image(src_w, src_h, src_fmt, src_data, src_size, &src_image);
+ RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_create_image()");
+
+ ret = mm_util_convert_colorspace(src_image, dst_fmt, &dst_image);
+ mm_image_destroy_image(src_image);
+ RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_util_convert_colorspace()");
+
+ mm_image_debug_image(dst_image, NULL);
+
+ ret = mm_image_get_image(dst_image, &result->width, &result->height, NULL, &result->data, &result->size);
+ mm_image_destroy_image(dst_image);
+ RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_get_image()");
+
+ LOG_DEBUG("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
+ src_data, src_size, src_w, src_h, src_fmt,
+ result->data, result->size, result->width, result->height, dst_fmt);
+
+ return WEBRTC_ERROR_NONE;
+}
+
+static int __get_src_size(MMVideoBuffer *vbuffer, mm_util_color_format_e color_format)
+{
+ RET_VAL_IF(vbuffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "vbuffer is NULL");
+
+ switch (color_format) {
+ case MM_UTIL_COLOR_NV12:
+ return (vbuffer->stride_width[0] * vbuffer->stride_height[0]) +
+ (vbuffer->stride_width[1] * vbuffer->stride_height[1]);
+ case MM_UTIL_COLOR_I420:
+ return (vbuffer->stride_width[0] * vbuffer->stride_height[0]) +
+ ((vbuffer->stride_width[1] * vbuffer->stride_height[1]) << 1);
+ case MM_UTIL_COLOR_BGRX:
+ return vbuffer->stride_width[0] * vbuffer->stride_height[0];
+ default:
+ LOG_ERROR_IF_REACHED("color_format(%d)", color_format);
+ return 0;
+ }
+}
+
+static int __change_colorspace_to_rgb24(MMVideoBuffer *vbuffer, mm_util_color_format_e color_format, webrtc_video_frame_s *result_video_frame)
+{
+ guint src_size;
+ unsigned char *dst_buffer;
+ unsigned char *tmp_dst;
+ unsigned char *tmp_src;
+ int i;
+
+ RET_VAL_IF(vbuffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "vbuffer is NULL");
+ RET_VAL_IF(result_video_frame == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "result_video_frame is NULL");
+ RET_VAL_IF((src_size = __get_src_size(vbuffer, color_format)) == 0, WEBRTC_ERROR_INVALID_OPERATION, "src_size is 0");
+
+ dst_buffer = (unsigned char *)g_malloc(src_size);
+
+ switch (color_format) {
+ case MM_UTIL_COLOR_NV12:
+ tmp_dst = dst_buffer;
+
+ /* Y plane */
+ tmp_src = vbuffer->data[0];
+ for (i = 0; i < vbuffer->height[0]; i++) {
+ memcpy(tmp_dst, tmp_src, vbuffer->width[0]);
+ tmp_dst += vbuffer->width[0];
+ tmp_src += vbuffer->stride_width[0];
+ }
+ /* UV plane*/
+ tmp_src = vbuffer->data[1];
+ for (i = 0; i < vbuffer->height[1]; i++) {
+ memcpy(tmp_dst, tmp_src, vbuffer->width[1]);
+ tmp_dst += vbuffer->width[1];
+ tmp_src += vbuffer->stride_width[1];
+ }
+ break;
+ case MM_UTIL_COLOR_I420:
+ tmp_dst = dst_buffer;
+
+ /* Y plane */
+ tmp_src = vbuffer->data[0];
+ for (i = 0; i < vbuffer->height[0]; i++) {
+ memcpy(tmp_dst, tmp_src, vbuffer->width[0]);
+ tmp_dst += vbuffer->width[0];
+ tmp_src += vbuffer->stride_width[0];
+ }
+ /* U plane */
+ tmp_src = vbuffer->data[1];
+ for (i = 0; i < vbuffer->height[1]; i++) {
+ memcpy(tmp_dst, tmp_src, vbuffer->width[1]);
+ tmp_dst += vbuffer->width[1];
+ tmp_src += vbuffer->stride_width[1];
+ }
+ /* V plane */
+ tmp_src = vbuffer->data[2];
+ for (i = 0; i < vbuffer->height[2]; i++) {
+ memcpy(tmp_dst, tmp_src, vbuffer->width[2]);
+ tmp_dst += vbuffer->width[2];
+ tmp_src += vbuffer->stride_width[2];
+ }
+ break;
+ case MM_UTIL_COLOR_BGRX:
+ memcpy(dst_buffer, vbuffer->data[0], src_size);
+ break;
+ default:
+ LOG_ERROR_IF_REACHED("color_format(%d)", color_format);
+ g_free(dst_buffer);
+ return WEBRTC_ERROR_INVALID_PARAMETER;
+ }
+
+ return __convert_colorspace(dst_buffer, src_size, vbuffer->width[0], vbuffer->height[0], color_format,
+ MM_UTIL_COLOR_RGB24, result_video_frame);
+}
+
+#ifdef DUMP_CONVERTED_RESULT
+static void __dump_converted_result(webrtc_video_frame_s *result)
+{
+ g_autofree gchar *file_path = NULL;
+ g_autoptr(GError) error = NULL;
+ static unsigned int count = 1;
+
+ RET_IF(result == NULL, "result is NULL");
+
+ LOG_DEBUG_ENTER();
+
+ if (!result->data)
+ return;
+
+ file_path = g_strdup_printf("/tmp/rgb24_%ux%u_%u.dump", result->width, result->height, count++);
+
+ if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
+ LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
+ return;
+ }
+
+ LOG_DEBUG("converted rgb24 frame is dumped to [%s]", file_path);
+}
+#endif
+
+static GstPadProbeReturn __snapshot_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+ probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+ MMVideoBuffer *vbuffer;
+ mm_util_color_format_e color_format;
+ webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+
+ RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+
+ if (!(vbuffer = __get_video_frame_raw_data(gst_pad_probe_info_get_buffer(info), pad, &color_format)))
+ goto out;
+
+ if (__change_colorspace_to_rgb24(vbuffer, color_format, &result_frame) != WEBRTC_ERROR_NONE)
+ goto out;
+
+ /* TODO: Append data to user callback */
+ /* TODO: Make a thread to process the conversion */
+
+#ifdef DUMP_CONVERTED_RESULT
+ __dump_converted_result(&result_frame);
+#endif
+out:
+ __remove_probe_from_pad_for_snapshot(probe_data->slot);
+ if (vbuffer) {
+ int i;
+ for (i = 0; i < vbuffer->plane_num; i++)
+ g_free(vbuffer->data[i]);
+ g_free(vbuffer);
+ }
+ if (result_frame.data)
+ g_free(result_frame.data);
+
+ return GST_PAD_PROBE_OK;
+}
+
+int _capture_video_frame(webrtc_gst_slot_s *sink)
+{
+ GstElement *videosink;
+
+ RET_VAL_IF(sink == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "sink is NULL");
+ RET_VAL_IF(sink->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+ RET_VAL_IF(!(videosink = gst_bin_get_by_name(sink->bin, ELEMENT_NAME_VIDEO_SINK)), WEBRTC_ERROR_INVALID_OPERATION, "could not find video sink element");
+
+ __add_probe_to_pad_for_snapshot(sink, videosink, __snapshot_probe_cb);
+
+ return WEBRTC_ERROR_NONE;
+}
+//LCOV_EXCL_STOP
\ No newline at end of file