webrtc_sink_snapshot: Add support for capturing a video frame 74/285274/1
author    Sangchul Lee <sc11.lee@samsung.com>
Fri, 2 Dec 2022 00:04:09 +0000 (09:04 +0900)
committer Sangchul Lee <sc11.lee@samsung.com>
Thu, 8 Dec 2022 08:42:23 +0000 (08:42 +0000)
With this new functionality, a video frame of a receiving video stream
can be captured (a hypothetical usage sketch follows the lists below).

As of now, it only supports
1. capturing the decoded video frame and converting it to the RGB24 format only.
2. saving a dump file, and only when the DUMP_CONVERTED_RESULT macro is defined.

This could be extended and improved later to
1. support a user callback.
2. support various output formats.
3. adopt a dedicated thread for format conversion.
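
A minimal sketch of how the new internal entry point could be exposed;
the wrapper name and its omission of state/locking checks are assumptions
for illustration only (this patch adds only _capture_video_frame_from_sink()):

  /* Hypothetical public wrapper, not part of this patch. */
  int webrtc_capture_video_frame(webrtc_h webrtc, unsigned int track_id)
  {
          webrtc_s *_webrtc = (webrtc_s *)webrtc;

          RET_VAL_IF(_webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");

          return _capture_video_frame_from_sink(_webrtc, track_id);
  }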

[Version] 0.3.268
[Issue Type] New feature

Change-Id: I662f41ef639182b78f2ffc95d7e036625cbe5c53
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
(cherry picked from commit 4bb87effbb964b1d6e6ece2604a6aac053d7fe99)

CMakeLists.txt
include/webrtc_private.h
packaging/capi-media-webrtc.spec
src/webrtc_sink.c
src/webrtc_sink_snapshot.c [new file with mode: 0644]

diff --git a/CMakeLists.txt b/CMakeLists.txt
index 2efec4f15c192229d553b432d4d92ef26bb76bd0..d7a68db8c662567c0a46b7e7fa5eabb21b504bdc 100644
@@ -12,7 +12,8 @@ INCLUDE_DIRECTORIES(${INC_DIR})
 
 SET(dependents "dlog glib-2.0 gstreamer-1.0 gstreamer-webrtc-1.0 gstreamer-video-1.0 gstreamer-audio-1.0 \
                 gstreamer-allocators-1.0 libpulse json-glib-1.0 iniparser mm-common mm-display-interface capi-media-tool \
-                libtbm libwebsockets cynara-client libsmack capi-system-info libsoup-2.4 bundle capi-media-sound-manager mm-fileinfo")
+                libtbm libwebsockets cynara-client libsmack capi-system-info libsoup-2.4 bundle capi-media-sound-manager \
+                mm-fileinfo mmutil-common mmutil-imgp")
 IF(NOT TIZEN_PROFILE_TV)
     SET(dependents "${dependents} mm-resource-manager")
     IF(TIZEN_FEATURE_UI)
diff --git a/include/webrtc_private.h b/include/webrtc_private.h
index 652f90d99181f3bdd9954a7a6fddf755cfd204be..2994a1b352ccb79b69a1fda3f1320421375d4570 100644
@@ -254,6 +254,7 @@ do { \
 
 #define ELEMENT_NAME_AUDIO_ENCODER      "audioEncoder"
 #define ELEMENT_NAME_VIDEO_ENCODER      "videoEncoder"
+#define ELEMENT_NAME_VIDEO_SINK         "videoSink"
 
 #define MEDIA_TYPE_AUDIO_RAW            "audio/x-raw"
 #define MEDIA_TYPE_AUDIO_MULAW          "audio/x-mulaw"
@@ -595,6 +596,11 @@ typedef struct _webrtc_gst_slot_s {
 
        webrtc_display_s *display;
 
+       struct {
+               GstPad *sink_pad;
+               gulong sink_pad_probe_id;
+       } snapshot;
+
        struct {
                gchar *path;
                FILE *fp;
@@ -771,6 +777,10 @@ GstPadProbeReturn _depayloaded_data_probe_cb(GstPad *pad, GstPadProbeInfo *info,
 void _add_probe_to_pad_for_dump(webrtc_gst_slot_s *sink, GstElement *element, void *probe_cb);
 void _remove_probe_from_pad_for_dump(webrtc_gst_slot_s *sink);
 
+/* sink capture video frame */
+int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id);
+int _capture_video_frame(webrtc_gst_slot_s *sink);
+
 /* display */
 void _video_stream_decoded_cb(GstElement *object, GstBuffer *buffer, GstPad *pad, gpointer data);
 int _apply_display(webrtc_display_s *display);
diff --git a/packaging/capi-media-webrtc.spec b/packaging/capi-media-webrtc.spec
index 7358eac5189fbfd672b1e24ded7a54a8bcb1b12f..8180e4acddceb48e8224cc6f506d015cb0b3a273 100644
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.267
+Version:    0.3.268
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
@@ -35,6 +35,8 @@ BuildRequires:  pkgconfig(capi-system-info)
 BuildRequires:  pkgconfig(capi-media-sound-manager)
 BuildRequires:  pkgconfig(bundle)
 BuildRequires:  pkgconfig(mm-fileinfo)
+BuildRequires:  pkgconfig(mmutil-common)
+BuildRequires:  pkgconfig(mmutil-imgp)
 %if "%{tizen_profile_name}" != "tv"
 BuildRequires:  pkgconfig(mm-resource-manager)
 BuildRequires:  pkgconfig(capi-system-sensor)
diff --git a/src/webrtc_sink.c b/src/webrtc_sink.c
index ff7bba4046b427f0dd9a09856fa9ad9fdcd8cb5f..511ad665394684ed6af76d7ca46e908c24e7b1ed 100644
@@ -177,7 +177,7 @@ static int __build_videosink(webrtc_s *webrtc, GstElement *decodebin, GstPad *sr
        sink->media_types = MEDIA_TYPE_VIDEO;
        display_is_set = !!sink->display;
 
-       if (!(videosink = _create_element(videosink_factory_name, NULL)))
+       if (!(videosink = _create_element(videosink_factory_name, ELEMENT_NAME_VIDEO_SINK)))
                return WEBRTC_ERROR_INVALID_OPERATION;
        APPEND_ELEMENT(element_list, videosink);
        link_to = videosink;
@@ -1305,4 +1305,22 @@ int _get_video_resolution_from_sink(webrtc_s *webrtc, unsigned int track_id, int
        return WEBRTC_ERROR_NONE;
 }
 
+int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id)
+{
+       webrtc_gst_slot_s *sink;
+
+       RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+       RET_VAL_IF(track_id == 0, WEBRTC_ERROR_INVALID_PARAMETER, "track id is 0");
+
+       sink = __find_sink_slot_by_id(webrtc, track_id);
+       RET_VAL_IF(sink == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "sink is NULL");
+       RET_VAL_IF(sink->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+       RET_VAL_IF(sink->encoded_frame_cb != NULL, WEBRTC_ERROR_INVALID_OPERATION, "it may be a forwarding sink for encoded frame callback");
+       RET_VAL_IF((sink->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
+
+       LOG_INFO("webrtc[%p] track_id[%u]", webrtc, track_id);
+
+       return _capture_video_frame(sink);
+}
+
 //LCOV_EXCL_STOP
diff --git a/src/webrtc_sink_snapshot.c b/src/webrtc_sink_snapshot.c
new file mode 100644
index 0000000..a169606
--- /dev/null
+++ b/src/webrtc_sink_snapshot.c
@@ -0,0 +1,340 @@
+/*
+ * Copyright (c) 2022 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "webrtc.h"
+#include "webrtc_private.h"
+#include <gst/video/video-info.h>
+#include <mm_error.h>
+#include <mm_util_type.h>
+#include <mm_util_image.h>
+#include <mm_util_imgp.h>
+
+typedef struct {
+       unsigned char *data;
+       size_t size;
+       unsigned int width;
+       unsigned int height;
+} webrtc_video_frame_s;
+
+//LCOV_EXCL_START
+static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement *videosink, void *probe_cb)
+{
+       GstPad *pad;
+       probe_userdata_s *probe_userdata;
+       g_autofree gchar *media_type = NULL;
+
+       RET_IF(sink == NULL, "sink is NULL");
+       RET_IF(videosink == NULL, "videosink is NULL");
+       RET_IF(probe_cb == NULL, "probe_cb is NULL");
+
+       pad = gst_element_get_static_pad(videosink, "sink");
+       media_type = _get_mime_type_from_pad(pad);
+
+       LOG_DEBUG("media_type[%s]", media_type);
+
+       probe_userdata = g_new0(probe_userdata_s, 1);
+       probe_userdata->slot = sink;
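+       /* Keep the pad reference; it is released in __remove_probe_from_pad_for_snapshot(). */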
+       sink->snapshot.sink_pad = pad;
+       sink->snapshot.sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
+                       probe_cb, probe_userdata, g_free);
+
+       LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe[id:%lu, callback:%p]",
+               sink->id, "snapshot", pad, sink->snapshot.sink_pad_probe_id, probe_cb);
+}
+
+static void __remove_probe_from_pad_for_snapshot(webrtc_gst_slot_s *sink)
+{
+       RET_IF(sink == NULL, "sink is NULL");
+
+       if (sink->snapshot.sink_pad_probe_id == 0)
+               return;
+
+       LOG_DEBUG("sink[id:%u] for SNAPSHOT pad[%p] probe_id[%lu]",
+               sink->id, sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
+
+       gst_pad_remove_probe(sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
+       gst_object_unref(sink->snapshot.sink_pad);
+
+       sink->snapshot.sink_pad_probe_id = 0;
+       sink->snapshot.sink_pad = NULL;
+}
+
+static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad, mm_util_color_format_e *color_format)
+{
+       GstCaps *caps;
+       GstVideoFrame vframe;
+       GstVideoInfo vinfo;
+       MMVideoBuffer *vbuffer = NULL;
+       int i;
+       guint plane_size;
+
+       RET_VAL_IF(buffer == NULL, NULL, "buffer is NULL");
+       RET_VAL_IF(pad == NULL, NULL, "pad is NULL");
+       RET_VAL_IF(color_format == NULL, NULL, "color_format is NULL");
+
+       caps = gst_pad_get_current_caps(pad);
+       gst_video_info_from_caps(&vinfo, caps);
+       gst_caps_unref(caps);
+
+       RET_VAL_IF(!gst_video_frame_map(&vframe, &vinfo, buffer, GST_MAP_READ), NULL, "failed to gst_video_frame_map()");
+
+       LOG_INFO("format[%s], resolution[%dx%d], plane_num[%d]",
+               gst_video_format_to_string(GST_VIDEO_INFO_FORMAT(&vinfo)),
+               GST_VIDEO_INFO_WIDTH(&vinfo), GST_VIDEO_INFO_HEIGHT(&vinfo),
+               GST_VIDEO_FRAME_N_PLANES(&vframe));
+
+       vbuffer = (MMVideoBuffer *)g_malloc0(sizeof(MMVideoBuffer));
+       vbuffer->width[0] = GST_VIDEO_INFO_WIDTH(&vinfo);
+       vbuffer->height[0] = GST_VIDEO_INFO_HEIGHT(&vinfo);
+       vbuffer->plane_num = GST_VIDEO_FRAME_N_PLANES(&vframe);
+
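+       /* Set the sub-plane geometry where needed and map the GStreamer video format to an mm-util color format. */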
+       switch (GST_VIDEO_INFO_FORMAT(&vinfo)) {
+       case GST_VIDEO_FORMAT_NV12:
+       case GST_VIDEO_FORMAT_SN12:
+               vbuffer->width[1] = vbuffer->width[0];
+               vbuffer->height[1] = vbuffer->height[0] >> 1;
+               *color_format = MM_UTIL_COLOR_NV12;
+               break;
+       case GST_VIDEO_FORMAT_I420:
+       case GST_VIDEO_FORMAT_S420:
+               vbuffer->width[1] = vbuffer->width[2] = vbuffer->width[0] >> 1;
+               vbuffer->height[1] = vbuffer->height[2] = vbuffer->height[0] >> 1;
+               *color_format = MM_UTIL_COLOR_I420;
+               break;
+       case GST_VIDEO_FORMAT_BGRx:
+               *color_format = MM_UTIL_COLOR_BGRX;
+               break;
+       default:
+               LOG_ERROR_IF_REACHED("type(%d)", GST_VIDEO_INFO_FORMAT(&vinfo));
+               g_free(vbuffer);
+               return NULL;
+       }
+
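+       /* Copy each plane verbatim, including stride padding; the padding is stripped later in __change_colorspace_to_rgb24(). */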
+       for (i = 0; i < vbuffer->plane_num; i++) {
+               vbuffer->stride_width[i] = GST_VIDEO_FRAME_PLANE_STRIDE(&vframe, i);
+               vbuffer->stride_height[i] = vbuffer->height[i];
+               plane_size = vbuffer->stride_width[i] * vbuffer->stride_height[i];
+               if (plane_size == 0) {
+                       LOG_ERROR("plane size is 0");
+                       gst_video_frame_unmap(&vframe);
+                       g_free(vbuffer);
+                       return NULL;
+               }
+               LOG_DEBUG("plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
+                       i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
+               vbuffer->data[i] = g_malloc(plane_size);
+               memcpy(vbuffer->data[i], GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), plane_size);
+       }
+
+       gst_video_frame_unmap(&vframe);
+
+       return vbuffer;
+}
+
+static int __convert_colorspace(unsigned char *src_data, size_t src_size, int src_w, int src_h, mm_util_color_format_e src_fmt, mm_util_color_format_e dst_fmt, webrtc_video_frame_s *result)
+{
+       int ret;
+       mm_util_image_h src_image;
+       mm_util_image_h dst_image;
+
+       RET_VAL_IF(src_data == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_data is NULL");
+       RET_VAL_IF(result == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "result is NULL");
+
+       LOG_DEBUG_ENTER();
+
+       ret = mm_image_create_image(src_w, src_h, src_fmt, src_data, src_size, &src_image);
+       RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_create_image()");
+
+       ret = mm_util_convert_colorspace(src_image, dst_fmt, &dst_image);
+       mm_image_destroy_image(src_image);
+       RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_util_convert_colorspace()");
+
+       mm_image_debug_image(dst_image, NULL);
+
+       ret = mm_image_get_image(dst_image, &result->width, &result->height, NULL, &result->data, &result->size);
+       mm_image_destroy_image(dst_image);
+       RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_get_image()");
+
+       LOG_DEBUG("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
+               src_data, src_size, src_w, src_h, src_fmt,
+               result->data, result->size, result->width, result->height, dst_fmt);
+
+       return WEBRTC_ERROR_NONE;
+}
+
+static int __get_src_size(MMVideoBuffer *vbuffer, mm_util_color_format_e color_format)
+{
+       RET_VAL_IF(vbuffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "vbuffer is NULL");
+
+       switch (color_format) {
+       case MM_UTIL_COLOR_NV12:
+               return (vbuffer->stride_width[0] * vbuffer->stride_height[0]) +
+                       (vbuffer->stride_width[1] * vbuffer->stride_height[1]);
+       case MM_UTIL_COLOR_I420:
+               return (vbuffer->stride_width[0] * vbuffer->stride_height[0]) +
+                       ((vbuffer->stride_width[1] * vbuffer->stride_height[1]) << 1);
+       case MM_UTIL_COLOR_BGRX:
+               return vbuffer->stride_width[0] * vbuffer->stride_height[0];
+       default:
+               LOG_ERROR_IF_REACHED("color_format(%d)", color_format);
+               return 0;
+       }
+}
+
+static int __change_colorspace_to_rgb24(MMVideoBuffer *vbuffer, mm_util_color_format_e color_format, webrtc_video_frame_s *result_video_frame)
+{
+       int ret;
+       guint src_size;
+       unsigned char *dst_buffer;
+       unsigned char *tmp_dst;
+       unsigned char *tmp_src;
+       int i;
+
+       RET_VAL_IF(vbuffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "vbuffer is NULL");
+       RET_VAL_IF(result_video_frame == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "result_video_frame is NULL");
+       RET_VAL_IF((src_size = __get_src_size(vbuffer, color_format)) == 0, WEBRTC_ERROR_INVALID_OPERATION, "src_size is 0");
+
+       dst_buffer = (unsigned char *)g_malloc(src_size);
+
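+       /* Repack the planar formats into one contiguous buffer without row padding (BGRx is copied as-is) before converting to RGB24. */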
+       switch (color_format) {
+       case MM_UTIL_COLOR_NV12:
+               tmp_dst = dst_buffer;
+
+               /* Y plane */
+               tmp_src = vbuffer->data[0];
+               for (i = 0; i < vbuffer->height[0]; i++) {
+                       memcpy(tmp_dst, tmp_src, vbuffer->width[0]);
+                       tmp_dst += vbuffer->width[0];
+                       tmp_src += vbuffer->stride_width[0];
+               }
+               /* UV plane*/
+               tmp_src = vbuffer->data[1];
+               for (i = 0; i < vbuffer->height[1]; i++) {
+                       memcpy(tmp_dst, tmp_src, vbuffer->width[1]);
+                       tmp_dst += vbuffer->width[1];
+                       tmp_src += vbuffer->stride_width[1];
+               }
+               break;
+       case MM_UTIL_COLOR_I420:
+               tmp_dst = dst_buffer;
+
+               /* Y plane */
+               tmp_src = vbuffer->data[0];
+               for (i = 0; i < vbuffer->height[0]; i++) {
+                       memcpy(tmp_dst, tmp_src, vbuffer->width[0]);
+                       tmp_dst += vbuffer->width[0];
+                       tmp_src += vbuffer->stride_width[0];
+               }
+               /* U plane */
+               tmp_src = vbuffer->data[1];
+               for (i = 0; i < vbuffer->height[1]; i++) {
+                       memcpy(tmp_dst, tmp_src, vbuffer->width[1]);
+                       tmp_dst += vbuffer->width[1];
+                       tmp_src += vbuffer->stride_width[1];
+               }
+               /* V plane */
+               tmp_src = vbuffer->data[2];
+               for (i = 0; i < vbuffer->height[2]; i++) {
+                       memcpy(tmp_dst, tmp_src, vbuffer->width[2]);
+                       tmp_dst += vbuffer->width[2];
+                       tmp_src += vbuffer->stride_width[2];
+               }
+               break;
+       case MM_UTIL_COLOR_BGRX:
+               memcpy(dst_buffer, vbuffer->data[0], src_size);
+               break;
+       default:
+               LOG_ERROR_IF_REACHED("color_format(%d)", color_format);
+               g_free(dst_buffer);
+               return WEBRTC_ERROR_INVALID_PARAMETER;
+       }
+
+       ret = __convert_colorspace(dst_buffer, src_size, vbuffer->width[0], vbuffer->height[0], color_format,
+               MM_UTIL_COLOR_RGB24, result_video_frame);
+       g_free(dst_buffer);
+
+       return ret;
+}
+
+#ifdef DUMP_CONVERTED_RESULT
+static void __dump_converted_result(webrtc_video_frame_s *result)
+{
+       g_autofree gchar *file_path = NULL;
+       g_autoptr(GError) error = NULL;
+       static unsigned int count = 1;
+
+       RET_IF(result == NULL, "result is NULL");
+
+       LOG_DEBUG_ENTER();
+
+       if (!result->data)
+               return;
+
+       file_path = g_strdup_printf("/tmp/rgb24_%ux%u_%u.dump", result->width, result->height, count++);
+
+       if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
+               LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
+               return;
+       }
+
+       LOG_DEBUG("converted rgb24 frame is dumped to [%s]", file_path);
+}
+#endif
+
+static GstPadProbeReturn __snapshot_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
+{
+       probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
+       MMVideoBuffer *vbuffer;
+       mm_util_color_format_e color_format;
+       webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+
+       RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
+
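+       /* One-shot capture: the probe is removed below after processing a single buffer, regardless of success. */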
+       if (!(vbuffer = __get_video_frame_raw_data(gst_pad_probe_info_get_buffer(info), pad, &color_format)))
+               goto out;
+
+       if (__change_colorspace_to_rgb24(vbuffer, color_format, &result_frame) != WEBRTC_ERROR_NONE)
+               goto out;
+
+       /* TODO: Append data to user callback */
+       /* TODO: Make a thread to process the conversion */
+
+#ifdef DUMP_CONVERTED_RESULT
+       __dump_converted_result(&result_frame);
+#endif
+out:
+       __remove_probe_from_pad_for_snapshot(probe_data->slot);
+       if (vbuffer) {
+               int i;
+               for (i = 0; i < vbuffer->plane_num; i++)
+                       g_free(vbuffer->data[i]);
+               g_free(vbuffer);
+       }
+       if (result_frame.data)
+               g_free(result_frame.data);
+
+       return GST_PAD_PROBE_OK;
+}
+
+int _capture_video_frame(webrtc_gst_slot_s *sink)
+{
+       GstElement *videosink;
+
+       RET_VAL_IF(sink == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "sink is NULL");
+       RET_VAL_IF(sink->bin == NULL, WEBRTC_ERROR_INVALID_OPERATION, "bin is NULL");
+       RET_VAL_IF(!(videosink = gst_bin_get_by_name(sink->bin, ELEMENT_NAME_VIDEO_SINK)), WEBRTC_ERROR_INVALID_OPERATION, "could not find video sink element");
+
+       __add_probe_to_pad_for_snapshot(sink, videosink, __snapshot_probe_cb);
+       gst_object_unref(videosink); /* drop the reference taken by gst_bin_get_by_name() */
+
+       return WEBRTC_ERROR_NONE;
+}
+//LCOV_EXCL_STOP
\ No newline at end of file