webrtc_sink_snapshot: Support JPEG format 16/285116/6
author Sangchul Lee <sc11.lee@samsung.com>
Tue, 6 Dec 2022 10:34:14 +0000 (19:34 +0900)
committer Sangchul Lee <sc11.lee@samsung.com>
Thu, 8 Dec 2022 02:57:09 +0000 (11:57 +0900)
The location of the mutex lock is moved to _capture_video_frame_from_sink() so that the snapshot variables (target format and quality) are guarded properly.

[Version] 0.3.270
[Issue Type] Add

Change-Id: Iad94e4309c72e6c603d7249ad3460a18feeae9cc
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
CMakeLists.txt
include/webrtc_private.h
packaging/capi-media-webrtc.spec
src/webrtc_sink.c
src/webrtc_sink_snapshot.c

index d7a68db8c662567c0a46b7e7fa5eabb21b504bdc..9bb8d7f954ba25170f33ddd936a1b165498630d6 100644 (file)
@@ -13,7 +13,7 @@ INCLUDE_DIRECTORIES(${INC_DIR})
 SET(dependents "dlog glib-2.0 gstreamer-1.0 gstreamer-webrtc-1.0 gstreamer-video-1.0 gstreamer-audio-1.0 \
                 gstreamer-allocators-1.0 libpulse json-glib-1.0 iniparser mm-common mm-display-interface capi-media-tool \
                 libtbm libwebsockets cynara-client libsmack capi-system-info libsoup-2.4 bundle capi-media-sound-manager \
-                mm-fileinfo mmutil-common mmutil-imgp")
+                mm-fileinfo mmutil-common mmutil-imgp mmutil-jpeg")
 IF(NOT TIZEN_PROFILE_TV)
     SET(dependents "${dependents} mm-resource-manager")
     IF(TIZEN_FEATURE_UI)
index 83af3588b10a83b0aba144bb087e84d583ed89e9..56099f09879192e0bd78ef0616d2f637108b3259 100644 (file)
@@ -538,6 +538,11 @@ typedef struct _webrtc_s {
 #endif
 } webrtc_s;
 
+typedef enum snapshot_format_e {
+       SNAPSHOT_FORMAT_RGB24,
+       SNAPSHOT_FORMAT_JPEG,
+} snapshot_format_e;
+
 /* FIXME: divide into two slot types or use union */
 typedef struct _webrtc_gst_slot_s {
        unsigned int id;
@@ -605,6 +610,8 @@ typedef struct _webrtc_gst_slot_s {
                GstPad *sink_pad;
                gulong sink_pad_probe_id;
                GMutex mutex;
+               snapshot_format_e target_format;
+               int quality;
        } snapshot;
 
        struct {
@@ -786,7 +793,7 @@ void _remove_probe_from_pad_for_dump(webrtc_gst_slot_s *sink);
 /* sink capture video frame */
 int _init_convert_thread(webrtc_s *webrtc);
 void _deinit_convert_thread(webrtc_s *webrtc);
-int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id);
+int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id, snapshot_format_e format, int quality);
 int _capture_video_frame(webrtc_gst_slot_s *sink);
 
 /* display */
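
For reference, a minimal caller sketch of the updated internal signature above. The wrapper name below is hypothetical and only illustrates how the new snapshot_format_e and quality arguments are expected to be passed through; it is not part of this change.

#include <stdbool.h>
#include "webrtc_private.h"

/* hypothetical wrapper, not part of this change */
static int __request_snapshot(webrtc_s *webrtc, unsigned int track_id, bool as_jpeg, int quality)
{
        snapshot_format_e format = as_jpeg ? SNAPSHOT_FORMAT_JPEG : SNAPSHOT_FORMAT_RGB24;

        return _capture_video_frame_from_sink(webrtc, track_id, format, quality);
}
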
index bdf5cd7837fa90975ef4eba60681c738ae4c17a9..5c33dab3cf1feb159de9c0e7112fd7bfd89f027c 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.269
+Version:    0.3.270
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
@@ -37,6 +37,7 @@ BuildRequires:  pkgconfig(bundle)
 BuildRequires:  pkgconfig(mm-fileinfo)
 BuildRequires:  pkgconfig(mmutil-common)
 BuildRequires:  pkgconfig(mmutil-imgp)
+BuildRequires:  pkgconfig(mmutil-jpeg)
 %if "%{tizen_profile_name}" != "tv"
 BuildRequires:  pkgconfig(mm-resource-manager)
 BuildRequires:  pkgconfig(capi-system-sensor)
index 6eb718dd6d8e4e7a2a61631eaab2408a1e7fddae..fa3cce4d39a580d56d4e96af56f0f06001295f8f 100644 (file)
@@ -1308,7 +1308,7 @@ int _get_video_resolution_from_sink(webrtc_s *webrtc, unsigned int track_id, int
        return WEBRTC_ERROR_NONE;
 }
 
-int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id)
+int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id, snapshot_format_e format, int quality)
 {
        webrtc_gst_slot_s *sink;
 
@@ -1321,7 +1321,12 @@ int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id)
        RET_VAL_IF(sink->encoded_frame_cb != NULL, WEBRTC_ERROR_INVALID_OPERATION, "it may be a forwarding sink for encoded frame callback");
        RET_VAL_IF((sink->media_types & MEDIA_TYPE_VIDEO) == 0x0, WEBRTC_ERROR_INVALID_OPERATION, "it's not a video track");
 
-       LOG_INFO("webrtc[%p] track_id[%u]", webrtc, track_id);
+       g_mutex_lock(&sink->snapshot.mutex); /* unlock in __remove_probe_from_pad_for_snapshot() */
+
+       sink->snapshot.target_format = format;
+       sink->snapshot.quality = quality;
+
+       LOG_INFO("webrtc[%p] track_id[%u] format[%d] quality[%d]", webrtc, track_id, format, quality);
 
        return _capture_video_frame(sink);
 }
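
To make the commit message concrete, here is a standalone, simplified sketch of the locking pattern adopted above (illustrative names, not the actual source; only glib-2.0 is required): the mutex is taken in the function that writes the snapshot fields and released later in the function that removes the pad probe, so the fields stay guarded for the whole capture request.

#include <glib.h>
#include <stdio.h>

typedef enum { FMT_RGB24, FMT_JPEG } fmt_e;

typedef struct {
        GMutex mutex;
        fmt_e target_format;
        int quality;
} snapshot_s;

/* corresponds to _capture_video_frame_from_sink(): lock, then write the fields */
static void request_capture(snapshot_s *s, fmt_e format, int quality)
{
        g_mutex_lock(&s->mutex); /* unlocked in finish_capture() */
        s->target_format = format;
        s->quality = quality;
}

/* corresponds to __remove_probe_from_pad_for_snapshot(): consume the fields, then unlock */
static void finish_capture(snapshot_s *s)
{
        printf("captured: format=%d quality=%d\n", s->target_format, s->quality);
        g_mutex_unlock(&s->mutex);
}

int main(void)
{
        snapshot_s s;

        g_mutex_init(&s.mutex);
        request_capture(&s, FMT_JPEG, 90);
        finish_capture(&s);
        g_mutex_clear(&s.mutex);

        return 0;
}
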
index 918f0cf3edee64bedcd43a4248ba1bd35897f6cc..8bd794da96ad14cdb09b7099c6322b709712905f 100644 (file)
@@ -21,6 +21,7 @@
 #include <mm_util_type.h>
 #include <mm_util_image.h>
 #include <mm_util_imgp.h>
+#include <mm_util_jpeg.h>
 
 typedef struct {
        unsigned char *data;
@@ -33,8 +34,15 @@ typedef struct {
        mm_util_color_format_e color_format;
        MMVideoBuffer *vbuffer;
        bool exit;
+       snapshot_format_e target_format;
+       int quality;
 } queue_data_s;
 
+static const char * __format_str[] = {
+       [SNAPSHOT_FORMAT_RGB24] = "RGB24",
+       [SNAPSHOT_FORMAT_JPEG] = "JPEG",
+};
+
 //LCOV_EXCL_START
 static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement *videosink, void *probe_cb)
 {
@@ -46,8 +54,6 @@ static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement
        RET_IF(videosink == NULL, "videosink is NULL");
        RET_IF(probe_cb == NULL, "probe_cb is NULL");
 
-       g_mutex_lock(&sink->snapshot.mutex); /* unlock in __remove_probe_from_pad_for_snapshot() */
-
        pad = gst_element_get_static_pad(videosink, "sink");
        media_type = _get_mime_type_from_pad(pad);
 
@@ -151,7 +157,7 @@ static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad,
        return vbuffer;
 }
 
-static int __convert_colorspace(unsigned char *src_data, size_t src_size, int src_w, int src_h, mm_util_color_format_e src_fmt, mm_util_color_format_e dst_fmt, webrtc_video_frame_s *result)
+static int __mm_image_convert_colorspace(unsigned char *src_data, size_t src_size, int src_w, int src_h, mm_util_color_format_e src_fmt, mm_util_color_format_e dst_fmt, webrtc_video_frame_s *result)
 {
        int ret;
        mm_util_image_h src_image;
@@ -182,6 +188,40 @@ static int __convert_colorspace(unsigned char *src_data, size_t src_size, int sr
        return WEBRTC_ERROR_NONE;
 }
 
+static int __mm_image_encode(unsigned char *src_data, size_t src_size, int src_w, int src_h, mm_util_color_format_e src_fmt, mm_util_img_codec_type codec, int quality, webrtc_video_frame_s *result)
+{
+       int ret;
+       mm_util_image_h src_image;
+
+       RET_VAL_IF(src_data == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "src_data is NULL");
+       RET_VAL_IF(result == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "result is NULL");
+
+       LOG_DEBUG_ENTER();
+
+       ret = mm_image_create_image(src_w, src_h, src_fmt, src_data, src_size, &src_image);
+       RET_VAL_IF(ret != MM_UTIL_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_create_image()");
+
+       result->width = src_w;
+       result->height = src_h;
+
+       switch (codec) {
+       case IMG_CODEC_JPEG:
+               ret = mm_util_encode_to_jpeg_memory(src_image, quality, (void **)&result->data, &result->size);
+               mm_image_destroy_image(src_image);
+               RET_VAL_IF(ret != MM_UTIL_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_util_encode_to_jpeg_memory()");
+
+               LOG_INFO("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, codec:JPEG, quality:%d]",
+                       src_data, src_size, src_w, src_h, src_fmt,
+                       result->data, result->size, result->width, result->height, quality);
+               return WEBRTC_ERROR_NONE;
+
+       default:
+               LOG_ERROR_IF_REACHED("codec(%d)", codec);
+               mm_image_destroy_image(src_image);
+               return WEBRTC_ERROR_INVALID_PARAMETER;
+       }
+}
+
 static int __get_src_size(MMVideoBuffer *vbuffer, mm_util_color_format_e color_format)
 {
        RET_VAL_IF(vbuffer == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "vbuffer is NULL");
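
A minimal usage sketch of the mm-util JPEG path introduced in __mm_image_encode() above, following the call signatures visible in this hunk. The helper name, error handling, and build setup are assumptions; the mm-util calls themselves are the ones used by this change.

#include <stddef.h>
#include <mm_util_type.h>
#include <mm_util_image.h>
#include <mm_util_jpeg.h>

/* hypothetical helper: encode an RGB24 buffer to an in-memory JPEG */
static int encode_rgb24_to_jpeg(unsigned char *rgb, size_t size, int width, int height,
        int quality, void **jpeg_data, size_t *jpeg_size)
{
        mm_util_image_h image;
        int ret;

        ret = mm_image_create_image(width, height, MM_UTIL_COLOR_RGB24, rgb, size, &image);
        if (ret != MM_UTIL_ERROR_NONE)
                return ret;

        ret = mm_util_encode_to_jpeg_memory(image, quality, jpeg_data, jpeg_size);
        mm_image_destroy_image(image);

        return ret; /* the caller owns and frees *jpeg_data on success */
}
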
@@ -268,32 +308,49 @@ static int __change_colorspace_to_rgb24(MMVideoBuffer *vbuffer, mm_util_color_fo
                return WEBRTC_ERROR_INVALID_PARAMETER;
        }
 
-       return __convert_colorspace(dst_buffer, src_size, vbuffer->width[0], vbuffer->height[0], color_format,
+       return __mm_image_convert_colorspace(dst_buffer, src_size, vbuffer->width[0], vbuffer->height[0], color_format,
                MM_UTIL_COLOR_RGB24, result_video_frame);
 }
 
 #ifdef DUMP_CONVERTED_RESULT
-static void __dump_converted_result(webrtc_video_frame_s *result)
+static void __dump_rgb24_result(webrtc_video_frame_s *result, unsigned int idx)
 {
        g_autofree gchar *file_path = NULL;
        g_autoptr(GError) error = NULL;
-       static unsigned int count = 1;
 
        RET_IF(result == NULL, "result is NULL");
 
-       LOG_DEBUG_ENTER();
+       if (!result->data)
+               return;
+
+       file_path = g_strdup_printf("/tmp/snapshot_%03u_%ux%u_RGB24.dump", idx, result->width, result->height);
+
+       if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
+               LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
+               return;
+       }
+
+       LOG_WARNING("converted rgb24 frame is dumped to [%s]", file_path);
+}
+
+static void __dump_encoded_result(webrtc_video_frame_s *result, snapshot_format_e target_format, unsigned int idx)
+{
+       g_autofree gchar *file_path = NULL;
+       g_autoptr(GError) error = NULL;
+
+       RET_IF(result == NULL, "result is NULL");
 
        if (!result->data)
                return;
 
-       file_path = g_strdup_printf("/tmp/snapshot_%03u_%ux%u_RGB24.dump", count++, result->width, result->height);
+       file_path = g_strdup_printf("/tmp/snapshot_%03u_%ux%u_%s.dump", idx, result->width, result->height, __format_str[target_format]);
 
        if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
                LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
                return;
        }
 
-       LOG_DEBUG("converted rgb24 frame is dumped to [%s]", file_path);
+       LOG_WARNING("encoded %s frame is dumped to [%s]", __format_str[target_format], file_path);
 }
 #endif
 
@@ -326,6 +383,8 @@ static GstPadProbeReturn __snapshot_probe_cb(GstPad *pad, GstPadProbeInfo *info,
        qd = g_new0(queue_data_s, 1);
        qd->color_format = color_format;
        qd->vbuffer = vbuffer;
+       qd->target_format = probe_data->slot->snapshot.target_format;
+       qd->quality = probe_data->slot->snapshot.quality;
        g_async_queue_push(probe_data->slot->webrtc->snapshot.queue, qd);
 
 out:
@@ -336,29 +395,52 @@ out:
 static gpointer __convert_thread(gpointer data)
 {
        webrtc_s *webrtc = (webrtc_s *)data;
+#ifdef DUMP_CONVERTED_RESULT
+       unsigned int idx = 0;
+#endif
 
        while (1) {
-               webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+               webrtc_video_frame_s rgb24_frame = { NULL, 0, 0, 0 };
+               webrtc_video_frame_s encoded_frame = { NULL, 0, 0, 0 };
                queue_data_s *qd;
 
                LOG_DEBUG("wait for data...");
                qd = g_async_queue_pop(webrtc->snapshot.queue);
-               LOG_INFO("process qd[%p, vbuffer:%p, exit:%d]", qd, qd->vbuffer, qd->exit);
+               LOG_INFO("process qd[%p, vbuffer:%p, target_format:%s, exit:%d]",
+                       qd, qd->vbuffer, __format_str[qd->target_format], qd->exit);
                if (qd->exit) {
                        __release_queue_data(qd);
                        break;
                }
 
-               /* convert and dump */
-               if (__change_colorspace_to_rgb24(qd->vbuffer, qd->color_format, &result_frame) == WEBRTC_ERROR_NONE) {
+               /* FIXME: skip this conversion if encoder supports other input formats */
+               if (__change_colorspace_to_rgb24(qd->vbuffer, qd->color_format, &rgb24_frame) == WEBRTC_ERROR_NONE) {
+                       int ret = WEBRTC_ERROR_NONE;
+
+                       /* check target format and encode it if needed */
+                       switch (qd->target_format) {
+                       case SNAPSHOT_FORMAT_RGB24:
+                               break; /* skip encoding */
+                       case SNAPSHOT_FORMAT_JPEG:
+                               ret = __mm_image_encode(rgb24_frame.data, rgb24_frame.size, rgb24_frame.width, rgb24_frame.height, MM_UTIL_COLOR_RGB24,
+                                       IMG_CODEC_JPEG, qd->quality, &encoded_frame);
+                               if (ret != WEBRTC_ERROR_NONE)
+                                       LOG_ERROR("failed to __mm_image_encode() for %s", __format_str[qd->target_format]);
+                               break;
+                       default:
+                               LOG_ERROR_IF_REACHED("target_format(%d)", qd->target_format);
+                               break;
+                       }
 #ifdef DUMP_CONVERTED_RESULT
-                       __dump_converted_result(&result_frame);
+                       __dump_rgb24_result(&rgb24_frame, ++idx);
+                       __dump_encoded_result(&encoded_frame, qd->target_format, idx);
 #endif
                        /* TODO: Append data to user callback */
                }
 
                __release_queue_data(qd);
-               g_free(result_frame.data);
+               g_free(rgb24_frame.data);
+               g_free(encoded_frame.data);
        }
 
        LOG_DEBUG("exit");
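
Finally, a standalone sketch (illustrative only, not the actual source; only glib-2.0 is required) of the hand-off pattern the convert thread relies on: the pad probe copies target_format and quality into each queue item, the worker pops per-request values from a GAsyncQueue, and a dedicated exit item stops the loop.

#include <glib.h>
#include <stdio.h>

typedef struct {
        int target_format; /* 0: RGB24, 1: JPEG */
        int quality;
        gboolean exit;
} item_s;

static gpointer convert_thread(gpointer data)
{
        GAsyncQueue *queue = data;

        while (1) {
                item_s *item = g_async_queue_pop(queue); /* blocks until an item arrives */
                gboolean stop = item->exit;

                if (!stop)
                        printf("convert: format=%d quality=%d\n", item->target_format, item->quality);

                g_free(item);
                if (stop)
                        break;
        }

        return NULL;
}

int main(void)
{
        GAsyncQueue *queue = g_async_queue_new();
        GThread *thread = g_thread_new("convert", convert_thread, queue);
        item_s *item;

        item = g_new0(item_s, 1);
        item->target_format = 1; /* JPEG */
        item->quality = 90;
        g_async_queue_push(queue, item);

        item = g_new0(item_s, 1);
        item->exit = TRUE;
        g_async_queue_push(queue, item);

        g_thread_join(thread);
        g_async_queue_unref(queue);

        return 0;
}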