webrtc_sink_snapshot: Adopt a thread for converting video format 05/285105/2
authorSangchul Lee <sc11.lee@samsung.com>
Tue, 6 Dec 2022 06:54:28 +0000 (15:54 +0900)
committerSangchul Lee <sc11.lee@samsung.com>
Wed, 7 Dec 2022 07:21:59 +0000 (16:21 +0900)
[Version] 0.3.269
[Issue Type] Improvement

Change-Id: If60e49be6aa87f0011fd3ab4ae10bb10a03a150c
Signed-off-by: Sangchul Lee <sc11.lee@samsung.com>
include/webrtc_private.h
packaging/capi-media-webrtc.spec
src/webrtc.c
src/webrtc_sink.c
src/webrtc_sink_snapshot.c

index 2994a1b352ccb79b69a1fda3f1320421375d4570..83af3588b10a83b0aba144bb087e84d583ed89e9 100644 (file)
@@ -508,6 +508,11 @@ typedef struct _webrtc_s {
        GHashTable *data_channels;
        GHashTable *track_build_contexts;
 
+       struct {
+               GThread *thread;
+               GAsyncQueue *queue;
+       } snapshot;
+
        guint idle_cb_event_source_ids[IDLE_CB_TYPE_NUM];
 
        webrtc_callbacks_s error_cb;
@@ -599,6 +604,7 @@ typedef struct _webrtc_gst_slot_s {
        struct {
                GstPad *sink_pad;
                gulong sink_pad_probe_id;
+               GMutex mutex;
        } snapshot;
 
        struct {
@@ -778,6 +784,8 @@ void _add_probe_to_pad_for_dump(webrtc_gst_slot_s *sink, GstElement *element, vo
 void _remove_probe_from_pad_for_dump(webrtc_gst_slot_s *sink);
 
 /* sink capture video frame */
+int _init_convert_thread(webrtc_s *webrtc);
+void _deinit_convert_thread(webrtc_s *webrtc);
 int _capture_video_frame_from_sink(webrtc_s *webrtc, unsigned int track_id);
 int _capture_video_frame(webrtc_gst_slot_s *sink);
 
index 8180e4acddceb48e8224cc6f506d015cb0b3a273..bdf5cd7837fa90975ef4eba60681c738ae4c17a9 100644 (file)
@@ -1,6 +1,6 @@
 Name:       capi-media-webrtc
 Summary:    A WebRTC library in Tizen Native API
-Version:    0.3.268
+Version:    0.3.269
 Release:    0
 Group:      Multimedia/API
 License:    Apache-2.0
index e971627aa578f10d2ed8164626090ed24f24dfd4..73e06de23128f0d6365b44681779b2bc9a13064a 100644 (file)
@@ -110,6 +110,9 @@ int webrtc_create(webrtc_h *webrtc)
 #endif
        }
 
+       if ((ret = _init_convert_thread(_webrtc)) != WEBRTC_ERROR_NONE)
+               goto error;
+
        if ((ret = _gst_init(_webrtc)) != WEBRTC_ERROR_NONE)
                goto error;
 
@@ -167,6 +170,8 @@ int webrtc_destroy(webrtc_h webrtc)
        g_clear_pointer(&d_locker, g_mutex_locker_free);
        _gst_destroy_pipeline(_webrtc);
 
+       _deinit_convert_thread(_webrtc);
+
        _unload_ini(_webrtc);
 
 #if !defined(TIZEN_TV) && defined(TIZEN_FEATURE_UI)
index 511ad665394684ed6af76d7ca46e908c24e7b1ed..6eb718dd6d8e4e7a2a61631eaab2408a1e7fddae 100644 (file)
@@ -628,6 +628,8 @@ void _sink_slot_destroy_cb(gpointer data)
 
        _remove_probe_from_pad_for_dump(sink);
 
+       g_mutex_clear(&sink->snapshot.mutex);
+
        g_free(sink);
 }
 
@@ -649,6 +651,7 @@ static int __alloc_sink_slot(webrtc_s *webrtc, const gchar *pad_name, bool forwa
        (*sink)->id = id;
        (*sink)->bin = GST_BIN(gst_bin_new(*name));
        (*sink)->webrtc = webrtc;
+       g_mutex_init(&((*sink)->snapshot.mutex));
 
        LOG_DEBUG("webrtc[%p] sink[%p, id:%u, name:%s]", webrtc, *sink, id, *name);
 
index a16960643781cab267ab75fe1619a544d1be7efc..918f0cf3edee64bedcd43a4248ba1bd35897f6cc 100644 (file)
@@ -29,6 +29,12 @@ typedef struct {
        unsigned int height;
 } webrtc_video_frame_s;
 
/* Work item handed from the snapshot pad probe to the convert thread. */
typedef struct {
	mm_util_color_format_e color_format;	/* color format of vbuffer */
	MMVideoBuffer *vbuffer;			/* copied raw frame; owned by this item, freed by __release_queue_data() */
	bool exit;				/* true: sentinel telling the convert thread to stop */
} queue_data_s;
+
 //LCOV_EXCL_START
 static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement *videosink, void *probe_cb)
 {
@@ -40,6 +46,8 @@ static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement
        RET_IF(videosink == NULL, "videosink is NULL");
        RET_IF(probe_cb == NULL, "probe_cb is NULL");
 
+       g_mutex_lock(&sink->snapshot.mutex); /* unlock in __remove_probe_from_pad_for_snapshot() */
+
        pad = gst_element_get_static_pad(videosink, "sink");
        media_type = _get_mime_type_from_pad(pad);
 
@@ -51,18 +59,15 @@ static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement
        sink->snapshot.sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
                        probe_cb, probe_userdata, g_free);
 
-       LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe[id:%lu, callback:%p]",
-               sink->id, "snapshot", pad, sink->snapshot.sink_pad_probe_id, probe_cb);
+       LOG_DEBUG("sink[id:%u], SNAPSHOT pad[%p] probe[id:%lu, callback:%p]",
+               sink->id, pad, sink->snapshot.sink_pad_probe_id, probe_cb);
 }
 
 static void __remove_probe_from_pad_for_snapshot(webrtc_gst_slot_s *sink)
 {
        RET_IF(sink == NULL, "sink is NULL");
 
-       if (sink->snapshot.sink_pad_probe_id == 0)
-               return;
-
-       LOG_DEBUG("sink[id:%u] for SNAPSHOT pad[%p] probe_id[%lu]",
+       LOG_DEBUG("sink[id:%u], SNAPSHOT pad[%p] probe_id[%lu]",
                sink->id, sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
 
        gst_pad_remove_probe(sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
@@ -70,6 +75,8 @@ static void __remove_probe_from_pad_for_snapshot(webrtc_gst_slot_s *sink)
 
        sink->snapshot.sink_pad_probe_id = 0;
        sink->snapshot.sink_pad = NULL;
+
+       g_mutex_unlock(&sink->snapshot.mutex);
 }
 
 static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad, mm_util_color_format_e *color_format)
@@ -96,7 +103,7 @@ static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad,
                GST_VIDEO_INFO_WIDTH(&vinfo), GST_VIDEO_INFO_HEIGHT(&vinfo),
                GST_VIDEO_FRAME_N_PLANES(&vframe));
 
-       vbuffer = (MMVideoBuffer *)g_malloc0(sizeof(MMVideoBuffer));
+       vbuffer = g_new0(MMVideoBuffer, 1);
        vbuffer->width[0] = GST_VIDEO_INFO_WIDTH(&vinfo);
     vbuffer->height[0] = GST_VIDEO_INFO_HEIGHT(&vinfo);
        vbuffer->plane_num = GST_VIDEO_FRAME_N_PLANES(&vframe);
@@ -133,8 +140,8 @@ static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad,
                        g_free(vbuffer);
                        return NULL;
                }
-               LOG_DEBUG("plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
-                       i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
+               LOG_DEBUG("vbuffer[%p] plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
+                       vbuffer, i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
                vbuffer->data[i] = g_malloc(plane_size);
                memcpy(vbuffer->data[i], GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), plane_size);
        }
@@ -168,7 +175,7 @@ static int __convert_colorspace(unsigned char *src_data, size_t src_size, int sr
        mm_image_destroy_image(dst_image);
        RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_get_image()");
 
-       LOG_DEBUG("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
+       LOG_INFO("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
                src_data, src_size, src_w, src_h, src_fmt,
                result->data, result->size, result->width, result->height, dst_fmt);
 
@@ -279,7 +286,7 @@ static void __dump_converted_result(webrtc_video_frame_s *result)
        if (!result->data)
                return;
 
-       file_path = g_strdup_printf("/tmp/rgb24_%ux%u_%u.dump", result->width, result->height, count++);
+       file_path = g_strdup_printf("/tmp/snapshot_%03u_%ux%u_RGB24.dump", count++, result->width, result->height);
 
        if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
                LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
@@ -290,39 +297,103 @@ static void __dump_converted_result(webrtc_video_frame_s *result)
 }
 #endif
 
+static void __release_queue_data(gpointer data)
+{
+       queue_data_s *qd = (queue_data_s *)data;
+       int i;
+
+       for (i = 0; qd->vbuffer && i < qd->vbuffer->plane_num; i++)
+               g_free(qd->vbuffer->data[i]);
+
+       LOG_DEBUG("release qd[%p, vbuffer:%p, exit:%d] done", qd, qd->vbuffer, qd->exit);
+
+       g_free(qd->vbuffer);
+       g_free(qd);
+}
+
/* One-shot pad probe: copies the raw video frame out of the buffer, hands it
 * to the convert thread through the async queue, then removes itself from the
 * pad (which also releases the snapshot mutex taken when the probe was added). */
static GstPadProbeReturn __snapshot_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
	probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
	mm_util_color_format_e color_format;
	MMVideoBuffer *vbuffer;

	RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");

	vbuffer = __get_video_frame_raw_data(gst_pad_probe_info_get_buffer(info), pad, &color_format);
	if (vbuffer) {
		queue_data_s *qd = g_new0(queue_data_s, 1);

		qd->color_format = color_format;
		qd->vbuffer = vbuffer;
		/* ownership of vbuffer transfers to the queue item; the convert
		 * thread (or the queue's destroy notify) frees it */
		g_async_queue_push(probe_data->slot->webrtc->snapshot.queue, qd);
	}

	__remove_probe_from_pad_for_snapshot(probe_data->slot);

	return GST_PAD_PROBE_OK;
}
+
+static gpointer __convert_thread(gpointer data)
+{
+       webrtc_s *webrtc = (webrtc_s *)data;
+
+       while (1) {
+               webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+               queue_data_s *qd;
+
+               LOG_DEBUG("wait for data...");
+               qd = g_async_queue_pop(webrtc->snapshot.queue);
+               LOG_INFO("process qd[%p, vbuffer:%p, exit:%d]", qd, qd->vbuffer, qd->exit);
+               if (qd->exit) {
+                       __release_queue_data(qd);
+                       break;
+               }
 
+               /* convert and dump */
+               if (__change_colorspace_to_rgb24(qd->vbuffer, qd->color_format, &result_frame) == WEBRTC_ERROR_NONE) {
 #ifdef DUMP_CONVERTED_RESULT
-       __dump_converted_result(&result_frame);
+                       __dump_converted_result(&result_frame);
 #endif
-out:
-       __remove_probe_from_pad_for_snapshot(probe_data->slot);
-       if (vbuffer) {
-               int i;
-               for (i = 0; i < vbuffer->plane_num; i++)
-                       g_free(vbuffer->data[i]);
-               g_free(vbuffer);
-       }
-       if (result_frame.data)
+                       /* TODO: Append data to user callback */
+               }
+
+               __release_queue_data(qd);
                g_free(result_frame.data);
+       }
 
-       return GST_PAD_PROBE_OK;
+       LOG_DEBUG("exit");
+       return NULL;
+}
+
+int _init_convert_thread(webrtc_s *webrtc)
+{
+       RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+
+       if (!(webrtc->snapshot.thread = g_thread_try_new("convert_thread", __convert_thread, (gpointer)webrtc, NULL))) {
+               LOG_ERROR("failed to g_thread_try_new()");
+               return WEBRTC_ERROR_INVALID_OPERATION;
+       }
+
+       webrtc->snapshot.queue = g_async_queue_new_full(__release_queue_data);
+
+       return WEBRTC_ERROR_NONE;
+}
+
+void _deinit_convert_thread(webrtc_s *webrtc)
+{
+       queue_data_s *qd;
+
+       RET_IF(webrtc == NULL, "webrtc is NULL");
+
+       qd = g_new0(queue_data_s, 1);
+       qd->exit = true;
+       g_async_queue_push_front(webrtc->snapshot.queue, qd);
+
+       LOG_DEBUG("waiting for thread join...");
+       g_thread_join(webrtc->snapshot.thread);
+       LOG_DEBUG("convert thread exits");
+
+       g_async_queue_unref(webrtc->snapshot.queue);
 }
 
 int _capture_video_frame(webrtc_gst_slot_s *sink)