unsigned int height;
} webrtc_video_frame_s;
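+/* carries one raw video frame from the snapshot probe to the convert thread; 'exit' tells the thread to stop */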
+typedef struct {
+ mm_util_color_format_e color_format;
+ MMVideoBuffer *vbuffer;
+ bool exit;
+} queue_data_s;
+
//LCOV_EXCL_START
static void __add_probe_to_pad_for_snapshot(webrtc_gst_slot_s *sink, GstElement *videosink, void *probe_cb)
{
RET_IF(videosink == NULL, "videosink is NULL");
RET_IF(probe_cb == NULL, "probe_cb is NULL");
+ g_mutex_lock(&sink->snapshot.mutex); /* unlock in __remove_probe_from_pad_for_snapshot() */
+
pad = gst_element_get_static_pad(videosink, "sink");
media_type = _get_mime_type_from_pad(pad);
sink->snapshot.sink_pad_probe_id = gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER,
probe_cb, probe_userdata, g_free);
- LOG_DEBUG("sink[id:%u] for [%s] pad[%p] probe[id:%lu, callback:%p]",
- sink->id, "snapshot", pad, sink->snapshot.sink_pad_probe_id, probe_cb);
+ LOG_DEBUG("sink[id:%u], SNAPSHOT pad[%p] probe[id:%lu, callback:%p]",
+ sink->id, pad, sink->snapshot.sink_pad_probe_id, probe_cb);
}
static void __remove_probe_from_pad_for_snapshot(webrtc_gst_slot_s *sink)
{
RET_IF(sink == NULL, "sink is NULL");
- if (sink->snapshot.sink_pad_probe_id == 0)
- return;
-
- LOG_DEBUG("sink[id:%u] for SNAPSHOT pad[%p] probe_id[%lu]",
+ LOG_DEBUG("sink[id:%u], SNAPSHOT pad[%p] probe_id[%lu]",
sink->id, sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
gst_pad_remove_probe(sink->snapshot.sink_pad, sink->snapshot.sink_pad_probe_id);
sink->snapshot.sink_pad_probe_id = 0;
sink->snapshot.sink_pad = NULL;
+
+ g_mutex_unlock(&sink->snapshot.mutex); /* locked in __add_probe_to_pad_for_snapshot() */
}
static MMVideoBuffer* __get_video_frame_raw_data(GstBuffer *buffer, GstPad *pad, mm_util_color_format_e *color_format)
GST_VIDEO_INFO_WIDTH(&vinfo), GST_VIDEO_INFO_HEIGHT(&vinfo),
GST_VIDEO_FRAME_N_PLANES(&vframe));
- vbuffer = (MMVideoBuffer *)g_malloc0(sizeof(MMVideoBuffer));
+ vbuffer = g_new0(MMVideoBuffer, 1);
vbuffer->width[0] = GST_VIDEO_INFO_WIDTH(&vinfo);
vbuffer->height[0] = GST_VIDEO_INFO_HEIGHT(&vinfo);
vbuffer->plane_num = GST_VIDEO_FRAME_N_PLANES(&vframe);
g_free(vbuffer);
return NULL;
}
- LOG_DEBUG("plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
- i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
+ LOG_DEBUG("vbuffer[%p] plane[%d, %p, stride_width:%d, stride_height:%d, size:%u]",
+ vbuffer, i, GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), vbuffer->stride_width[i], vbuffer->stride_height[i], plane_size);
vbuffer->data[i] = g_malloc(plane_size);
memcpy(vbuffer->data[i], GST_VIDEO_FRAME_PLANE_DATA(&vframe, i), plane_size);
}
mm_image_destroy_image(dst_image);
RET_VAL_IF(ret != MM_ERROR_NONE, WEBRTC_ERROR_INVALID_OPERATION, "failed to mm_image_get_image()");
- LOG_DEBUG("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
+ LOG_INFO("src[data:%p, size:%zu, %dx%d, fmt:%d] -> dst[data:%p, size:%zu, %ux%u, fmt:%d]",
src_data, src_size, src_w, src_h, src_fmt,
result->data, result->size, result->width, result->height, dst_fmt);
if (!result->data)
return;
- file_path = g_strdup_printf("/tmp/rgb24_%ux%u_%u.dump", result->width, result->height, count++);
+ file_path = g_strdup_printf("/tmp/snapshot_%03u_%ux%u_RGB24.dump", count++, result->width, result->height);
if (!g_file_set_contents(file_path, (char *)result->data, result->size, &error)) {
LOG_ERROR("failed to g_file_set_contents() for %s, error:%s", file_path, error->message);
}
#endif
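+/* frees a queue_data_s; also set as the item free function of webrtc->snapshot.queue */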
+static void __release_queue_data(gpointer data)
+{
+ queue_data_s *qd = (queue_data_s *)data;
+ int i;
+
+ if (qd->vbuffer) {
+ for (i = 0; i < qd->vbuffer->plane_num; i++)
+ g_free(qd->vbuffer->data[i]);
+ }
+
+ LOG_DEBUG("release qd[%p, vbuffer:%p, exit:%d] done", qd, qd->vbuffer, qd->exit);
+
+ g_free(qd->vbuffer);
+ g_free(qd);
+}
+
static GstPadProbeReturn __snapshot_probe_cb(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
probe_userdata_s *probe_data = (probe_userdata_s *)user_data;
MMVideoBuffer *vbuffer;
mm_util_color_format_e color_format;
- webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+ queue_data_s *qd;
RET_VAL_IF(probe_data == NULL, GST_PAD_PROBE_REMOVE, "probe_data is NULL");
if (!(vbuffer = __get_video_frame_raw_data(gst_pad_probe_info_get_buffer(info), pad, &color_format)))
goto out;
- if (__change_colorspace_to_rgb24(vbuffer, color_format, &result_frame) != WEBRTC_ERROR_NONE)
- goto out;
+ qd = g_new0(queue_data_s, 1);
+ qd->color_format = color_format;
+ qd->vbuffer = vbuffer;
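+ /* ownership of qd and vbuffer moves to the convert thread through the queue */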
+ g_async_queue_push(probe_data->slot->webrtc->snapshot.queue, qd);
- /* TODO: Append data to user callback */
- /* TODO: Make a thread to process the conversion */
+out:
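+ /* one buffer is enough for a snapshot, so remove the probe immediately */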
+ __remove_probe_from_pad_for_snapshot(probe_data->slot);
+ return GST_PAD_PROBE_OK;
+}
+
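+/* worker thread: converts queued raw frames to RGB24 off the GStreamer streaming thread */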
+static gpointer __convert_thread(gpointer data)
+{
+ webrtc_s *webrtc = (webrtc_s *)data;
+
+ while (1) {
+ webrtc_video_frame_s result_frame = { NULL, 0, 0, 0 };
+ queue_data_s *qd;
+
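+ /* g_async_queue_pop() blocks until the probe callback or _deinit_convert_thread() pushes an item */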
+ LOG_DEBUG("wait for data...");
+ qd = g_async_queue_pop(webrtc->snapshot.queue);
+ LOG_INFO("process qd[%p, vbuffer:%p, exit:%d]", qd, qd->vbuffer, qd->exit);
+ if (qd->exit) {
+ __release_queue_data(qd);
+ break;
+ }
+ /* convert and dump */
+ if (__change_colorspace_to_rgb24(qd->vbuffer, qd->color_format, &result_frame) == WEBRTC_ERROR_NONE) {
#ifdef DUMP_CONVERTED_RESULT
- __dump_converted_result(&result_frame);
+ __dump_converted_result(&result_frame);
#endif
-out:
- __remove_probe_from_pad_for_snapshot(probe_data->slot);
- if (vbuffer) {
- int i;
- for (i = 0; i < vbuffer->plane_num; i++)
- g_free(vbuffer->data[i]);
- g_free(vbuffer);
- }
- if (result_frame.data)
+ /* TODO: Append data to user callback */
+ }
+
+ __release_queue_data(qd);
g_free(result_frame.data);
+ }
- return GST_PAD_PROBE_OK;
+ LOG_DEBUG("exit");
+ return NULL;
+}
+
+int _init_convert_thread(webrtc_s *webrtc)
+{
+ RET_VAL_IF(webrtc == NULL, WEBRTC_ERROR_INVALID_PARAMETER, "webrtc is NULL");
+
+ /* create the queue before the thread so the thread never pops from a NULL queue */
+ webrtc->snapshot.queue = g_async_queue_new_full(__release_queue_data);
+
+ if (!(webrtc->snapshot.thread = g_thread_try_new("convert_thread", __convert_thread, (gpointer)webrtc, NULL))) {
+ LOG_ERROR("failed to g_thread_try_new()");
+ g_async_queue_unref(webrtc->snapshot.queue);
+ webrtc->snapshot.queue = NULL;
+ return WEBRTC_ERROR_INVALID_OPERATION;
+ }
+
+ return WEBRTC_ERROR_NONE;
+}
+
+void _deinit_convert_thread(webrtc_s *webrtc)
+{
+ queue_data_s *qd;
+
+ RET_IF(webrtc == NULL, "webrtc is NULL");
+
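+ /* push an exit item to the front of the queue so the thread stops even if frames are still pending */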
+ qd = g_new0(queue_data_s, 1);
+ qd->exit = true;
+ g_async_queue_push_front(webrtc->snapshot.queue, qd);
+
+ LOG_DEBUG("waiting for thread join...");
+ g_thread_join(webrtc->snapshot.thread);
+ LOG_DEBUG("convert thread exits");
+
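+ /* any items still in the queue are freed by __release_queue_data() when the last ref is dropped */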
+ g_async_queue_unref(webrtc->snapshot.queue);
+ webrtc->snapshot.queue = NULL;
+ webrtc->snapshot.thread = NULL;
}
int _capture_video_frame(webrtc_gst_slot_s *sink)