--- /dev/null
+/*
+ * vision_source_v4l2.c
+ *
+ * Copyright (c) 2022 - 2023 Samsung Electronics Co., Ltd. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * Most of this code is adapted from hal_camera_v4l2.
+ */
+
+#include <algorithm>
+#include <atomic>
+#include <cstdint>
+#include <cstring>
+#include <string>
+#include <thread>
+#include <vector>
+
+#include <dlog.h>
+#include <fcntl.h>
+#include <glob.h>
+#include <linux/videodev2.h>
+#include <media_packet.h>
+#include <stdlib.h>
+#include <sys/mman.h>
+#include <vision_source_interface.h>
+
+#ifdef LOG_TAG
+#undef LOG_TAG
+#endif
+
+#define LOG_TAG "VISION_SOURCE_V4L2"
+#ifdef HAVE_LIBV4L2
+#include <libv4l2.h>
+#else
+#include <sys/ioctl.h>
+#include <unistd.h>
+
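+/* Without libv4l2, map the wrappers to the raw syscalls; v4l2_fd_open() becomes a no-op. */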
+#define v4l2_fd_open(fd, flags) (fd)
+#define v4l2_close close
+#define v4l2_dup dup
+#define v4l2_ioctl ioctl
+#define v4l2_read read
+#define v4l2_mmap mmap
+#define v4l2_munmap munmap
+#endif /* HAVE_LIBV4L2 */
+
+#define VISION_SOURCE_CHECK_CONDITION(condition, error, msg) \
+ do { \
+ if (!(condition)) { \
+ LOGE("[%s] %s(0x%08x)", __FUNCTION__, msg, error); \
+ return error; \
+ } \
+ } while (0)
+
+#define VISION_SOURCE_NULL_ARG_CHECK(arg) \
+ VISION_SOURCE_CHECK_CONDITION(arg != NULL, VISION_SOURCE_ERROR_INVALID_PARAMETER, \
+ "VISION_SOURCE_ERROR_INVALID_PARAMETER")
+
+#define VISION_SOURCE_INITIAL_FD -1
+#define BUFFER_MAX 4
+
+#define DEVICE_NODE_PATH_PREFIX "/dev/video"
+#define FOURCC_FORMAT "%c%c%c%c"
+#define FOURCC_CONVERT(fourcc) (fourcc) & 0xff, ((fourcc) >> 8) & 0xff, ((fourcc) >> 16) & 0xff, ((fourcc) >> 24) & 0xff
+
+#ifdef USE_DL
+#define vision_source_init vision_source_v4l2_init
+#define vision_source_exit vision_source_v4l2_exit
+#define vision_source_list_devices vision_source_v4l2_list_devices
+#define vision_source_list_device_caps vision_source_v4l2_list_device_caps
+#define vision_source_open_device vision_source_v4l2_open_device
+#define vision_source_close_device vision_source_v4l2_close_device
+#define vision_source_set_stream_format vision_source_v4l2_set_stream_format
+#define vision_source_start_stream vision_source_v4l2_start_stream
+#define vision_source_stop_stream vision_source_v4l2_stop_stream
+#define vision_source_set_stream_cb vision_source_v4l2_set_stream_cb
+#else
+#include <vision_source.h>
+#endif
+
+using namespace std;
+
+struct fmt_info {
+ media_format_mimetype_e type;
+ uint32_t width;
+ uint32_t height;
+ uint32_t fps;
+
+	bool operator==(const fmt_info &rhs) const
+ {
+ return (type == rhs.type) && (width == rhs.width) && (height == rhs.height) && (fps == rhs.fps);
+ }
+};
+
+struct pkt_dispose_cb_data {
+ int fd;
+ int index;
+};
+
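+/*
+ * Backend handle: enumerated device nodes and their supported formats,
+ * the currently opened device, the mmap'ed capture buffers and the
+ * buffer-fetch thread state.
+ */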
+struct vision_source_v4l2_s {
+ /* device */
+ vision_source_device_info_s *dev_info {};
+ media_format_h **fmt {};
+ vector<string> dev_name {};
+ vector<vector<fmt_info> > fmt_list {};
+
+ /* current user setting */
+ int device_index = VISION_SOURCE_INITIAL_FD;
+ int device_fd = VISION_SOURCE_INITIAL_FD;
+ int fmt_index = VISION_SOURCE_INITIAL_FD;
+ vision_source_status_e status;
+
+ /* buffer */
+ vector<v4l2_buffer> v4l2_buf {};
+ vector<void *> mmap_addr {};
+ struct pkt_dispose_cb_data pkt_data[BUFFER_MAX];
+
+	/* thread */
+ thread buffer_thread;
+ atomic_bool buffer_thread_run {};
+ stream_cb stream_callback {};
+ void *stream_user_data {};
+};
+
+static int __vision_source_get_format(uint32_t fourcc, int *pixel_format)
+{
+ switch (fourcc) {
+ case V4L2_PIX_FMT_RGB24:
+ *pixel_format = MEDIA_FORMAT_RGB888;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ *pixel_format = MEDIA_FORMAT_YUYV;
+ break;
+ case V4L2_PIX_FMT_NV12:
+ *pixel_format = MEDIA_FORMAT_NV12;
+ break;
+ case V4L2_PIX_FMT_NV21:
+ *pixel_format = MEDIA_FORMAT_NV21;
+ break;
+ default:
+ LOGW("unknown fourcc " FOURCC_FORMAT, FOURCC_CONVERT(fourcc));
+ return VISION_SOURCE_ERROR_NOT_SUPPORTED_FORMAT;
+ }
+
+ LOGD("fourcc " FOURCC_FORMAT " -> %d", FOURCC_CONVERT(fourcc), *pixel_format);
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+static int __vision_source_get_fourcc(media_format_mimetype_e pixel_format, uint32_t *fourcc)
+{
+ switch (pixel_format) {
+ case MEDIA_FORMAT_RGB888:
+ *fourcc = V4L2_PIX_FMT_RGB24;
+ break;
+ case MEDIA_FORMAT_YUYV:
+ *fourcc = V4L2_PIX_FMT_YUYV;
+ break;
+ case MEDIA_FORMAT_NV12:
+ *fourcc = V4L2_PIX_FMT_NV12;
+ break;
+ case MEDIA_FORMAT_NV21:
+ *fourcc = V4L2_PIX_FMT_NV21;
+ break;
+ default:
+ LOGE("unknown format %d", pixel_format);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ LOGD("format %d -> fourcc " FOURCC_FORMAT, pixel_format, FOURCC_CONVERT(*fourcc));
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
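+/*
+ * Enumerate discrete frame intervals via VIDIOC_ENUM_FRAMEINTERVALS and
+ * return the interval denominators as fps values (exact when the numerator is 1).
+ */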
+static vector<uint32_t> __vision_source_get_fps_list(int device_fd, uint32_t pixel_format, int width, int height)
+{
+ vector<uint32_t> fps_list;
+	struct v4l2_frmivalenum ival;
+
+	memset(&ival, 0x0, sizeof(struct v4l2_frmivalenum));
+
+ ival.index = 0;
+ ival.pixel_format = pixel_format;
+ ival.width = width;
+ ival.height = height;
+
+ if (v4l2_ioctl(device_fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0) {
+ LOGE("VIDIOC_ENUM_FRAMEINTERVALS failed[%d]", errno);
+ return fps_list;
+ }
+
+ if (ival.type != V4L2_FRMIVAL_TYPE_DISCRETE) {
+ LOGE("NOT V4L2_FRMIVAL_TYPE_DISCRETE -> [%u]", ival.type);
+ return fps_list;
+ }
+
+ do {
+ LOGI("\t\t\t\tFramerate[%u/%u]", ival.discrete.denominator, ival.discrete.numerator);
+ fps_list.push_back(ival.discrete.denominator);
+ ival.index++;
+ } while (v4l2_ioctl(device_fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0);
+
+ return fps_list;
+}
+
+static int __vision_source_v4l2_qbuf(int device_fd, int type, int memory, int index)
+{
+ struct v4l2_buffer v4l2_buf;
+
+ memset(&v4l2_buf, 0x0, sizeof(struct v4l2_buffer));
+
+ v4l2_buf.index = index;
+ v4l2_buf.type = type;
+ v4l2_buf.memory = memory;
+
+ if (v4l2_ioctl(device_fd, VIDIOC_QBUF, &v4l2_buf) < 0) {
+ LOGE("qbuf failed. [i: %d, t: %d, m: %d] errno %d", index, type, memory, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ // LOGD("QBUF done [i: %d, t: %d, m: %d]", index, type, memory);
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
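+/*
+ * Enumerate every supported combination of pixel format (VIDIOC_ENUM_FMT),
+ * discrete frame size (VIDIOC_ENUM_FRAMESIZES) and frame rate, and append
+ * the results to found_fmt.
+ */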
+static int __vision_source_get_valid_fmt(int device_fd, vector<fmt_info> &found_fmt)
+{
+ int pixel_format = 0;
+ int pixel_index = 0;
+ int pixel_count = 0;
+
+ while (1) {
+ struct v4l2_fmtdesc v4l2_format;
+ memset(&v4l2_format, 0x0, sizeof(struct v4l2_fmtdesc));
+
+ v4l2_format.index = pixel_index;
+ v4l2_format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (v4l2_ioctl(device_fd, VIDIOC_ENUM_FMT, &v4l2_format) < 0) {
+ LOGW("\tformat : end of enumeration");
+ break;
+ }
+
+ LOGD("\tformat[%d] " FOURCC_FORMAT " (emulated:%d)", pixel_index, FOURCC_CONVERT(v4l2_format.pixelformat),
+ ((v4l2_format.flags & V4L2_FMT_FLAG_EMULATED) ? 1 : 0));
+
+ if (__vision_source_get_format(v4l2_format.pixelformat, &pixel_format) != VISION_SOURCE_ERROR_NONE) {
+ pixel_index++;
+ continue;
+ }
+
+ int resolution_index = 0;
+ int resolution_count = 0;
+
+ while (1) {
+ struct v4l2_frmsizeenum v4l2_frame;
+ memset(&v4l2_frame, 0x0, sizeof(struct v4l2_frmsizeenum));
+
+ v4l2_frame.index = resolution_index;
+ v4l2_frame.pixel_format = v4l2_format.pixelformat;
+
+ if (v4l2_ioctl(device_fd, VIDIOC_ENUM_FRAMESIZES, &v4l2_frame) < 0) {
+ LOGW("\t\tframe : end of enumeration ");
+ break;
+ }
+
+			// TODO: support other frame size types
+			if (v4l2_frame.type != V4L2_FRMSIZE_TYPE_DISCRETE) {
+				LOGW("\t\tframe size type %d is not supported yet", v4l2_frame.type);
+ return VISION_SOURCE_ERROR_NONE;
+ }
+
+ uint32_t width = v4l2_frame.discrete.width;
+ uint32_t height = v4l2_frame.discrete.height;
+
+ LOGD("\t\tsize[%d] %ux%u", resolution_index, width, height);
+
+ auto fps_list = __vision_source_get_fps_list(device_fd, v4l2_frame.pixel_format, width, height);
+ if (fps_list.empty()) {
+ return VISION_SOURCE_ERROR_NOT_SUPPORTED_FORMAT;
+ }
+ resolution_count++;
+ resolution_index++;
+
+ for (auto fps : fps_list) {
+ fmt_info fmt { (media_format_mimetype_e) pixel_format, width, height, fps };
+ found_fmt.push_back(fmt);
+ }
+ }
+
+ LOGD("\t\tresolution count [%d]", resolution_count);
+ pixel_index++;
+ pixel_count++;
+ }
+
+ LOGD("\tformat count [%d]", pixel_count);
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
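+/*
+ * Open a /dev/videoX node, check with VIDIOC_QUERYCAP that it is a capture
+ * device and collect its supported formats. Nodes that cannot be used are
+ * skipped by returning VISION_SOURCE_ERROR_NONE with an empty format list.
+ */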
+static int __dev_info_from_path(const char *path, vector<fmt_info> &fmt_list)
+{
+ LOGD("check device [%s]", path);
+ int ret = VISION_SOURCE_ERROR_NONE;
+
+ int device_fd = open(path, O_RDWR);
+ if (device_fd < 0) {
+ LOGE("open failed [%s] errno %d", path, errno);
+ return ret;
+ }
+
+#ifdef HAVE_LIBV4L2
+ int libv4l2_fd = v4l2_fd_open(device_fd, V4L2_ENABLE_ENUM_FMT_EMULATION);
+
+ LOGI("device_fd[%d], libv4l2_fd[%d]", device_fd, libv4l2_fd);
+
+ if (libv4l2_fd > 0)
+ device_fd = libv4l2_fd;
+#endif /* HAVE_LIBV4L2 */
+
+ struct v4l2_capability v4l2_cap;
+ memset(&v4l2_cap, 0x0, sizeof(struct v4l2_capability));
+
+ if (v4l2_ioctl(device_fd, VIDIOC_QUERYCAP, &v4l2_cap) < 0) {
+ LOGE("querycap failed. errno %d", errno);
+ v4l2_close(device_fd);
+ return ret;
+ }
+
+ unsigned int dev_caps;
+
+ if (v4l2_cap.capabilities & V4L2_CAP_DEVICE_CAPS)
+ dev_caps = v4l2_cap.device_caps;
+ else
+ dev_caps = v4l2_cap.capabilities;
+
+ if (!(dev_caps & V4L2_CAP_VIDEO_CAPTURE) || (dev_caps & V4L2_CAP_VIDEO_OUTPUT)) {
+ LOGW("[%s] is not a capture device 0x%x", path, dev_caps);
+ v4l2_close(device_fd);
+ return ret;
+ }
+
+ ret = __vision_source_get_valid_fmt(device_fd, fmt_list);
+
+ v4l2_close(device_fd);
+ return ret;
+}
+
+static int __vision_source_list_devices(vision_source_v4l2_s *vs_handle)
+{
+ glob_t glob_buf;
+ memset(&glob_buf, 0x0, sizeof(glob_t));
+ int ret = glob(DEVICE_NODE_PATH_PREFIX "*", 0, 0, &glob_buf);
+ if (ret != 0) {
+ switch (ret) {
+ case GLOB_NOSPACE:
+ LOGE("out of memory");
+ return VISION_SOURCE_ERROR_OUT_OF_MEMORY;
+ case GLOB_ABORTED:
+ LOGE("read error");
+ return VISION_SOURCE_ERROR_INTERNAL;
+ case GLOB_NOMATCH:
+ LOGE("match not found");
+ return VISION_SOURCE_ERROR_NO_DATA;
+ }
+ }
+
+ LOGD("device node count : %zu", glob_buf.gl_pathc);
+
+ vector<string> &dev_name = vs_handle->dev_name;
+ vector<vector<fmt_info> > &dev_fmt_list = vs_handle->fmt_list;
+
+ for (size_t i = 0; i < glob_buf.gl_pathc; i++) {
+ vector<fmt_info> fmt_list;
+ ret = __dev_info_from_path(glob_buf.gl_pathv[i], fmt_list);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ break;
+ }
+
+ if (!fmt_list.empty()) {
+ dev_name.push_back(glob_buf.gl_pathv[i]);
+ dev_fmt_list.push_back(fmt_list);
+ }
+ }
+ globfree(&glob_buf);
+
+ if (dev_name.empty())
+ return ret;
+
+ size_t dev_count = dev_name.size();
+
+ vs_handle->dev_info = (vision_source_device_info_s *) calloc(dev_count, sizeof(vision_source_device_info_s));
+ if (!vs_handle->dev_info) {
+ return VISION_SOURCE_ERROR_OUT_OF_MEMORY;
+ }
+
+ vs_handle->fmt = (media_format_h **) calloc(dev_count, sizeof(media_format_h *));
+ if (!vs_handle->fmt) {
+ return VISION_SOURCE_ERROR_OUT_OF_MEMORY;
+ }
+
+ for (size_t i = 0; i < dev_count; i++) {
+ vs_handle->dev_info[i].index = i;
+		strncpy(vs_handle->dev_info[i].name, dev_name[i].c_str(), DEVICE_NAME_LENGTH_MAX - 1);
+ vs_handle->fmt[i] = (media_format_h *) calloc(dev_fmt_list[i].size(), sizeof(media_format_h));
+ if (!vs_handle->fmt[i]) {
+ return VISION_SOURCE_ERROR_OUT_OF_MEMORY;
+ }
+
+ for (size_t j = 0; j < dev_fmt_list[i].size(); j++) {
+ media_format_h fmt;
+ media_format_create(&fmt);
+ media_format_set_video_mime(fmt, dev_fmt_list[i][j].type);
+ media_format_set_video_width(fmt, dev_fmt_list[i][j].width);
+ media_format_set_video_height(fmt, dev_fmt_list[i][j].height);
+ media_format_set_video_frame_rate(fmt, dev_fmt_list[i][j].fps);
+ vs_handle->fmt[i][j] = fmt;
+ }
+ }
+
+ return ret;
+}
+
+static int __vision_source_v4l2_stream(int device_fd, int type, bool onoff)
+{
+ if (v4l2_ioctl(device_fd, onoff ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type) < 0) {
+ LOGE("stream %d failed. [t:%d] errno %d", onoff, type, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ LOGD("stream %d (1:on, 0:off) done [t:%d]", onoff, type);
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+static int __vision_source_v4l2_reqbufs(int device_fd, int type, int memory, uint32_t count, uint32_t *result_count)
+{
+ struct v4l2_requestbuffers v4l2_reqbuf;
+
+ memset(&v4l2_reqbuf, 0x0, sizeof(struct v4l2_requestbuffers));
+
+ v4l2_reqbuf.type = type;
+ v4l2_reqbuf.memory = memory;
+ v4l2_reqbuf.count = count;
+
+ if (v4l2_ioctl(device_fd, VIDIOC_REQBUFS, &v4l2_reqbuf) < 0) {
+ LOGE("REQBUFS[count %d] failed. errno %d", count, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ if (v4l2_reqbuf.count != count) {
+ LOGE("different count [req:%d, result:%d]", count, v4l2_reqbuf.count);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ *result_count = v4l2_reqbuf.count;
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
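+/* Tear down streaming: VIDIOC_STREAMOFF, munmap every capture buffer and release them with REQBUFS(0). */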
+static int __vision_source_stop_stream(vision_source_v4l2_s *handle)
+{
+ LOGD("__vision_source_stop_stream");
+
+ /* stream off */
+ int ret = __vision_source_v4l2_stream(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, false);
+
+ /* munmap */
+ for (size_t i = 0; i < handle->mmap_addr.size(); i++) {
+ v4l2_munmap(handle->mmap_addr[i], handle->v4l2_buf[i].length);
+ }
+ handle->v4l2_buf.clear();
+ handle->mmap_addr.clear();
+
+ /* reqbufs 0 */
+ uint32_t buffer_count;
+ ret = __vision_source_v4l2_reqbufs(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, 0,
+ &buffer_count);
+
+ LOGD("reqbufs 0 : 0x%x", ret);
+
+ return ret;
+}
+
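+/* Apply the selected format with VIDIOC_S_FMT and set the frame rate via VIDIOC_G_PARM/VIDIOC_S_PARM. */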
+static int __vision_source_set_stream(vision_source_v4l2_s *handle)
+{
+ fmt_info &fmt = handle->fmt_list[handle->device_index][handle->fmt_index];
+ struct v4l2_format v4l2_fmt;
+
+ unsigned int fourcc = 0;
+
+	int ret = __vision_source_get_fourcc(fmt.type, &fourcc);
+ if (ret != VISION_SOURCE_ERROR_NONE)
+ return ret;
+
+ memset(&v4l2_fmt, 0x0, sizeof(struct v4l2_format));
+
+ v4l2_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ v4l2_fmt.fmt.pix.width = fmt.width;
+ v4l2_fmt.fmt.pix.height = fmt.height;
+ v4l2_fmt.fmt.pix.pixelformat = fourcc;
+
+ if (v4l2_ioctl(handle->device_fd, VIDIOC_S_FMT, &v4l2_fmt) < 0) {
+ LOGE("S_FMT failed. errno %d", errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ struct v4l2_streamparm v4l2_parm;
+ /* G_PARM */
+ memset(&v4l2_parm, 0x0, sizeof(struct v4l2_streamparm));
+
+ v4l2_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (v4l2_ioctl(handle->device_fd, VIDIOC_G_PARM, &v4l2_parm) < 0) {
+ LOGE("G_PARM failed. errno %d", errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ /* S_PARM to set fps */
+ v4l2_parm.parm.capture.timeperframe.numerator = 1;
+ v4l2_parm.parm.capture.timeperframe.denominator = fmt.fps;
+
+ if (v4l2_ioctl(handle->device_fd, VIDIOC_S_PARM, &v4l2_parm) < 0) {
+ LOGE("S_PARM failed. errno %d", errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+static int __vision_source_v4l2_dqbuf(int device_fd, int type, int memory, int *index, unsigned int *used_size)
+{
+ struct v4l2_buffer v4l2_buf;
+ memset(&v4l2_buf, 0x0, sizeof(struct v4l2_buffer));
+
+ v4l2_buf.type = type;
+ v4l2_buf.memory = memory;
+
+ int ret = v4l2_ioctl(device_fd, VIDIOC_DQBUF, &v4l2_buf);
+ if (ret < 0) {
+ ret = errno;
+ LOGE("dqbuf failed. [t: %d, m: %d] errno %d", type, memory, ret);
+		if (ret == EIO)
+			LOGE("EIO indicates an internal HW error");
+
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ *index = v4l2_buf.index;
+ *used_size = v4l2_buf.bytesused;
+
+	return VISION_SOURCE_ERROR_NONE;
+}
+
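+/* Wait up to wait_time seconds for a frame with select(); EINTR is not treated as an error. */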
+static int __vision_source_v4l2_wait_frame(int device_fd, int wait_time)
+{
+ fd_set fds;
+ FD_ZERO(&fds);
+ FD_SET(device_fd, &fds);
+
+ struct timeval timeout;
+ memset(&timeout, 0x0, sizeof(struct timeval));
+
+ timeout.tv_sec = wait_time;
+ timeout.tv_usec = 0;
+
+ /*LOGD("select : %d sec", wait_time);*/
+
+ int ret = select(device_fd + 1, &fds, NULL, NULL, &timeout);
+ if (ret == -1) {
+ ret = errno;
+ if (ret == EINTR) {
+ LOGD("select error : EINTR");
+ return VISION_SOURCE_ERROR_NONE;
+ }
+ LOGE("select failed. errno %d", ret);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+ if (ret == 0) {
+ LOGE("select timeout.");
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
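+/*
+ * Standard V4L2 MMAP streaming setup: VIDIOC_QUERYBUF and mmap each of the
+ * requested buffers, queue them with VIDIOC_QBUF and finally start the
+ * stream with VIDIOC_STREAMON.
+ */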
+static int __vision_source_mmap(vision_source_v4l2_s *handle)
+{
+ /* query buffer, mmap and qbuf */
+ for (int i = 0; i < BUFFER_MAX; i++) {
+ struct v4l2_buffer v4l2_buf;
+ memset(&v4l2_buf, 0x0, sizeof(struct v4l2_buffer));
+
+ v4l2_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ v4l2_buf.memory = V4L2_MEMORY_MMAP;
+ v4l2_buf.index = i;
+
+ if (v4l2_ioctl(handle->device_fd, VIDIOC_QUERYBUF, &v4l2_buf) < 0) {
+ LOGE("[%d] query buf failed. errno %d", i, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ handle->v4l2_buf.push_back(v4l2_buf);
+
+ void *data =
+ v4l2_mmap(0, v4l2_buf.length, PROT_READ | PROT_WRITE, MAP_SHARED, handle->device_fd, v4l2_buf.m.offset);
+
+ if (data == MAP_FAILED) {
+ LOGE("[%d] mmap failed (errno %d)", i, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ handle->mmap_addr.push_back(data);
+
+ if (__vision_source_v4l2_qbuf(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, i) !=
+ VISION_SOURCE_ERROR_NONE) {
+ LOGE("[%d] qbuf failed (errno %d)", i, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+ }
+
+ /* stream on */
+ return __vision_source_v4l2_stream(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, true);
+}
+
+static int __vision_source_start_stream(vision_source_v4l2_s *handle)
+{
+ uint32_t buffer_count = 0;
+ /* request buffer */
+ int ret = __vision_source_v4l2_reqbufs(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, BUFFER_MAX,
+ &buffer_count);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ return ret;
+ }
+
+ LOGD("buffer count : request %d -> result %d", BUFFER_MAX, buffer_count);
+
+ ret = __vision_source_mmap(handle);
+ if (ret != VISION_SOURCE_ERROR_NONE)
+ __vision_source_v4l2_reqbufs(handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, 0,
+ &buffer_count);
+
+ return ret;
+}
+
+int vision_source_init(vision_source_h *handle)
+{
+	VISION_SOURCE_NULL_ARG_CHECK(handle);
+
+	vision_source_v4l2_s *v4l2_handle = new vision_source_v4l2_s;
+
+	int ret = __vision_source_list_devices(v4l2_handle);
+	if (ret != VISION_SOURCE_ERROR_NONE) {
+		delete v4l2_handle;
+		LOGE("get device info failed");
+		return ret;
+	}
+
+ v4l2_handle->status = VISION_SOURCE_STATUS_INITIALIZED;
+ *handle = v4l2_handle;
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+static void __vision_source_release_handle(vision_source_v4l2_s *handle)
+{
+ for (size_t i = 0; i < handle->fmt_list.size(); i++) {
+ for (size_t j = 0; j < handle->fmt_list[i].size(); j++) {
+ media_format_unref(handle->fmt[i][j]);
+ }
+ free(handle->fmt[i]);
+ }
+ free(handle->fmt);
+ free(handle->dev_info);
+
+ LOGD("vision_source_v4l2_s %p destroy", handle);
+
+ return;
+}
+
+int vision_source_exit(vision_source_h handle)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->dev_info)
+ __vision_source_release_handle(v4l2_handle);
+ delete v4l2_handle;
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+int vision_source_list_devices(vision_source_h handle, const vision_source_device_info_s **dev_list, int *dev_count)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ *dev_list = v4l2_handle->dev_info;
+ *dev_count = v4l2_handle->dev_name.size();
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+int vision_source_list_device_caps(vision_source_h handle, int dev_index, const media_format_h **fmt_list,
+ int *fmt_count)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (dev_index < 0 || (size_t) dev_index >= v4l2_handle->fmt_list.size())
+ return VISION_SOURCE_ERROR_INVALID_PARAMETER;
+
+ *fmt_list = v4l2_handle->fmt[dev_index];
+ *fmt_count = v4l2_handle->fmt_list[dev_index].size();
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+int vision_source_open_device(vision_source_h handle, int dev_index)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->status != VISION_SOURCE_STATUS_INITIALIZED) {
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+ }
+
+ if (dev_index < 0 || (size_t) dev_index >= v4l2_handle->fmt_list.size())
+ return VISION_SOURCE_ERROR_INVALID_PARAMETER;
+
+ int device_fd = VISION_SOURCE_INITIAL_FD;
+#ifdef HAVE_LIBV4L2
+ int libv4l2_fd = VISION_SOURCE_INITIAL_FD;
+#endif /* HAVE_LIBV4L2 */
+
+ char *node_path = v4l2_handle->dev_info[dev_index].name;
+
+ device_fd = open(node_path, O_RDWR);
+ if (device_fd < 0) {
+ LOGE("open [%s] failed [errno %d]", node_path, errno);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+#ifdef HAVE_LIBV4L2
+ libv4l2_fd = v4l2_fd_open(device_fd, V4L2_ENABLE_ENUM_FMT_EMULATION);
+
+ LOGI("device_fd[%d], libv4l2_fd[%d]", device_fd, libv4l2_fd);
+
+ if (libv4l2_fd != VISION_SOURCE_INITIAL_FD)
+ device_fd = libv4l2_fd;
+#endif /* HAVE_LIBV4L2 */
+
+ v4l2_handle->device_index = dev_index;
+ v4l2_handle->device_fd = device_fd;
+ v4l2_handle->status = VISION_SOURCE_STATUS_OPENDED;
+
+ LOGD("[%d] device[%s] opened [fd %d]", dev_index, node_path, device_fd);
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+int vision_source_close_device(vision_source_h handle)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->status != VISION_SOURCE_STATUS_OPENDED) {
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+ }
+
+ if (v4l2_handle->device_fd < 0) {
+ LOGE("invalid fd %d", v4l2_handle->device_fd);
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ LOGD("close fd %d", v4l2_handle->device_fd);
+
+ v4l2_close(v4l2_handle->device_fd);
+
+ v4l2_handle->status = VISION_SOURCE_STATUS_INITIALIZED;
+ LOGD("device [%d] closed", v4l2_handle->device_index);
+ v4l2_handle->device_fd = VISION_SOURCE_INITIAL_FD;
+ v4l2_handle->device_index = VISION_SOURCE_INITIAL_FD;
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+int vision_source_set_stream_format(vision_source_h handle, media_format_h fmt)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (!v4l2_handle->fmt || v4l2_handle->status != VISION_SOURCE_STATUS_OPENDED) {
+ LOGE("Invalid state");
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+ }
+
+ media_format_mimetype_e mimetype;
+ int width, height, fps;
+ int ret = media_format_get_video_info(fmt, &mimetype, &width, &height, nullptr, nullptr);
+ if (ret != MEDIA_FORMAT_ERROR_NONE) {
+ LOGE("media_format_get_video_info failed");
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+ ret = media_format_get_video_frame_rate(fmt, &fps);
+ if (ret != MEDIA_FORMAT_ERROR_NONE) {
+		LOGE("media_format_get_video_frame_rate failed");
+ return VISION_SOURCE_ERROR_INTERNAL;
+ }
+
+ LOGI("Try set format width: %d, height: %d, fps: %d", width, height, fps);
+
+ fmt_info request { mimetype, (uint32_t) width, (uint32_t) height, (uint32_t) fps };
+ vector<fmt_info> &vec = v4l2_handle->fmt_list[v4l2_handle->device_index];
+ const auto pos = find(vec.begin(), vec.end(), request);
+ if (pos == vec.end()) {
+ LOGE("Not supported format");
+ return VISION_SOURCE_ERROR_INVALID_PARAMETER;
+ }
+
+ auto index = distance(vec.begin(), pos);
+ if (v4l2_handle->fmt_index == index) {
+ LOGD("no need to restart preview stream");
+ return VISION_SOURCE_ERROR_NONE;
+ }
+
+ v4l2_handle->fmt_index = index;
+
+ ret = __vision_source_set_stream(v4l2_handle);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ LOGE("failed to set stream");
+ return ret;
+ }
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
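+/* media_packet dispose callback: once the consumer releases the packet, queue the underlying V4L2 buffer back to the driver. */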
+static void __vision_source_pkt_dispose_cb(media_packet_h packet, void *user_data)
+{
+ pkt_dispose_cb_data *data = (pkt_dispose_cb_data *) user_data;
+
+ int ret = __vision_source_v4l2_qbuf(data->fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, data->index);
+ if (ret != VISION_SOURCE_ERROR_NONE)
+ LOGE("qbuf failed");
+}
+
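+/*
+ * Buffer-fetch thread: wait for a frame, dequeue it, wrap the mmap'ed memory
+ * in a zero-copy media_packet and hand it to the stream callback. The buffer
+ * is re-queued from the packet dispose callback.
+ */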
+static void __fetch_buffer_and_callback(vision_source_v4l2_s *v4l2_handle)
+{
+ LOGD("__fetch_buffer_and_callback");
+
+ int index;
+ unsigned int byte_size;
+ int ret;
+
+ for (size_t i = 0; i < BUFFER_MAX; i++) {
+ v4l2_handle->pkt_data[i].fd = v4l2_handle->device_fd;
+ v4l2_handle->pkt_data[i].index = i;
+ }
+
+ while (v4l2_handle->buffer_thread_run) {
+ LOGD("__fetch_buffer_and_callback: waiting for buffer");
+ ret = __vision_source_v4l2_wait_frame(v4l2_handle->device_fd, 5);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ LOGE("frame wait failed");
+ break;
+ }
+
+ if (v4l2_handle->buffer_thread_run == false) {
+ LOGD("stop buffer handler thread");
+ break;
+ }
+ ret = __vision_source_v4l2_dqbuf(v4l2_handle->device_fd, V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_MEMORY_MMAP, &index,
+ &byte_size);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ LOGE("dqbuf failed");
+ break;
+ }
+
+ media_format_h fmt = v4l2_handle->fmt[v4l2_handle->device_index][v4l2_handle->fmt_index];
+
+ media_packet_h pkt;
+ ret = media_packet_new_from_external_memory(fmt, v4l2_handle->mmap_addr[index], byte_size,
+ __vision_source_pkt_dispose_cb, &v4l2_handle->pkt_data[index],
+ &pkt);
+ if (ret != MEDIA_PACKET_ERROR_NONE) {
+ LOGE("media_packet_new_from_external_memory failed");
+ continue;
+ }
+ if (v4l2_handle->stream_callback) {
+ v4l2_handle->stream_callback(pkt, v4l2_handle->stream_user_data);
+ }
+
+ media_packet_unref(pkt);
+ sched_yield();
+ }
+
+ return;
+}
+
+int vision_source_start_stream(vision_source_h handle)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->status != VISION_SOURCE_STATUS_OPENDED) {
+ LOGE("status is not VISION_SOURCE_STATUS_OPENDED");
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+ }
+ if (v4l2_handle->fmt_index < 0) {
+ LOGE("format is not set");
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+ }
+
+ int ret = __vision_source_start_stream(v4l2_handle);
+ if (ret != VISION_SOURCE_ERROR_NONE) {
+ LOGE("__vision_source_start_stream failed[0x%x]", ret);
+ return ret;
+ }
+
+ v4l2_handle->buffer_thread_run = true;
+ v4l2_handle->buffer_thread = thread(__fetch_buffer_and_callback, v4l2_handle);
+
+ v4l2_handle->status = VISION_SOURCE_STATUS_STARTED;
+ LOGD("start preview done");
+
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+/*
+ * TODO: the buffer_thread still has an issue; what happens to in-flight buffers on stop?
+ */
+int vision_source_stop_stream(vision_source_h handle)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->status != VISION_SOURCE_STATUS_STARTED)
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+
+ v4l2_handle->buffer_thread_run = false;
+ v4l2_handle->buffer_thread.join();
+ LOGI("buffer thread stopped and joined");
+
+ int ret = __vision_source_stop_stream(v4l2_handle);
+ if (ret != VISION_SOURCE_ERROR_NONE)
+ LOGE("__vision_source_stop_stream failed, but buffer thread will be stopped");
+
+ v4l2_handle->fmt_index = VISION_SOURCE_INITIAL_FD;
+ v4l2_handle->stream_callback = nullptr;
+ v4l2_handle->stream_user_data = nullptr;
+ v4l2_handle->status = VISION_SOURCE_STATUS_OPENDED;
+ LOGD("stop preview done [0x%x]", ret);
+
+ return ret;
+}
+
+/* TODO: support run-time callback change */
+int vision_source_set_stream_cb(vision_source_h handle, stream_cb callback, void *user_data)
+{
+ vision_source_v4l2_s *v4l2_handle = (vision_source_v4l2_s *) handle;
+
+ if (v4l2_handle->status != VISION_SOURCE_STATUS_OPENDED)
+ return VISION_SOURCE_ERROR_INVALID_OPERATION;
+
+ v4l2_handle->stream_callback = callback;
+ v4l2_handle->stream_user_data = user_data;
+ return VISION_SOURCE_ERROR_NONE;
+}
+
+#ifdef USE_DL
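+/* Entry point of the dynamically loaded backend: fills the function table with the v4l2 implementations (presumably resolved via dlsym by the vision-source loader). */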
+void attach_backend(vision_source_func_s *funcp)
+{
+ funcp->init = vision_source_v4l2_init;
+ funcp->exit = vision_source_v4l2_exit;
+ funcp->list_devices = vision_source_v4l2_list_devices;
+ funcp->list_device_caps = vision_source_v4l2_list_device_caps;
+ funcp->open_device = vision_source_v4l2_open_device;
+ funcp->close_device = vision_source_v4l2_close_device;
+ funcp->set_stream_format = vision_source_v4l2_set_stream_format;
+ funcp->start_stream = vision_source_v4l2_start_stream;
+ funcp->stop_stream = vision_source_v4l2_stop_stream;
+ funcp->set_stream_cb = vision_source_v4l2_set_stream_cb;
+}
+#endif
#include <gtest/gtest.h>
+#include <media_packet.h>
#include <vision_source.h>
class VisionV4L2 : public ::testing::Test
vision_source_h ms_handle;
};
-TEST_F(VisionV4L2, EnumerateDev)
+TEST_F(VisionV4L2, ListDevices)
{
- vision_source_device_info_list_s dev_list;
- ASSERT_EQ(vision_source_enumerate_devices(ms_handle, &dev_list),
- VISION_SOURCE_ERROR_NONE);
- EXPECT_GT(dev_list.count, 0);
+ const vision_source_device_info_s *dev_list;
+ int dev_cnt;
+ ASSERT_EQ(vision_source_list_devices(ms_handle, &dev_list, &dev_cnt), VISION_SOURCE_ERROR_NONE);
+ EXPECT_GT(dev_cnt, 0);
+}
+
+TEST_F(VisionV4L2, DevCap)
+{
+ const vision_source_device_info_s *dev_list;
+ const media_format_h *fmt_list;
+ int fmt_count;
+ int dev_cnt;
+ ASSERT_EQ(vision_source_list_devices(ms_handle, &dev_list, &dev_cnt), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(dev_cnt, 0);
+ ASSERT_EQ(vision_source_list_device_caps(ms_handle, 0, &fmt_list, &fmt_count), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(fmt_count, 0);
}
TEST_F(VisionV4L2, OpenDev0)
{
- ASSERT_EQ(vision_source_open_device(ms_handle, 0),
- VISION_SOURCE_ERROR_NONE);
+ ASSERT_EQ(vision_source_open_device(ms_handle, 0), VISION_SOURCE_ERROR_NONE);
EXPECT_EQ(vision_source_close_device(ms_handle), VISION_SOURCE_ERROR_NONE);
}
+TEST_F(VisionV4L2, NotOpenDev)
+{
+ const vision_source_device_info_s *dev_list;
+ const media_format_h *fmt_list;
+ int fmt_count;
+ int dev_cnt;
+ ASSERT_EQ(vision_source_list_devices(ms_handle, &dev_list, &dev_cnt), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(dev_cnt, 0);
+ ASSERT_EQ(vision_source_list_device_caps(ms_handle, 0, &fmt_list, &fmt_count), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(fmt_count, 2);
+ EXPECT_EQ(vision_source_set_stream_format(ms_handle, fmt_list[2]), VISION_SOURCE_ERROR_INVALID_OPERATION);
+}
+
TEST_F(VisionV4L2, SetStream)
{
- vision_source_device_info_list_s dev_list;
- ASSERT_EQ(vision_source_enumerate_devices(ms_handle, &dev_list),
- VISION_SOURCE_ERROR_NONE);
- ASSERT_GT(dev_list.count, 0);
-
- ASSERT_GT(dev_list.device_info[0].pixel_list.count, 0);
- vision_source_format_s format;
- format.pixel_format =
- dev_list.device_info[0].pixel_list.pixels[0].pixel_format;
- ASSERT_GT(
- dev_list.device_info[0].pixel_list.pixels[0].resolution_list.count,
- 0);
-
- format.resolution.width = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .resolution.width;
- format.resolution.height = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .resolution.height;
- ASSERT_GT(dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .fps_list.count,
- 0);
- format.fps = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .fps_list.fps[0];
-
- ASSERT_EQ(vision_source_open_device(ms_handle, 0),
- VISION_SOURCE_ERROR_NONE);
- EXPECT_EQ(vision_source_set_stream_format(ms_handle, &format),
- VISION_SOURCE_ERROR_NONE);
+ const vision_source_device_info_s *dev_list;
+ const media_format_h *fmt_list;
+ int fmt_count;
+ int dev_cnt;
+ ASSERT_EQ(vision_source_list_devices(ms_handle, &dev_list, &dev_cnt), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(dev_cnt, 0);
+ ASSERT_EQ(vision_source_list_device_caps(ms_handle, 0, &fmt_list, &fmt_count), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(fmt_count, 2);
+ ASSERT_EQ(vision_source_open_device(ms_handle, 0), VISION_SOURCE_ERROR_NONE);
+ EXPECT_EQ(vision_source_set_stream_format(ms_handle, fmt_list[2]), VISION_SOURCE_ERROR_NONE);
EXPECT_EQ(vision_source_close_device(ms_handle), VISION_SOURCE_ERROR_NONE);
}
void SetUp() override
{
ASSERT_EQ(vision_source_init(&ms_handle), VISION_SOURCE_ERROR_NONE);
- vision_source_device_info_list_s dev_list;
- ASSERT_EQ(vision_source_enumerate_devices(ms_handle, &dev_list),
- VISION_SOURCE_ERROR_NONE);
- ASSERT_GT(dev_list.count, 0);
-
- ASSERT_GT(dev_list.device_info[0].pixel_list.count, 0);
- vision_source_format_s format;
- format.pixel_format =
- dev_list.device_info[0].pixel_list.pixels[0].pixel_format;
- ASSERT_GT(dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.count,
- 0);
-
- format.resolution.width = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .resolution.width;
- format.resolution.height = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .resolution.height;
- ASSERT_GT(dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .fps_list.count,
- 0);
- format.fps = dev_list.device_info[0]
- .pixel_list.pixels[0]
- .resolution_list.resolutions[0]
- .fps_list.fps[0];
-
- ASSERT_EQ(vision_source_open_device(ms_handle, 0),
- VISION_SOURCE_ERROR_NONE);
- EXPECT_EQ(vision_source_set_stream_format(ms_handle, &format),
- VISION_SOURCE_ERROR_NONE);
+ const vision_source_device_info_s *dev_list;
+ const media_format_h *fmt_list;
+ int fmt_count;
+ int dev_cnt;
+
+ ASSERT_EQ(vision_source_list_devices(ms_handle, &dev_list, &dev_cnt), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(dev_cnt, 0);
+ ASSERT_EQ(vision_source_list_device_caps(ms_handle, 0, &fmt_list, &fmt_count), VISION_SOURCE_ERROR_NONE);
+ ASSERT_GT(fmt_count, 2);
+
+ ASSERT_EQ(vision_source_open_device(ms_handle, 0), VISION_SOURCE_ERROR_NONE);
+ ASSERT_EQ(vision_source_set_stream_format(ms_handle, fmt_list[2]), VISION_SOURCE_ERROR_NONE);
}
void TearDown() override
{
- EXPECT_EQ(vision_source_close_device(ms_handle),
- VISION_SOURCE_ERROR_NONE);
+ EXPECT_EQ(vision_source_close_device(ms_handle), VISION_SOURCE_ERROR_NONE);
ASSERT_EQ(vision_source_exit(ms_handle), VISION_SOURCE_ERROR_NONE);
}
vision_source_h ms_handle;
TEST_F(VisionV4L2FixedFormat, OnlyStopStream)
{
- EXPECT_EQ(vision_source_stop_stream(ms_handle), VISION_SOURCE_ERROR_NONE);
+ EXPECT_EQ(vision_source_stop_stream(ms_handle), VISION_SOURCE_ERROR_INVALID_OPERATION);
}
TEST_F(VisionV4L2FixedFormat, StartStream)
{
- ASSERT_EQ(vision_source_start_stream(ms_handle, nullptr, nullptr),
- VISION_SOURCE_ERROR_NONE);
- sleep(1);
+ ASSERT_EQ(vision_source_start_stream(ms_handle), VISION_SOURCE_ERROR_NONE);
+ sleep(5);
EXPECT_EQ(vision_source_stop_stream(ms_handle), VISION_SOURCE_ERROR_NONE);
}
-static int test_cb(vision_source_buffer_s *buffer, void *user_data)
+static int test_cb(media_packet_h pkt, void *user_data)
{
FILE *file;
clock_t end = clock();
double delta_ms = (double) (end - *start) / CLOCKS_PER_SEC * 1000;
- snprintf(filename, 127, "out_%04u.data", (unsigned) delta_ms);
+ snprintf(filename, 127, "/opt/usr/home/owner/media/Images/out_%04u.data", (unsigned) delta_ms);
file = fopen(filename, "w");
- fwrite(buffer->planes[0].data, sizeof(unsigned char),
- buffer->planes[0].used_size, file);
+ void *data;
+ uint64_t size;
+ media_packet_get_buffer_data_ptr(pkt, &data);
+ media_packet_get_buffer_size(pkt, &size);
+
+ fwrite(data, sizeof(unsigned char), size, file);
fclose(file);
return 0;
}
TEST_F(VisionV4L2FixedFormat, StartWithCallback)
{
clock_t start = clock();
- ASSERT_EQ(vision_source_start_stream(ms_handle, test_cb, &start),
- VISION_SOURCE_ERROR_NONE);
- sleep(1);
+ ASSERT_EQ(vision_source_set_stream_cb(ms_handle, test_cb, &start), VISION_SOURCE_ERROR_NONE);
+ ASSERT_EQ(vision_source_start_stream(ms_handle), VISION_SOURCE_ERROR_NONE);
+ sleep(5);
EXPECT_EQ(vision_source_stop_stream(ms_handle), VISION_SOURCE_ERROR_NONE);
-}
\ No newline at end of file
+}