-/*\r
- * Implementation of MARU Virtual Camera device by PCI bus on Linux.\r
- *\r
- * Copyright (c) 2011 - 2012 Samsung Electronics Co., Ltd All Rights Reserved\r
- *\r
- * Contact:\r
- * JinHyung Jo <jinhyung.jo@samsung.com>\r
- * YeongKyoon Lee <yeongkyoon.lee@samsung.com>\r
- *\r
- * This program is free software; you can redistribute it and/or\r
- * modify it under the terms of the GNU General Public License\r
- * as published by the Free Software Foundation; either version 2\r
- * of the License, or (at your option) any later version.\r
- *\r
- * This program is distributed in the hope that it will be useful,\r
- * but WITHOUT ANY WARRANTY; without even the implied warranty of\r
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\r
- * GNU General Public License for more details.\r
- *\r
- * You should have received a copy of the GNU General Public License\r
- * along with this program; if not, write to the Free Software\r
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.\r
- *\r
- * Contributors:\r
- * - S-Core Co., Ltd\r
- *\r
- */\r
-\r
-#include "qemu-common.h"\r
-#include "maru_camera_common.h"\r
-#include "pci.h"\r
-#include "kvm.h"\r
-#include "tizen/src/debug_ch.h"\r
-\r
-#include <linux/videodev2.h>\r
-\r
-#include <sys/stat.h>\r
-#include <sys/ioctl.h>\r
-#include <sys/mman.h>\r
-\r
-#include <libv4l2.h>\r
-#include <libv4lconvert.h>\r
-\r
-MULTI_DEBUG_CHANNEL(tizen, camera_linux);\r
-\r
-#define CLEAR(x) memset(&(x), 0, sizeof(x))\r
-\r
-#define MARUCAM_DEFAULT_BUFFER_COUNT 4\r
-\r
-#define MARUCAM_CTRL_VALUE_MAX 20\r
-#define MARUCAM_CTRL_VALUE_MIN 1\r
-#define MARUCAM_CTRL_VALUE_MID 10\r
-#define MARUCAM_CTRL_VALUE_STEP 1\r
-\r
/* Worker-thread streaming states, stored in MaruCamState.streamon. */
enum {
    _MC_THREAD_PAUSED,    /* worker idle, parked on thread_cond */
    _MC_THREAD_STREAMON,  /* capture loop running */
    _MC_THREAD_STREAMOFF, /* stop requested; worker returns to PAUSED */
};
-\r
/* One mmap()ed V4L2 capture buffer shared with the host driver. */
typedef struct marucam_framebuffer {
    void *data;  /* start address returned by v4l2_mmap() */
    size_t size; /* mapped length in bytes */
} marucam_framebuffer;
-\r
/* Number of entries in 'framebuffer'; set by mmap_framebuffers(). */
static int n_framebuffer;
/* Array of mmap()ed capture buffers; owned here, freed on stop/close. */
static struct marucam_framebuffer *framebuffer;

/* Host video device backing the emulated camera. */
static const char *dev_name = "/dev/video0";
/* libv4l2 file descriptor for dev_name; valid while the device is open. */
static int v4l2_fd;
/* Remaining EIO retries for VIDIOC_DQBUF; reset to 10 per session. */
static int convert_trial;
/* Frames delivered in the current session; used to skip early frames. */
static int ready_count;

/* Currently negotiated capture format (set in device_open / s_fmt). */
static struct v4l2_format dst_fmt;
-\r
/*
 * Wrapper around raw ioctl() that retries while the call is interrupted
 * by a signal (EINTR). Used on the plain (non-libv4l2) fd in
 * marucam_device_check().
 *
 * The request parameter is 'unsigned long' to match the ioctl()
 * prototype: V4L2 request codes carry direction bits in the high bits
 * and do not fit a signed int without implementation-defined
 * conversion.
 *
 * Returns the ioctl() result: >= 0 on success, -1 on failure with
 * errno set.
 */
static int yioctl(int fd, unsigned long req, void *arg)
{
    int r;

    do {
        r = ioctl(fd, req, arg);
    } while (r < 0 && errno == EINTR);

    return r;
}
-\r
/*
 * libv4l2 counterpart of yioctl(): issue v4l2_ioctl() on the emulated
 * camera fd, retrying as long as the call fails with EINTR.
 * Returns >= 0 on success, -1 on failure with errno set.
 */
static int xioctl(int fd, int req, void *arg)
{
    int ret;

    for (;;) {
        ret = v4l2_ioctl(fd, req, arg);
        if (ret >= 0 || errno != EINTR) {
            break;
        }
    }

    return ret;
}
-\r
/* Destination pixel format entry exposed to the guest. */
typedef struct tagMaruCamConvertPixfmt {
    uint32_t fmt; /* fourcc */
} MaruCamConvertPixfmt;

/* Pixel formats the emulated camera reports (see marucam_device_enum_fmt()). */
static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
    { V4L2_PIX_FMT_YUYV },
    { V4L2_PIX_FMT_YUV420 },
    { V4L2_PIX_FMT_YVU420 },
};
-\r
/* Frame geometry entry exposed to the guest. */
typedef struct tagMaruCamConvertFrameInfo {
    uint32_t width;  /* frame width in pixels */
    uint32_t height; /* frame height in pixels */
} MaruCamConvertFrameInfo;

/* Frame sizes the emulated camera reports (see marucam_device_enum_fsizes()). */
static MaruCamConvertFrameInfo supported_dst_frames[] = {
    { 640, 480 },
    { 352, 288 },
    { 320, 240 },
    { 176, 144 },
    { 160, 120 },
};
-\r
/* Cached host-side range of one V4L2 user control. The guest always
 * sees a fixed 1..20 scale; these ranges translate between the guest
 * scale and the real device range. */
struct marucam_qctrl {
    uint32_t id;      /* V4L2_CID_* control id */
    uint32_t hit;     /* nonzero once the guest has touched the control */
    int32_t min;      /* device minimum */
    int32_t max;      /* device maximum */
    int32_t step;     /* device step */
    int32_t init_val; /* device default, restored by marucam_reset_controls() */
};

/* Controls exposed to the guest; indices 0..3 are hard-coded in the
 * s_ctrl/g_ctrl/qctrl switch statements. */
static struct marucam_qctrl qctrl_tbl[] = {
    { V4L2_CID_BRIGHTNESS, 0, },
    { V4L2_CID_CONTRAST, 0, },
    { V4L2_CID_SATURATION, 0, },
    { V4L2_CID_SHARPNESS, 0, },
};
-\r
-static void marucam_reset_controls(void)\r
-{\r
- uint32_t i;\r
- for (i = 0; i < ARRAY_SIZE(qctrl_tbl); i++) {\r
- if (qctrl_tbl[i].hit) {\r
- struct v4l2_control ctrl = {0,};\r
- qctrl_tbl[i].hit = 0;\r
- ctrl.id = qctrl_tbl[i].id;\r
- ctrl.value = qctrl_tbl[i].init_val;\r
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {\r
- ERR("Failed to reset control value: id(0x%x), errstr(%s)\n",\r
- ctrl.id, strerror(errno));\r
- }\r
- }\r
- }\r
-}\r
-\r
-static int32_t value_convert_from_guest(int32_t min, int32_t max, int32_t value)\r
-{\r
- double rate = 0.0;\r
- int32_t dist = 0, ret = 0;\r
-\r
- dist = max - min;\r
-\r
- if (dist < MARUCAM_CTRL_VALUE_MAX) {\r
- rate = (double)MARUCAM_CTRL_VALUE_MAX / (double)dist;\r
- ret = min + (int32_t)(value / rate);\r
- } else {\r
- rate = (double)dist / (double)MARUCAM_CTRL_VALUE_MAX;\r
- ret = min + (int32_t)(rate * value);\r
- }\r
- return ret;\r
-}\r
-\r
-static int32_t value_convert_to_guest(int32_t min, int32_t max, int32_t value)\r
-{\r
- double rate = 0.0;\r
- int32_t dist = 0, ret = 0;\r
-\r
- dist = max - min;\r
-\r
- if (dist < MARUCAM_CTRL_VALUE_MAX) {\r
- rate = (double)MARUCAM_CTRL_VALUE_MAX / (double)dist;\r
- ret = (int32_t)((double)(value - min) * rate);\r
- } else {\r
- rate = (double)dist / (double)MARUCAM_CTRL_VALUE_MAX;\r
- ret = (int32_t)((double)(value - min) / rate);\r
- }\r
-\r
- return ret;\r
-}\r
-\r
-static void set_maxframeinterval(MaruCamState *state, uint32_t pixel_format,\r
- uint32_t width, uint32_t height)\r
-{\r
- struct v4l2_frmivalenum fival;\r
- struct v4l2_streamparm sp;\r
- uint32_t min_num = 0, min_denom = 0;\r
-\r
- CLEAR(fival);\r
- fival.pixel_format = pixel_format;\r
- fival.width = width;\r
- fival.height = height;\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) < 0) {\r
- ERR("Unable to enumerate intervals for pixelformat(0x%x), (%d:%d)\n",\r
- pixel_format, width, height);\r
- return;\r
- }\r
-\r
- if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {\r
- float max_ival = -1.0;\r
- do {\r
- float cur_ival = (float)fival.discrete.numerator\r
- / (float)fival.discrete.denominator;\r
- if (cur_ival > max_ival) {\r
- max_ival = cur_ival;\r
- min_num = fival.discrete.numerator;\r
- min_denom = fival.discrete.denominator;\r
- }\r
- TRACE("Discrete frame interval %u/%u supported\n",\r
- fival.discrete.numerator, fival.discrete.denominator);\r
- fival.index++;\r
- } while (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0);\r
- } else if ((fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) ||\r
- (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)) {\r
- TRACE("Frame intervals from %u/%u to %u/%u supported",\r
- fival.stepwise.min.numerator, fival.stepwise.min.denominator,\r
- fival.stepwise.max.numerator, fival.stepwise.max.denominator);\r
- if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {\r
- TRACE("with %u/%u step", fival.stepwise.step.numerator,\r
- fival.stepwise.step.denominator);\r
- }\r
- if (((float)fival.stepwise.max.denominator /\r
- (float)fival.stepwise.max.numerator) >\r
- ((float)fival.stepwise.min.denominator /\r
- (float)fival.stepwise.min.numerator)) {\r
- min_num = fival.stepwise.max.numerator;\r
- min_denom = fival.stepwise.max.denominator;\r
- } else {\r
- min_num = fival.stepwise.min.numerator;\r
- min_denom = fival.stepwise.min.denominator;\r
- }\r
- }\r
- TRACE("The actual min values: %u/%u\n", min_num, min_denom);\r
-\r
- CLEAR(sp);\r
- sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- sp.parm.capture.timeperframe.numerator = min_num;\r
- sp.parm.capture.timeperframe.denominator = min_denom;\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_S_PARM, &sp) < 0) {\r
- ERR("Failed to set to minimum FPS(%u/%u)\n", min_num, min_denom);\r
- }\r
-}\r
-\r
-static uint32_t stop_capturing(void)\r
-{\r
- enum v4l2_buf_type type;\r
-\r
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- if (xioctl(v4l2_fd, VIDIOC_STREAMOFF, &type) < 0) {\r
- ERR("Failed to ioctl() with VIDIOC_STREAMOFF: %s\n", strerror(errno));\r
- return errno;\r
- }\r
- return 0;\r
-}\r
-\r
-static uint32_t start_capturing(void)\r
-{\r
- enum v4l2_buf_type type;\r
-\r
- type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- if (xioctl(v4l2_fd, VIDIOC_STREAMON, &type) < 0) {\r
- ERR("Failed to ioctl() with VIDIOC_STREAMON: %s\n", strerror(errno));\r
- return errno;\r
- }\r
- return 0;\r
-}\r
-\r
-static void free_framebuffers(marucam_framebuffer *fb, int buf_num)\r
-{\r
- int i;\r
-\r
- if (fb == NULL) {\r
- ERR("The framebuffer is NULL. Failed to release the framebuffer\n");\r
- return;\r
- } else if (buf_num == 0) {\r
- ERR("The buffer count is 0. Failed to release the framebuffer\n");\r
- return;\r
- } else {\r
- TRACE("[%s]:fb(0x%p), buf_num(%d)\n", __func__, fb, buf_num);\r
- }\r
-\r
- /* Unmap framebuffers. */\r
- for (i = 0; i < buf_num; i++) {\r
- if (fb[i].data != NULL) {\r
- v4l2_munmap(fb[i].data, fb[i].size);\r
- fb[i].data = NULL;\r
- fb[i].size = 0;\r
- } else {\r
- ERR("framebuffer[%d].data is NULL.\n", i);\r
- }\r
- }\r
-}\r
-\r
/*
 * Request MARUCAM_DEFAULT_BUFFER_COUNT mmap-type capture buffers from
 * the driver, memory-map each one and queue it for capture.
 *
 * On success returns 0; *fb points to a g_new0()-allocated array of
 * *buf_num mapped buffers. On failure an errno-style value is returned
 * and *fb/*buf_num still describe whatever was allocated/mapped so far,
 * so the caller must clean up with free_framebuffers() + g_free()
 * (marucam_device_start_preview() does exactly that).
 */
static uint32_t
mmap_framebuffers(marucam_framebuffer **fb, int *buf_num)
{
    struct v4l2_requestbuffers req;

    CLEAR(req);
    req.count = MARUCAM_DEFAULT_BUFFER_COUNT;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
    if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &req) < 0) {
        if (errno == EINVAL) {
            ERR("%s does not support memory mapping: %s\n",
                dev_name, strerror(errno));
        } else {
            ERR("Failed to request bufs: %s\n", strerror(errno));
        }
        return errno;
    }
    if (req.count == 0) {
        ERR("Insufficient buffer memory on %s\n", dev_name);
        return EINVAL;
    }

    /* The driver may grant a different count than requested; honor req.count. */
    *fb = g_new0(marucam_framebuffer, req.count);
    if (*fb == NULL) {
        ERR("Not enough memory to allocate framebuffers\n");
        return ENOMEM;
    }

    for (*buf_num = 0; *buf_num < req.count; ++*buf_num) {
        struct v4l2_buffer buf;
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = *buf_num;
        if (xioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf) < 0) {
            ERR("Failed to ioctl() with VIDIOC_QUERYBUF: %s\n",
                strerror(errno));
            return errno;
        }

        (*fb)[*buf_num].size = buf.length;
        (*fb)[*buf_num].data = v4l2_mmap(NULL,
                             buf.length,
                             PROT_READ | PROT_WRITE,
                             MAP_SHARED,
                             v4l2_fd, buf.m.offset);
        if (MAP_FAILED == (*fb)[*buf_num].data) {
            ERR("Failed to mmap: %s\n", strerror(errno));
            return errno;
        }

        /* Queue the mapped buffer. */
        CLEAR(buf);
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = *buf_num;
        if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
            ERR("Failed to ioctl() with VIDIOC_QBUF: %s\n", strerror(errno));
            return errno;
        }
    }
    return 0;
}
-\r
-static int is_streamon(MaruCamState *state)\r
-{\r
- int st;\r
- qemu_mutex_lock(&state->thread_mutex);\r
- st = state->streamon;\r
- qemu_mutex_unlock(&state->thread_mutex);\r
- return (st == _MC_THREAD_STREAMON);\r
-}\r
-\r
-static int is_stream_paused(MaruCamState *state)\r
-{\r
- int st;\r
- qemu_mutex_lock(&state->thread_mutex);\r
- st = state->streamon;\r
- qemu_mutex_unlock(&state->thread_mutex);\r
- return (st == _MC_THREAD_PAUSED);\r
-}\r
-\r
/*
 * Signal a fatal capture error to the guest: while streaming, drop any
 * pending frame request, set the error bit (0x08) in the ISR and kick
 * the tx bottom-half so the interrupt is raised from the iothread.
 */
static void __raise_err_intr(MaruCamState *state)
{
    qemu_mutex_lock(&state->thread_mutex);
    if (state->streamon == _MC_THREAD_STREAMON) {
        state->req_frame = 0; /* clear request */
        state->isr = 0x08; /* error flag; raises an interrupt via tx_bh */
        qemu_bh_schedule(state->tx_bh);
    }
    qemu_mutex_unlock(&state->thread_mutex);
}
-\r
/*
 * Copy one captured frame into the guest-visible buffer and raise the
 * completion interrupt. Runs on the worker thread.
 *
 * Skips the first MARUCAM_SKIPFRAMES frames of a session and does
 * nothing when the guest has no outstanding frame request. Exactly
 * state->buf_size bytes are copied from 'ptr'.
 *
 * NOTE(review): the 'size' argument (driver's bytesused) is unused; the
 * copy trusts state->buf_size — confirm the negotiated sizeimage always
 * matches what the driver actually delivers.
 */
static void
notify_buffer_ready(MaruCamState *state, void *ptr, size_t size)
{
    void *buf = NULL;

    qemu_mutex_lock(&state->thread_mutex);
    if (state->streamon == _MC_THREAD_STREAMON) {
        if (ready_count < MARUCAM_SKIPFRAMES) {
            /* skip a frame cause first some frame are distorted */
            ++ready_count;
            TRACE("Skip %d frame\n", ready_count);
            qemu_mutex_unlock(&state->thread_mutex);
            return;
        }
        if (state->req_frame == 0) {
            TRACE("There is no request\n");
            qemu_mutex_unlock(&state->thread_mutex);
            return;
        }
        /* req_frame is 1-based; slot 0 starts at state->vaddr */
        buf = state->vaddr + state->buf_size * (state->req_frame - 1);
        memcpy(buf, ptr, state->buf_size);
        state->req_frame = 0; /* clear request */
        state->isr |= 0x01; /* completion flag; raises an interrupt via tx_bh */
        qemu_bh_schedule(state->tx_bh);
    }
    qemu_mutex_unlock(&state->thread_mutex);
}
-\r
/*
 * Dequeue one filled capture buffer, deliver its payload via
 * notify_buffer_ready(), then queue the buffer back to the driver.
 *
 * Returns 0 when streaming should continue (including transient
 * EAGAIN/EINTR and a bounded number of EIO errors), -1 on fatal error.
 */
static int read_frame(MaruCamState *state)
{
    struct v4l2_buffer buf;

    CLEAR(buf);
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (xioctl(v4l2_fd, VIDIOC_DQBUF, &buf) < 0) {
        switch (errno) {
        case EAGAIN:
        case EINTR:
            /* no buffer ready yet / interrupted: retry on the next pass */
            ERR("DQBUF error, try again: %s\n", strerror(errno));
            return 0;
        case EIO:
            ERR("The v4l2_read() met the EIO\n");
            /* convert_trial starts at 10 per session (see worker thread);
             * give up once the EIO retry budget is exhausted */
            if (convert_trial-- == -1) {
                ERR("Try count for v4l2_read is exceeded: %s\n",
                    strerror(errno));
                return -1;
            }
            return 0;
        default:
            ERR("DQBUF error: %s\n", strerror(errno));
            return -1;
        }
    }

    notify_buffer_ready(state, framebuffer[buf.index].data, buf.bytesused);

    if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
        ERR("QBUF error: %s\n", strerror(errno));
        return -1;
    }
    return 0;
}
-\r
/*
 * One iteration of the capture loop: wait up to 1 second for the fd to
 * become readable, then read and deliver a single frame.
 *
 * Returns 0 to keep streaming, -1 on fatal error (after raising the
 * error interrupt via __raise_err_intr()). Five consecutive select()
 * timeouts are treated as fatal; the static counter persists across
 * calls and is cleared after any successful frame.
 */
static int __v4l2_streaming(MaruCamState *state)
{
    fd_set fds;
    struct timeval tv;
    int ret;
    /* consecutive select() timeouts; survives across calls */
    static uint32_t timeout_n = 0;

    FD_ZERO(&fds);
    FD_SET(v4l2_fd, &fds);

    tv.tv_sec = 1;
    tv.tv_usec = 0;

    ret = select(v4l2_fd + 1, &fds, NULL, NULL, &tv);
    if (ret < 0) {
        if (errno == EAGAIN || errno == EINTR) {
            ERR("Select again: %s\n", strerror(errno));
            return 0;
        }
        ERR("Failed to select: %s\n", strerror(errno));
        __raise_err_intr(state);
        return -1;
    } else if (!ret) {
        timeout_n++;
        ERR("Select timed out: count(%u)\n", timeout_n);
        if (timeout_n >= 5) {
            __raise_err_intr(state);
            return -1;
        }
        return 0;
    }

    /* guard against the device having been closed underneath us */
    if (!v4l2_fd || (v4l2_fd == -1)) {
        ERR("The file descriptor is closed or not opened\n");
        __raise_err_intr(state);
        return -1;
    }

    ret = read_frame(state);
    if (ret < 0) {
        ERR("Failed to operate the read_frame()\n");
        __raise_err_intr(state);
        return -1;
    }

    /* clear the skip count for select time-out */
    if (timeout_n > 0) {
        timeout_n = 0;
    }

    return 0;
}
-\r
/* Worker thread */
/*
 * Capture worker: parks in PAUSED waiting on thread_cond, streams frames
 * while the state is STREAMON, and exits when woken with
 * state->destroying set. Woken by marucam_device_start_preview() (start
 * streaming) or marucam_device_exit() (terminate).
 *
 * NOTE(review): state->destroying is read after thread_mutex has been
 * released; it is only ever set before the wake-up signal, but confirm
 * this ordering is safe on all paths.
 */
static void *marucam_worker_thread(void *thread_param)
{
    MaruCamState *state = (MaruCamState *)thread_param;

    while (1) {
        qemu_mutex_lock(&state->thread_mutex);
        state->streamon = _MC_THREAD_PAUSED;
        qemu_cond_wait(&state->thread_cond, &state->thread_mutex);
        qemu_mutex_unlock(&state->thread_mutex);

        if (state->destroying) {
            break;
        }

        /* fresh retry/skip budgets for the new streaming session */
        convert_trial = 10;
        ready_count = 0;
        qemu_mutex_lock(&state->thread_mutex);
        state->streamon = _MC_THREAD_STREAMON;
        qemu_mutex_unlock(&state->thread_mutex);
        INFO("Streaming on ......\n");

        while (1) {
            if (is_streamon(state)) {
                if (__v4l2_streaming(state) < 0) {
                    INFO("...... Streaming off\n");
                    break;
                }
            } else {
                INFO("...... Streaming off\n");
                break;
            }
        }
    }

    return NULL;
}
-\r
-int marucam_device_check(int log_flag)\r
-{\r
- int tmp_fd;\r
- struct timeval t1, t2;\r
- struct stat st;\r
- struct v4l2_fmtdesc format;\r
- struct v4l2_frmsizeenum size;\r
- struct v4l2_capability cap;\r
- int ret = 0;\r
-\r
- gettimeofday(&t1, NULL);\r
- if (stat(dev_name, &st) < 0) {\r
- fprintf(stdout, "[Webcam] <WARNING> Cannot identify '%s': %s\n",\r
- dev_name, strerror(errno));\r
- } else {\r
- if (!S_ISCHR(st.st_mode)) {\r
- fprintf(stdout, "[Webcam] <WARNING>%s is no character device\n",\r
- dev_name);\r
- }\r
- }\r
-\r
- tmp_fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);\r
- if (tmp_fd < 0) {\r
- fprintf(stdout, "[Webcam] Camera device open failed: %s\n", dev_name);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
- }\r
- if (ioctl(tmp_fd, VIDIOC_QUERYCAP, &cap) < 0) {\r
- fprintf(stdout, "[Webcam] Could not qeury video capabilities\n");\r
- close(tmp_fd);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
- }\r
- if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||\r
- !(cap.capabilities & V4L2_CAP_STREAMING)) {\r
- fprintf(stdout, "[Webcam] Not supported video driver\n");\r
- close(tmp_fd);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
- }\r
- ret = 1;\r
-\r
- if (log_flag) {\r
- fprintf(stdout, "[Webcam] Driver: %s\n", cap.driver);\r
- fprintf(stdout, "[Webcam] Card: %s\n", cap.card);\r
- fprintf(stdout, "[Webcam] Bus info: %s\n", cap.bus_info);\r
-\r
- CLEAR(format);\r
- format.index = 0;\r
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
-\r
- if (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) < 0) {\r
- close(tmp_fd);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
- }\r
-\r
- do {\r
- CLEAR(size);\r
- size.index = 0;\r
- size.pixel_format = format.pixelformat;\r
-\r
- fprintf(stdout, "[Webcam] PixelFormat: %c%c%c%c\n",\r
- (char)(format.pixelformat),\r
- (char)(format.pixelformat >> 8),\r
- (char)(format.pixelformat >> 16),\r
- (char)(format.pixelformat >> 24));\r
-\r
- if (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) {\r
- close(tmp_fd);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
- }\r
-\r
- if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {\r
- do {\r
- fprintf(stdout, "[Webcam] got discrete frame size %dx%d\n",\r
- size.discrete.width, size.discrete.height);\r
- size.index++;\r
- } while (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);\r
- } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {\r
- fprintf(stdout, "[Webcam] we have stepwise frame sizes:\n");\r
- fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",\r
- size.stepwise.min_width, size.stepwise.min_height);\r
- fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",\r
- size.stepwise.max_width, size.stepwise.max_height);\r
- fprintf(stdout, "[Webcam] step width: %d, step height: %d\n",\r
- size.stepwise.step_width, size.stepwise.step_height);\r
- } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {\r
- fprintf(stdout, "[Webcam] we have continuous frame sizes:\n");\r
- fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",\r
- size.stepwise.min_width, size.stepwise.min_height);\r
- fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",\r
- size.stepwise.max_width, size.stepwise.max_height);\r
-\r
- }\r
- format.index++;\r
- } while (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) >= 0);\r
- }\r
-\r
- close(tmp_fd);\r
- gettimeofday(&t2, NULL);\r
- fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",\r
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
- return ret;\r
-}\r
-\r
-void marucam_device_init(MaruCamState *state)\r
-{\r
- state->destroying = false;\r
- qemu_thread_create(&state->thread_id, marucam_worker_thread, (void *)state,\r
- QEMU_THREAD_JOINABLE);\r
-}\r
-\r
-void marucam_device_exit(MaruCamState *state)\r
-{\r
- state->destroying = true;\r
- qemu_mutex_lock(&state->thread_mutex);\r
- qemu_cond_signal(&state->thread_cond);\r
- qemu_mutex_unlock(&state->thread_mutex);\r
- qemu_thread_join(&state->thread_id);\r
-}\r
-\r
/*
 * Open the host camera through libv4l2 and program a default capture
 * format (640x480 YUYV). On failure param->errCode is set: EINVAL when
 * the device cannot be opened, otherwise the errno from VIDIOC_S_FMT.
 *
 * NOTE(review): if VIDIOC_S_FMT fails, v4l2_fd stays open — presumably
 * released later via marucam_device_close(); verify against the caller.
 */
void marucam_device_open(MaruCamState *state)
{
    MaruCamParam *param = state->param;

    param->top = 0;
    v4l2_fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
    if (v4l2_fd < 0) {
        ERR("The v4l2 device open failed: %s\n", dev_name);
        param->errCode = EINVAL;
        return;
    }
    INFO("Opened\n");

    /* FIXME : Do not use fixed values */
    CLEAR(dst_fmt);
    dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    dst_fmt.fmt.pix.width = 640;
    dst_fmt.fmt.pix.height = 480;
    dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    dst_fmt.fmt.pix.field = V4L2_FIELD_ANY;

    if (xioctl(v4l2_fd, VIDIOC_S_FMT, &dst_fmt) < 0) {
        ERR("Failed to set video format: format(0x%x), width:height(%d:%d), "
            "errstr(%s)\n", dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.width,
            dst_fmt.fmt.pix.height, strerror(errno));
        param->errCode = errno;
        return;
    }
    TRACE("Set the default format: w:h(%dx%d), fmt(0x%x), size(%d), "
          "color(%d), field(%d)\n",
          dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,
          dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,
          dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);
}
-\r
/*
 * Begin streaming: map and queue the capture buffers, start the V4L2
 * stream, then wake the worker thread and poll (10 ms steps) until it
 * reports STREAMON. On failure param->errCode carries an errno-style
 * value and any mapped buffers are released.
 */
void marucam_device_start_preview(MaruCamState *state)
{
    struct timespec req;
    MaruCamParam *param = state->param;
    param->top = 0;
    req.tv_sec = 0;
    req.tv_nsec = 10000000; /* 10 ms polling interval */

    INFO("Pixfmt(%c%c%c%C), W:H(%d:%d), buf size(%u)\n",
         (char)(dst_fmt.fmt.pix.pixelformat),
         (char)(dst_fmt.fmt.pix.pixelformat >> 8),
         (char)(dst_fmt.fmt.pix.pixelformat >> 16),
         (char)(dst_fmt.fmt.pix.pixelformat >> 24),
         dst_fmt.fmt.pix.width,
         dst_fmt.fmt.pix.height,
         dst_fmt.fmt.pix.sizeimage);

    param->errCode = mmap_framebuffers(&framebuffer, &n_framebuffer);
    if (param->errCode) {
        ERR("Failed to mmap framebuffers\n");
        /* mmap_framebuffers() may fail mid-way; release what it mapped */
        if (framebuffer != NULL) {
            free_framebuffers(framebuffer, n_framebuffer);
            g_free(framebuffer);
            framebuffer = NULL;
            n_framebuffer = 0;
        }
        return;
    }

    param->errCode = start_capturing();
    if (param->errCode) {
        if (framebuffer != NULL) {
            free_framebuffers(framebuffer, n_framebuffer);
            g_free(framebuffer);
            framebuffer = NULL;
            n_framebuffer = 0;
        }
        return;
    }

    INFO("Starting preview\n");
    /* frame size the worker copies per request (see notify_buffer_ready) */
    state->buf_size = dst_fmt.fmt.pix.sizeimage;
    qemu_mutex_lock(&state->thread_mutex);
    qemu_cond_signal(&state->thread_cond);
    qemu_mutex_unlock(&state->thread_mutex);

    /* nanosleep until thread is streamon */
    while (!is_streamon(state)) {
        nanosleep(&req, NULL);
    }
}
-\r
-void marucam_device_stop_preview(MaruCamState *state)\r
-{\r
- struct timespec req;\r
- struct v4l2_requestbuffers reqbuf;\r
- MaruCamParam *param = state->param;\r
- param->top = 0;\r
- req.tv_sec = 0;\r
- req.tv_nsec = 50000000;\r
-\r
- if (is_streamon(state)) {\r
- qemu_mutex_lock(&state->thread_mutex);\r
- state->streamon = _MC_THREAD_STREAMOFF;\r
- qemu_mutex_unlock(&state->thread_mutex);\r
-\r
- /* nanosleep until thread is paused */\r
- while (!is_stream_paused(state)) {\r
- nanosleep(&req, NULL);\r
- }\r
- }\r
-\r
- param->errCode = stop_capturing();\r
- if (framebuffer != NULL) {\r
- free_framebuffers(framebuffer, n_framebuffer);\r
- g_free(framebuffer);\r
- framebuffer = NULL;\r
- n_framebuffer = 0;\r
- }\r
- state->buf_size = 0;\r
-\r
- reqbuf.count = 0;\r
- reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- reqbuf.memory = V4L2_MEMORY_MMAP;\r
- if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf) < 0) {\r
- ERR("Failed to ioctl() with VIDIOC_REQBUF in stop_preview: %s\n",\r
- strerror(errno));\r
- }\r
- INFO("Stopping preview\n");\r
-}\r
-\r
-void marucam_device_s_param(MaruCamState *state)\r
-{\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
-\r
- /* If KVM enabled, We use default FPS of the webcam.\r
- * If KVM disabled, we use mininum FPS of the webcam */\r
- if (!kvm_enabled()) {\r
- set_maxframeinterval(state, dst_fmt.fmt.pix.pixelformat,\r
- dst_fmt.fmt.pix.width,\r
- dst_fmt.fmt.pix.height);\r
- }\r
-}\r
-\r
/*
 * VIDIOC_G_PARM handler: report streaming parameters to the guest.
 * Always reports a fixed 1/30 s time-per-frame regardless of the host
 * device's actual rate.
 */
void marucam_device_g_param(MaruCamState *state)
{
    MaruCamParam *param = state->param;

    /* We use default FPS of the webcam
     * return a fixed value on guest ini file (1/30).
     */
    param->top = 0;
    param->stack[0] = 0x1000; /* V4L2_CAP_TIMEPERFRAME */
    param->stack[1] = 1; /* numerator */
    param->stack[2] = 30; /* denominator */
}
-\r
-void marucam_device_s_fmt(MaruCamState *state)\r
-{\r
- struct v4l2_format format;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(format);\r
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- format.fmt.pix.width = param->stack[0];\r
- format.fmt.pix.height = param->stack[1];\r
- format.fmt.pix.pixelformat = param->stack[2];\r
- format.fmt.pix.field = V4L2_FIELD_ANY;\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_S_FMT, &format) < 0) {\r
- ERR("Failed to set video format: format(0x%x), width:height(%d:%d), "\r
- "errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,\r
- format.fmt.pix.height, strerror(errno));\r
- param->errCode = errno;\r
- return;\r
- }\r
-\r
- memcpy(&dst_fmt, &format, sizeof(format));\r
- param->stack[0] = dst_fmt.fmt.pix.width;\r
- param->stack[1] = dst_fmt.fmt.pix.height;\r
- param->stack[2] = dst_fmt.fmt.pix.field;\r
- param->stack[3] = dst_fmt.fmt.pix.pixelformat;\r
- param->stack[4] = dst_fmt.fmt.pix.bytesperline;\r
- param->stack[5] = dst_fmt.fmt.pix.sizeimage;\r
- param->stack[6] = dst_fmt.fmt.pix.colorspace;\r
- param->stack[7] = dst_fmt.fmt.pix.priv;\r
- TRACE("Set the format: w:h(%dx%d), fmt(0x%x), size(%d), "\r
- "color(%d), field(%d)\n",\r
- dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,\r
- dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,\r
- dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);\r
-}\r
-\r
-void marucam_device_g_fmt(MaruCamState *state)\r
-{\r
- struct v4l2_format format;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(format);\r
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_G_FMT, &format) < 0) {\r
- ERR("Failed to get video format: %s\n", strerror(errno));\r
- param->errCode = errno;\r
- } else {\r
- param->stack[0] = format.fmt.pix.width;\r
- param->stack[1] = format.fmt.pix.height;\r
- param->stack[2] = format.fmt.pix.field;\r
- param->stack[3] = format.fmt.pix.pixelformat;\r
- param->stack[4] = format.fmt.pix.bytesperline;\r
- param->stack[5] = format.fmt.pix.sizeimage;\r
- param->stack[6] = format.fmt.pix.colorspace;\r
- param->stack[7] = format.fmt.pix.priv;\r
- TRACE("Get the format: w:h(%dx%d), fmt(0x%x), size(%d), "\r
- "color(%d), field(%d)\n",\r
- format.fmt.pix.width, format.fmt.pix.height,\r
- format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,\r
- format.fmt.pix.colorspace, format.fmt.pix.field);\r
- }\r
-}\r
-\r
-void marucam_device_try_fmt(MaruCamState *state)\r
-{\r
- struct v4l2_format format;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(format);\r
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
- format.fmt.pix.width = param->stack[0];\r
- format.fmt.pix.height = param->stack[1];\r
- format.fmt.pix.pixelformat = param->stack[2];\r
- format.fmt.pix.field = V4L2_FIELD_ANY;\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_TRY_FMT, &format) < 0) {\r
- ERR("Failed to check video format: format(0x%x), width:height(%d:%d),"\r
- " errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,\r
- format.fmt.pix.height, strerror(errno));\r
- param->errCode = errno;\r
- return;\r
- }\r
- param->stack[0] = format.fmt.pix.width;\r
- param->stack[1] = format.fmt.pix.height;\r
- param->stack[2] = format.fmt.pix.field;\r
- param->stack[3] = format.fmt.pix.pixelformat;\r
- param->stack[4] = format.fmt.pix.bytesperline;\r
- param->stack[5] = format.fmt.pix.sizeimage;\r
- param->stack[6] = format.fmt.pix.colorspace;\r
- param->stack[7] = format.fmt.pix.priv;\r
- TRACE("Check the format: w:h(%dx%d), fmt(0x%x), size(%d), "\r
- "color(%d), field(%d)\n",\r
- format.fmt.pix.width, format.fmt.pix.height,\r
- format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,\r
- format.fmt.pix.colorspace, format.fmt.pix.field);\r
-}\r
-\r
-void marucam_device_enum_fmt(MaruCamState *state)\r
-{\r
- uint32_t index;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- index = param->stack[0];\r
-\r
- if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
- param->stack[1] = 0; /* flags = NONE */\r
- param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */\r
- /* set description */\r
- switch (supported_dst_pixfmts[index].fmt) {\r
- case V4L2_PIX_FMT_YUYV:\r
- memcpy(¶m->stack[3], "YUYV", 32);\r
- break;\r
- case V4L2_PIX_FMT_YUV420:\r
- memcpy(¶m->stack[3], "YU12", 32);\r
- break;\r
- case V4L2_PIX_FMT_YVU420:\r
- memcpy(¶m->stack[3], "YV12", 32);\r
- break;\r
- default:\r
- ERR("Invalid fixel format\n");\r
- param->errCode = EINVAL;\r
- break;\r
- }\r
-}\r
-\r
-void marucam_device_qctrl(MaruCamState *state)\r
-{\r
- uint32_t i;\r
- char name[32] = {0,};\r
- struct v4l2_queryctrl ctrl;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(ctrl);\r
- ctrl.id = param->stack[0];\r
-\r
- switch (ctrl.id) {\r
- case V4L2_CID_BRIGHTNESS:\r
- TRACE("Query : BRIGHTNESS\n");\r
- memcpy((void *)name, (void *)"brightness", 32);\r
- i = 0;\r
- break;\r
- case V4L2_CID_CONTRAST:\r
- TRACE("Query : CONTRAST\n");\r
- memcpy((void *)name, (void *)"contrast", 32);\r
- i = 1;\r
- break;\r
- case V4L2_CID_SATURATION:\r
- TRACE("Query : SATURATION\n");\r
- memcpy((void *)name, (void *)"saturation", 32);\r
- i = 2;\r
- break;\r
- case V4L2_CID_SHARPNESS:\r
- TRACE("Query : SHARPNESS\n");\r
- memcpy((void *)name, (void *)"sharpness", 32);\r
- i = 3;\r
- break;\r
- default:\r
- ERR("Invalid control ID\n");\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_QUERYCTRL, &ctrl) < 0) {\r
- if (errno != EINVAL) {\r
- ERR("Failed to query video controls: %s\n", strerror(errno));\r
- }\r
- param->errCode = errno;\r
- return;\r
- } else {\r
- struct v4l2_control sctrl;\r
- CLEAR(sctrl);\r
- sctrl.id = ctrl.id;\r
- if ((ctrl.maximum + ctrl.minimum) == 0) {\r
- sctrl.value = 0;\r
- } else {\r
- sctrl.value = (ctrl.maximum + ctrl.minimum) / 2;\r
- }\r
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &sctrl) < 0) {\r
- ERR("Failed to set control value: id(0x%x), value(%d), "\r
- "errstr(%s)\n", sctrl.id, sctrl.value, strerror(errno));\r
- param->errCode = errno;\r
- return;\r
- }\r
- qctrl_tbl[i].hit = 1;\r
- qctrl_tbl[i].min = ctrl.minimum;\r
- qctrl_tbl[i].max = ctrl.maximum;\r
- qctrl_tbl[i].step = ctrl.step;\r
- qctrl_tbl[i].init_val = ctrl.default_value;\r
- }\r
-\r
- /* set fixed values by FW configuration file */\r
- param->stack[0] = ctrl.id;\r
- param->stack[1] = MARUCAM_CTRL_VALUE_MIN; /* minimum */\r
- param->stack[2] = MARUCAM_CTRL_VALUE_MAX; /* maximum */\r
- param->stack[3] = MARUCAM_CTRL_VALUE_STEP; /* step */\r
- param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */\r
- param->stack[5] = ctrl.flags;\r
- /* name field setting */\r
- memcpy(¶m->stack[6], (void *)name, sizeof(ctrl.name));\r
-}\r
-\r
-void marucam_device_s_ctrl(MaruCamState *state)\r
-{\r
- uint32_t i;\r
- struct v4l2_control ctrl;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(ctrl);\r
- ctrl.id = param->stack[0];\r
-\r
- switch (ctrl.id) {\r
- case V4L2_CID_BRIGHTNESS:\r
- i = 0;\r
- TRACE("%d is set to the value of the BRIGHTNESS\n", param->stack[1]);\r
- break;\r
- case V4L2_CID_CONTRAST:\r
- i = 1;\r
- TRACE("%d is set to the value of the CONTRAST\n", param->stack[1]);\r
- break;\r
- case V4L2_CID_SATURATION:\r
- i = 2;\r
- TRACE("%d is set to the value of the SATURATION\n", param->stack[1]);\r
- break;\r
- case V4L2_CID_SHARPNESS:\r
- i = 3;\r
- TRACE("%d is set to the value of the SHARPNESS\n", param->stack[1]);\r
- break;\r
- default:\r
- ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
-\r
- ctrl.value = value_convert_from_guest(qctrl_tbl[i].min,\r
- qctrl_tbl[i].max, param->stack[1]);\r
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {\r
- ERR("Failed to set control value: id(0x%x), value(r:%d, c:%d), "\r
- "errstr(%s)\n", ctrl.id, param->stack[1], ctrl.value,\r
- strerror(errno));\r
- param->errCode = errno;\r
- return;\r
- }\r
-}\r
-\r
-void marucam_device_g_ctrl(MaruCamState *state)\r
-{\r
- uint32_t i;\r
- struct v4l2_control ctrl;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- CLEAR(ctrl);\r
- ctrl.id = param->stack[0];\r
-\r
- switch (ctrl.id) {\r
- case V4L2_CID_BRIGHTNESS:\r
- TRACE("Gets the value of the BRIGHTNESS\n");\r
- i = 0;\r
- break;\r
- case V4L2_CID_CONTRAST:\r
- TRACE("Gets the value of the CONTRAST\n");\r
- i = 1;\r
- break;\r
- case V4L2_CID_SATURATION:\r
- TRACE("Gets the value of the SATURATION\n");\r
- i = 2;\r
- break;\r
- case V4L2_CID_SHARPNESS:\r
- TRACE("Gets the value of the SHARPNESS\n");\r
- i = 3;\r
- break;\r
- default:\r
- ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
-\r
- if (xioctl(v4l2_fd, VIDIOC_G_CTRL, &ctrl) < 0) {\r
- ERR("Failed to get video control value: %s\n", strerror(errno));\r
- param->errCode = errno;\r
- return;\r
- }\r
- param->stack[0] = value_convert_to_guest(qctrl_tbl[i].min,\r
- qctrl_tbl[i].max, ctrl.value);\r
- TRACE("Value: %d\n", param->stack[0]);\r
-}\r
-\r
-void marucam_device_enum_fsizes(MaruCamState *state)\r
-{\r
- uint32_t index, pixfmt, i;\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
- index = param->stack[0];\r
- pixfmt = param->stack[1];\r
-\r
- if (index >= ARRAY_SIZE(supported_dst_frames)) {\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {\r
- if (supported_dst_pixfmts[i].fmt == pixfmt) {\r
- break;\r
- }\r
- }\r
-\r
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
-\r
- param->stack[0] = supported_dst_frames[index].width;\r
- param->stack[1] = supported_dst_frames[index].height;\r
-}\r
-\r
-void marucam_device_enum_fintv(MaruCamState *state)\r
-{\r
- MaruCamParam *param = state->param;\r
-\r
- param->top = 0;\r
-\r
- /* switch by index(param->stack[0]) */\r
- switch (param->stack[0]) {\r
- case 0:\r
- /* we only use 1/30 frame interval */\r
- param->stack[1] = 30; /* denominator */\r
- break;\r
- default:\r
- param->errCode = EINVAL;\r
- return;\r
- }\r
- param->stack[0] = 1; /* numerator */\r
-}\r
-\r
-void marucam_device_close(MaruCamState *state)\r
-{\r
- if (!is_stream_paused(state)) {\r
- marucam_device_stop_preview(state);\r
- }\r
-\r
- marucam_reset_controls();\r
-\r
- v4l2_close(v4l2_fd);\r
- v4l2_fd = 0;\r
- INFO("Closed\n");\r
-}\r
+/*
+ * Implementation of MARU Virtual Camera device by PCI bus on Linux.
+ *
+ * Copyright (c) 2011 - 2013 Samsung Electronics Co., Ltd All Rights Reserved
+ *
+ * Contact:
+ * JinHyung Jo <jinhyung.jo@samsung.com>
+ * YeongKyoon Lee <yeongkyoon.lee@samsung.com>
+ *
+ * This program is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU General Public License
+ * as published by the Free Software Foundation; either version 2
+ * of the License, or (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
+ * MA 02110-1301, USA.
+ *
+ * Contributors:
+ * - S-Core Co., Ltd
+ *
+ */
+
+#include "qemu-common.h"
+#include "maru_camera_common.h"
+#include "pci.h"
+#include "kvm.h"
+#include "tizen/src/debug_ch.h"
+
+#include <linux/videodev2.h>
+
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+
+#include <libv4l2.h>
+#include <libv4lconvert.h>
+
+MULTI_DEBUG_CHANNEL(tizen, camera_linux);
+
+#define CLEAR(x) memset(&(x), 0, sizeof(x))
+
+#define MARUCAM_DEFAULT_BUFFER_COUNT 4
+#define MARUCAM_DUMMYFRAME_COUNT 2
+
+#define MARUCAM_CTRL_VALUE_MAX 20
+#define MARUCAM_CTRL_VALUE_MIN 1
+#define MARUCAM_CTRL_VALUE_MID 10
+#define MARUCAM_CTRL_VALUE_STEP 1
+
+enum {
+ _MC_THREAD_PAUSED,
+ _MC_THREAD_STREAMON,
+ _MC_THREAD_STREAMOFF,
+};
+
+typedef struct marucam_framebuffer {
+ void *data;
+ size_t size;
+} marucam_framebuffer;
+
+static int n_framebuffer;
+static struct marucam_framebuffer *framebuffer;
+
+static const char *dev_name = "/dev/video0";
+static int v4l2_fd;
+static int convert_trial;
+static int ready_count;
+static int timeout_n;
+
+static struct v4l2_format dst_fmt;
+
+static void make_yu12_black(unsigned char *dest, uint32_t width, uint32_t height)
+{
+ uint32_t x, y;
+ unsigned char *udest, *vdest;
+
+ /* Y */
+ for (y = 0; y < height; y++) {
+ for (x = 0; x < width; x++) {
+ *dest++ = 16;
+ }
+ }
+
+ /* U + V */
+ udest = dest;
+ vdest = dest + width * height / 4;
+
+ for (y = 0; y < height / 2; y++) {
+ for (x = 0; x < width / 2; x++) {
+ *udest++ = *vdest++ = 128;
+ }
+ }
+}
+
+static int yioctl(int fd, int req, void *arg)
+{
+ int r;
+
+ do {
+ r = ioctl(fd, req, arg);
+ } while (r < 0 && errno == EINTR);
+
+ return r;
+}
+
+static int xioctl(int fd, int req, void *arg)
+{
+ int r;
+
+ do {
+ r = v4l2_ioctl(fd, req, arg);
+ } while (r < 0 && errno == EINTR);
+
+ return r;
+}
+
+typedef struct tagMaruCamConvertPixfmt {
+ uint32_t fmt; /* fourcc */
+} MaruCamConvertPixfmt;
+
+static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
+ { V4L2_PIX_FMT_YUYV },
+ { V4L2_PIX_FMT_YUV420 },
+ { V4L2_PIX_FMT_YVU420 },
+};
+
+typedef struct tagMaruCamConvertFrameInfo {
+ uint32_t width;
+ uint32_t height;
+} MaruCamConvertFrameInfo;
+
+static MaruCamConvertFrameInfo supported_dst_frames[] = {
+ { 640, 480 },
+ { 352, 288 },
+ { 320, 240 },
+ { 176, 144 },
+ { 160, 120 },
+};
+
+struct marucam_qctrl {
+ uint32_t id;
+ uint32_t hit;
+ int32_t min;
+ int32_t max;
+ int32_t step;
+ int32_t init_val;
+};
+
+static struct marucam_qctrl qctrl_tbl[] = {
+ { V4L2_CID_BRIGHTNESS, 0, },
+ { V4L2_CID_CONTRAST, 0, },
+ { V4L2_CID_SATURATION, 0, },
+ { V4L2_CID_SHARPNESS, 0, },
+};
+
+static void marucam_reset_controls(void)
+{
+ uint32_t i;
+ for (i = 0; i < ARRAY_SIZE(qctrl_tbl); i++) {
+ if (qctrl_tbl[i].hit) {
+ struct v4l2_control ctrl = {0,};
+ qctrl_tbl[i].hit = 0;
+ ctrl.id = qctrl_tbl[i].id;
+ ctrl.value = qctrl_tbl[i].init_val;
+ if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {
+ ERR("Failed to reset control value: id(0x%x), errstr(%s)\n",
+ ctrl.id, strerror(errno));
+ }
+ }
+ }
+}
+
+static int32_t value_convert_from_guest(int32_t min, int32_t max, int32_t value)
+{
+ double rate = 0.0;
+ int32_t dist = 0, ret = 0;
+
+ dist = max - min;
+
+ if (dist < MARUCAM_CTRL_VALUE_MAX) {
+ rate = (double)MARUCAM_CTRL_VALUE_MAX / (double)dist;
+ ret = min + (int32_t)(value / rate);
+ } else {
+ rate = (double)dist / (double)MARUCAM_CTRL_VALUE_MAX;
+ ret = min + (int32_t)(rate * value);
+ }
+ return ret;
+}
+
+static int32_t value_convert_to_guest(int32_t min, int32_t max, int32_t value)
+{
+ double rate = 0.0;
+ int32_t dist = 0, ret = 0;
+
+ dist = max - min;
+
+ if (dist < MARUCAM_CTRL_VALUE_MAX) {
+ rate = (double)MARUCAM_CTRL_VALUE_MAX / (double)dist;
+ ret = (int32_t)((double)(value - min) * rate);
+ } else {
+ rate = (double)dist / (double)MARUCAM_CTRL_VALUE_MAX;
+ ret = (int32_t)((double)(value - min) / rate);
+ }
+
+ return ret;
+}
+
+static void set_maxframeinterval(MaruCamState *state, uint32_t pixel_format,
+ uint32_t width, uint32_t height)
+{
+ struct v4l2_frmivalenum fival;
+ struct v4l2_streamparm sp;
+ uint32_t min_num = 0, min_denom = 0;
+
+ CLEAR(fival);
+ fival.pixel_format = pixel_format;
+ fival.width = width;
+ fival.height = height;
+
+ if (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) < 0) {
+ ERR("Unable to enumerate intervals for pixelformat(0x%x), (%d:%d)\n",
+ pixel_format, width, height);
+ return;
+ }
+
+ if (fival.type == V4L2_FRMIVAL_TYPE_DISCRETE) {
+ float max_ival = -1.0;
+ do {
+ float cur_ival = (float)fival.discrete.numerator
+ / (float)fival.discrete.denominator;
+ if (cur_ival > max_ival) {
+ max_ival = cur_ival;
+ min_num = fival.discrete.numerator;
+ min_denom = fival.discrete.denominator;
+ }
+ TRACE("Discrete frame interval %u/%u supported\n",
+ fival.discrete.numerator, fival.discrete.denominator);
+ fival.index++;
+ } while (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0);
+ } else if ((fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) ||
+ (fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)) {
+ TRACE("Frame intervals from %u/%u to %u/%u supported",
+ fival.stepwise.min.numerator, fival.stepwise.min.denominator,
+ fival.stepwise.max.numerator, fival.stepwise.max.denominator);
+ if (fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) {
+ TRACE("with %u/%u step", fival.stepwise.step.numerator,
+ fival.stepwise.step.denominator);
+ }
+ if (((float)fival.stepwise.max.denominator /
+ (float)fival.stepwise.max.numerator) >
+ ((float)fival.stepwise.min.denominator /
+ (float)fival.stepwise.min.numerator)) {
+ min_num = fival.stepwise.max.numerator;
+ min_denom = fival.stepwise.max.denominator;
+ } else {
+ min_num = fival.stepwise.min.numerator;
+ min_denom = fival.stepwise.min.denominator;
+ }
+ }
+ TRACE("The actual min values: %u/%u\n", min_num, min_denom);
+
+ CLEAR(sp);
+ sp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ sp.parm.capture.timeperframe.numerator = min_num;
+ sp.parm.capture.timeperframe.denominator = min_denom;
+
+ if (xioctl(v4l2_fd, VIDIOC_S_PARM, &sp) < 0) {
+ ERR("Failed to set to minimum FPS(%u/%u)\n", min_num, min_denom);
+ }
+}
+
+static uint32_t stop_capturing(void)
+{
+ enum v4l2_buf_type type;
+
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (xioctl(v4l2_fd, VIDIOC_STREAMOFF, &type) < 0) {
+ ERR("Failed to ioctl() with VIDIOC_STREAMOFF: %s\n", strerror(errno));
+ return errno;
+ }
+ return 0;
+}
+
+static uint32_t start_capturing(void)
+{
+ enum v4l2_buf_type type;
+
+ type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (xioctl(v4l2_fd, VIDIOC_STREAMON, &type) < 0) {
+ ERR("Failed to ioctl() with VIDIOC_STREAMON: %s\n", strerror(errno));
+ return errno;
+ }
+ return 0;
+}
+
+static void free_framebuffers(marucam_framebuffer *fb, int buf_num)
+{
+ int i;
+
+ if (fb == NULL) {
+ ERR("The framebuffer is NULL. Failed to release the framebuffer\n");
+ return;
+ } else if (buf_num == 0) {
+ ERR("The buffer count is 0. Failed to release the framebuffer\n");
+ return;
+ } else {
+ TRACE("[%s]:fb(0x%p), buf_num(%d)\n", __func__, fb, buf_num);
+ }
+
+ /* Unmap framebuffers. */
+ for (i = 0; i < buf_num; i++) {
+ if (fb[i].data != NULL) {
+ v4l2_munmap(fb[i].data, fb[i].size);
+ fb[i].data = NULL;
+ fb[i].size = 0;
+ } else {
+ ERR("framebuffer[%d].data is NULL.\n", i);
+ }
+ }
+}
+
+static uint32_t
+mmap_framebuffers(marucam_framebuffer **fb, int *buf_num)
+{
+    struct v4l2_requestbuffers req;
+
+    CLEAR(req);
+    req.count = MARUCAM_DEFAULT_BUFFER_COUNT;
+    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    req.memory = V4L2_MEMORY_MMAP;
+    if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &req) < 0) {
+        if (errno == EINVAL) {
+            ERR("%s does not support memory mapping: %s\n",
+                dev_name, strerror(errno));
+        } else {
+            ERR("Failed to request bufs: %s\n", strerror(errno));
+        }
+        return errno;
+    }
+    if (req.count == 0) {
+        ERR("Insufficient buffer memory on %s\n", dev_name);
+        return EINVAL;
+    }
+
+    *fb = g_new0(marucam_framebuffer, req.count);
+    if (*fb == NULL) {
+        ERR("Not enough memory to allocate framebuffers\n");
+        return ENOMEM;
+    }
+
+    for (*buf_num = 0; *buf_num < req.count; ++*buf_num) {
+        struct v4l2_buffer buf;
+        CLEAR(buf);
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        buf.index = *buf_num;
+        if (xioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf) < 0) {
+            ERR("Failed to ioctl() with VIDIOC_QUERYBUF: %s\n",
+                strerror(errno));
+            return errno;
+        }
+
+        (*fb)[*buf_num].size = buf.length;
+        (*fb)[*buf_num].data = v4l2_mmap(NULL, buf.length,
+                                         PROT_READ | PROT_WRITE,
+                                         MAP_SHARED,
+                                         v4l2_fd, buf.m.offset);
+        if (MAP_FAILED == (*fb)[*buf_num].data) {
+            ERR("Failed to mmap: %s\n", strerror(errno));
+            (*fb)[*buf_num].data = NULL; /* avoid munmap(MAP_FAILED) later */
+            return errno;
+        }
+
+        /* Queue the mapped buffer. */
+        CLEAR(buf);
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        buf.index = *buf_num;
+        if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
+            ERR("Failed to ioctl() with VIDIOC_QBUF: %s\n", strerror(errno));
+            return errno;
+        }
+    }
+    return 0;
+}
+
+static int is_streamon(MaruCamState *state)
+{
+ int st;
+ qemu_mutex_lock(&state->thread_mutex);
+ st = state->streamon;
+ qemu_mutex_unlock(&state->thread_mutex);
+ return (st == _MC_THREAD_STREAMON);
+}
+
+static int is_stream_paused(MaruCamState *state)
+{
+ int st;
+ qemu_mutex_lock(&state->thread_mutex);
+ st = state->streamon;
+ qemu_mutex_unlock(&state->thread_mutex);
+ return (st == _MC_THREAD_PAUSED);
+}
+
+/* sends a frame, YU12/black color */
+static void __raise_empty_intr(MaruCamState *state)
+{
+    void *buf = NULL;
+    qemu_mutex_lock(&state->thread_mutex);
+    if (state->streamon == _MC_THREAD_STREAMON && state->req_frame) {
+        buf = state->vaddr + state->buf_size * (state->req_frame - 1);
+        make_yu12_black(buf, dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height);
+        state->req_frame = 0; /* clear request */
+        state->isr = 0x01; /* set the flag for raising an interrupt */
+        qemu_bh_schedule(state->tx_bh);
+    }
+    qemu_mutex_unlock(&state->thread_mutex);
+}
+
+static void __raise_err_intr(MaruCamState *state)
+{
+    qemu_mutex_lock(&state->thread_mutex);
+    if (state->streamon == _MC_THREAD_STREAMON) {
+        state->req_frame = 0; /* clear request */
+        state->isr = 0x08; /* set the error flag for raising an interrupt */
+        qemu_bh_schedule(state->tx_bh);
+    }
+    qemu_mutex_unlock(&state->thread_mutex);
+}
+
+static void
+notify_buffer_ready(MaruCamState *state, void *ptr, size_t size)
+{
+    void *buf = NULL;
+
+    qemu_mutex_lock(&state->thread_mutex);
+    if (state->streamon == _MC_THREAD_STREAMON) {
+        if (ready_count < MARUCAM_SKIPFRAMES) {
+            /* skip initial frames because the first few can be distorted */
+            ++ready_count;
+            TRACE("Skip %d frame\n", ready_count);
+            qemu_mutex_unlock(&state->thread_mutex);
+            return;
+        }
+        if (state->req_frame == 0) {
+            TRACE("There is no request\n");
+            qemu_mutex_unlock(&state->thread_mutex);
+            return;
+        }
+        buf = state->vaddr + state->buf_size * (state->req_frame - 1);
+        memcpy(buf, ptr, state->buf_size);
+        state->req_frame = 0; /* clear request */
+        state->isr |= 0x01; /* set the flag for raising an interrupt */
+        qemu_bh_schedule(state->tx_bh);
+    }
+    qemu_mutex_unlock(&state->thread_mutex);
+}
+
+static int read_frame(MaruCamState *state)
+{
+ struct v4l2_buffer buf;
+
+ CLEAR(buf);
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ if (xioctl(v4l2_fd, VIDIOC_DQBUF, &buf) < 0) {
+ switch (errno) {
+ case EAGAIN:
+ case EINTR:
+ ERR("DQBUF error, try again: %s\n", strerror(errno));
+ return 0;
+ case EIO:
+ ERR("The v4l2_read() met the EIO\n");
+ if (convert_trial-- == -1) {
+ ERR("Try count for v4l2_read is exceeded: %s\n",
+ strerror(errno));
+ return -1;
+ }
+ return 0;
+ default:
+ ERR("DQBUF error: %s\n", strerror(errno));
+ return -1;
+ }
+ }
+
+ notify_buffer_ready(state, framebuffer[buf.index].data, buf.bytesused);
+
+ if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
+ ERR("QBUF error: %s\n", strerror(errno));
+ return -1;
+ }
+ return 0;
+}
+
+static int __v4l2_streaming(MaruCamState *state)
+{
+ fd_set fds;
+ struct timeval tv;
+ int ret;
+
+ FD_ZERO(&fds);
+ FD_SET(v4l2_fd, &fds);
+
+ tv.tv_sec = 1;
+ tv.tv_usec = 0;
+
+ ret = select(v4l2_fd + 1, &fds, NULL, NULL, &tv);
+ if (ret < 0) {
+ if (errno == EAGAIN || errno == EINTR) {
+ ERR("Select again: %s\n", strerror(errno));
+ return 0;
+ }
+ ERR("Failed to select: %s\n", strerror(errno));
+ __raise_err_intr(state);
+ return -1;
+ } else if (!ret) {
+ timeout_n++;
+ ERR("Select timed out: count(%u)\n", timeout_n);
+ if (ready_count == 0) {
+ if (timeout_n <= MARUCAM_DUMMYFRAME_COUNT) {
+ ERR("Sends dummy data to initialize the camera\n");
+ __raise_empty_intr(state);
+ return 0;
+ } else {
+ ERR("Webcam is busy, failed to a read frame."
+ " Raises an error\n");
+ __raise_err_intr(state);
+ return -1;
+ }
+ }
+ if (timeout_n >= 5) {
+ ERR("Webcam is busy, failed to a read frame. Raises an error\n");
+ __raise_err_intr(state);
+ return -1;
+ }
+ return 0;
+ }
+
+ if (!v4l2_fd || (v4l2_fd == -1)) {
+ ERR("The file descriptor is closed or not opened\n");
+ __raise_err_intr(state);
+ return -1;
+ }
+
+ ret = read_frame(state);
+ if (ret < 0) {
+ ERR("Failed to operate the read_frame()\n");
+ __raise_err_intr(state);
+ return -1;
+ }
+
+ /* clear the skip count for select time-out */
+ if (timeout_n > 0) {
+ timeout_n = 0;
+ }
+
+ return 0;
+}
+
+/* Worker thread */
+static void *marucam_worker_thread(void *thread_param)
+{
+ MaruCamState *state = (MaruCamState *)thread_param;
+
+ while (1) {
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = _MC_THREAD_PAUSED;
+ qemu_cond_wait(&state->thread_cond, &state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
+
+ if (state->destroying) {
+ break;
+ }
+
+ convert_trial = 10;
+ ready_count = 0;
+ timeout_n = 0;
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = _MC_THREAD_STREAMON;
+ qemu_mutex_unlock(&state->thread_mutex);
+ INFO("Streaming on ......\n");
+
+ while (1) {
+ if (is_streamon(state)) {
+ if (__v4l2_streaming(state) < 0) {
+ INFO("...... Streaming off\n");
+ break;
+ }
+ } else {
+ INFO("...... Streaming off\n");
+ break;
+ }
+ }
+ }
+
+ return NULL;
+}
+
+int marucam_device_check(int log_flag)
+{
+    int tmp_fd;
+    struct timeval t1, t2;
+    struct stat st;
+    struct v4l2_fmtdesc format;
+    struct v4l2_frmsizeenum size;
+    struct v4l2_capability cap;
+    int ret = 0;
+
+    gettimeofday(&t1, NULL);
+    if (stat(dev_name, &st) < 0) {
+        fprintf(stdout, "[Webcam] <WARNING> Cannot identify '%s': %s\n",
+                dev_name, strerror(errno));
+    } else {
+        if (!S_ISCHR(st.st_mode)) {
+            fprintf(stdout, "[Webcam] <WARNING>%s is no character device\n",
+                    dev_name);
+        }
+    }
+
+    tmp_fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);
+    if (tmp_fd < 0) {
+        fprintf(stdout, "[Webcam] Camera device open failed: %s\n", dev_name);
+        gettimeofday(&t2, NULL);
+        fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+                t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+        return ret;
+    }
+    if (ioctl(tmp_fd, VIDIOC_QUERYCAP, &cap) < 0) {
+        fprintf(stdout, "[Webcam] Could not query video capabilities\n");
+        close(tmp_fd);
+        gettimeofday(&t2, NULL);
+        fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+                t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+        return ret;
+    }
+    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||
+            !(cap.capabilities & V4L2_CAP_STREAMING)) {
+        fprintf(stdout, "[Webcam] Not supported video driver\n");
+        close(tmp_fd);
+        gettimeofday(&t2, NULL);
+        fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+                t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+        return ret;
+    }
+    ret = 1;
+
+    if (log_flag) {
+        fprintf(stdout, "[Webcam] Driver: %s\n", cap.driver);
+        fprintf(stdout, "[Webcam] Card: %s\n", cap.card);
+        fprintf(stdout, "[Webcam] Bus info: %s\n", cap.bus_info);
+
+        CLEAR(format);
+        format.index = 0;
+        format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+        if (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) < 0) {
+            close(tmp_fd);
+            gettimeofday(&t2, NULL);
+            fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+                    t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+            return ret;
+        }
+
+        do {
+            CLEAR(size);
+            size.index = 0;
+            size.pixel_format = format.pixelformat;
+
+            fprintf(stdout, "[Webcam] PixelFormat: %c%c%c%c\n",
+                    (char)(format.pixelformat),
+                    (char)(format.pixelformat >> 8),
+                    (char)(format.pixelformat >> 16),
+                    (char)(format.pixelformat >> 24));
+
+            if (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) {
+                close(tmp_fd);
+                gettimeofday(&t2, NULL);
+                fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+                        t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+                return ret;
+            }
+
+            if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+                do {
+                    fprintf(stdout, "[Webcam] got discrete frame size %dx%d\n",
+                            size.discrete.width, size.discrete.height);
+                    size.index++;
+                } while (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
+            } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+                fprintf(stdout, "[Webcam] we have stepwise frame sizes:\n");
+                fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",
+                        size.stepwise.min_width, size.stepwise.min_height);
+                fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",
+                        size.stepwise.max_width, size.stepwise.max_height);
+                fprintf(stdout, "[Webcam] step width: %d, step height: %d\n",
+                        size.stepwise.step_width, size.stepwise.step_height);
+            } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+                fprintf(stdout, "[Webcam] we have continuous frame sizes:\n");
+                fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",
+                        size.stepwise.min_width, size.stepwise.min_height);
+                fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",
+                        size.stepwise.max_width, size.stepwise.max_height);
+
+            }
+            format.index++;
+        } while (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) >= 0);
+    }
+
+    close(tmp_fd);
+    gettimeofday(&t2, NULL);
+    fprintf(stdout, "[Webcam] Elapsed time: %lu:%06lu\n",
+            t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+    return ret;
+}
+
+void marucam_device_init(MaruCamState *state)
+{
+ state->destroying = false;
+ qemu_thread_create(&state->thread_id, marucam_worker_thread, (void *)state,
+ QEMU_THREAD_JOINABLE);
+}
+
+void marucam_device_exit(MaruCamState *state)
+{
+ state->destroying = true;
+ qemu_mutex_lock(&state->thread_mutex);
+ qemu_cond_signal(&state->thread_cond);
+ qemu_mutex_unlock(&state->thread_mutex);
+ qemu_thread_join(&state->thread_id);
+}
+
+void marucam_device_open(MaruCamState *state)
+{
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ v4l2_fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
+ if (v4l2_fd < 0) {
+ ERR("The v4l2 device open failed: %s\n", dev_name);
+ param->errCode = EINVAL;
+ return;
+ }
+ INFO("Opened\n");
+
+ /* FIXME : Do not use fixed values */
+ CLEAR(dst_fmt);
+ dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ dst_fmt.fmt.pix.width = 640;
+ dst_fmt.fmt.pix.height = 480;
+ dst_fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
+ dst_fmt.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if (xioctl(v4l2_fd, VIDIOC_S_FMT, &dst_fmt) < 0) {
+ ERR("Failed to set video format: format(0x%x), width:height(%d:%d), "
+ "errstr(%s)\n", dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.width,
+ dst_fmt.fmt.pix.height, strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+ TRACE("Set the default format: w:h(%dx%d), fmt(0x%x), size(%d), "
+ "color(%d), field(%d)\n",
+ dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,
+ dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,
+ dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);
+}
+
+void marucam_device_start_preview(MaruCamState *state)
+{
+    struct timespec req;
+    MaruCamParam *param = state->param;
+    param->top = 0;
+    req.tv_sec = 0;
+    req.tv_nsec = 10000000;
+
+    INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u)\n",
+         (char)(dst_fmt.fmt.pix.pixelformat),
+         (char)(dst_fmt.fmt.pix.pixelformat >> 8),
+         (char)(dst_fmt.fmt.pix.pixelformat >> 16),
+         (char)(dst_fmt.fmt.pix.pixelformat >> 24),
+         dst_fmt.fmt.pix.width,
+         dst_fmt.fmt.pix.height,
+         dst_fmt.fmt.pix.sizeimage);
+
+    param->errCode = mmap_framebuffers(&framebuffer, &n_framebuffer);
+    if (param->errCode) {
+        ERR("Failed to mmap framebuffers\n");
+        if (framebuffer != NULL) {
+            free_framebuffers(framebuffer, n_framebuffer);
+            g_free(framebuffer);
+            framebuffer = NULL;
+            n_framebuffer = 0;
+        }
+        return;
+    }
+
+    param->errCode = start_capturing();
+    if (param->errCode) {
+        if (framebuffer != NULL) {
+            free_framebuffers(framebuffer, n_framebuffer);
+            g_free(framebuffer);
+            framebuffer = NULL;
+            n_framebuffer = 0;
+        }
+        return;
+    }
+
+    INFO("Starting preview\n");
+    state->buf_size = dst_fmt.fmt.pix.sizeimage;
+    qemu_mutex_lock(&state->thread_mutex);
+    qemu_cond_signal(&state->thread_cond);
+    qemu_mutex_unlock(&state->thread_mutex);
+
+    /* nanosleep until thread is streamon */
+    while (!is_streamon(state)) {
+        nanosleep(&req, NULL);
+    }
+}
+
+void marucam_device_stop_preview(MaruCamState *state)
+{
+    struct timespec req;
+    struct v4l2_requestbuffers reqbuf;
+    MaruCamParam *param = state->param;
+    param->top = 0;
+    req.tv_sec = 0;
+    req.tv_nsec = 50000000;
+
+    if (is_streamon(state)) {
+        qemu_mutex_lock(&state->thread_mutex);
+        state->streamon = _MC_THREAD_STREAMOFF;
+        qemu_mutex_unlock(&state->thread_mutex);
+
+        /* nanosleep until thread is paused */
+        while (!is_stream_paused(state)) {
+            nanosleep(&req, NULL);
+        }
+    }
+
+    param->errCode = stop_capturing();
+    if (framebuffer != NULL) {
+        free_framebuffers(framebuffer, n_framebuffer);
+        g_free(framebuffer);
+        framebuffer = NULL;
+        n_framebuffer = 0;
+    }
+    state->buf_size = 0;
+
+    CLEAR(reqbuf); /* zero count and the reserved[] fields */
+    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    reqbuf.memory = V4L2_MEMORY_MMAP;
+    if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf) < 0) {
+        ERR("Failed to ioctl() with VIDIOC_REQBUF in stop_preview: %s\n",
+            strerror(errno));
+    }
+    INFO("Stopping preview\n");
+}
+
+void marucam_device_s_param(MaruCamState *state)
+{
+    MaruCamParam *param = state->param;
+
+    param->top = 0;
+
+    /* If KVM is enabled, we use the default FPS of the webcam.
+     * If KVM is disabled, we use the minimum FPS of the webcam */
+    if (!kvm_enabled()) {
+        set_maxframeinterval(state, dst_fmt.fmt.pix.pixelformat,
+                             dst_fmt.fmt.pix.width,
+                             dst_fmt.fmt.pix.height);
+    }
+}
+
+void marucam_device_g_param(MaruCamState *state)
+{
+ MaruCamParam *param = state->param;
+
+ /* We use default FPS of the webcam
+ * return a fixed value on guest ini file (1/30).
+ */
+ param->top = 0;
+ param->stack[0] = 0x1000; /* V4L2_CAP_TIMEPERFRAME */
+ param->stack[1] = 1; /* numerator */
+ param->stack[2] = 30; /* denominator */
+}
+
+void marucam_device_s_fmt(MaruCamState *state)
+{
+ struct v4l2_format format;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(format);
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.fmt.pix.width = param->stack[0];
+ format.fmt.pix.height = param->stack[1];
+ format.fmt.pix.pixelformat = param->stack[2];
+ format.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if (xioctl(v4l2_fd, VIDIOC_S_FMT, &format) < 0) {
+ ERR("Failed to set video format: format(0x%x), width:height(%d:%d), "
+ "errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,
+ format.fmt.pix.height, strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+
+ memcpy(&dst_fmt, &format, sizeof(format));
+ param->stack[0] = dst_fmt.fmt.pix.width;
+ param->stack[1] = dst_fmt.fmt.pix.height;
+ param->stack[2] = dst_fmt.fmt.pix.field;
+ param->stack[3] = dst_fmt.fmt.pix.pixelformat;
+ param->stack[4] = dst_fmt.fmt.pix.bytesperline;
+ param->stack[5] = dst_fmt.fmt.pix.sizeimage;
+ param->stack[6] = dst_fmt.fmt.pix.colorspace;
+ param->stack[7] = dst_fmt.fmt.pix.priv;
+ TRACE("Set the format: w:h(%dx%d), fmt(0x%x), size(%d), "
+ "color(%d), field(%d)\n",
+ dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,
+ dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,
+ dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);
+}
+
+void marucam_device_g_fmt(MaruCamState *state)
+{
+ struct v4l2_format format;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(format);
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (xioctl(v4l2_fd, VIDIOC_G_FMT, &format) < 0) {
+ ERR("Failed to get video format: %s\n", strerror(errno));
+ param->errCode = errno;
+ } else {
+ param->stack[0] = format.fmt.pix.width;
+ param->stack[1] = format.fmt.pix.height;
+ param->stack[2] = format.fmt.pix.field;
+ param->stack[3] = format.fmt.pix.pixelformat;
+ param->stack[4] = format.fmt.pix.bytesperline;
+ param->stack[5] = format.fmt.pix.sizeimage;
+ param->stack[6] = format.fmt.pix.colorspace;
+ param->stack[7] = format.fmt.pix.priv;
+ TRACE("Get the format: w:h(%dx%d), fmt(0x%x), size(%d), "
+ "color(%d), field(%d)\n",
+ format.fmt.pix.width, format.fmt.pix.height,
+ format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,
+ format.fmt.pix.colorspace, format.fmt.pix.field);
+ }
+}
+
+void marucam_device_try_fmt(MaruCamState *state)
+{
+ struct v4l2_format format;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(format);
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ format.fmt.pix.width = param->stack[0];
+ format.fmt.pix.height = param->stack[1];
+ format.fmt.pix.pixelformat = param->stack[2];
+ format.fmt.pix.field = V4L2_FIELD_ANY;
+
+ if (xioctl(v4l2_fd, VIDIOC_TRY_FMT, &format) < 0) {
+ ERR("Failed to check video format: format(0x%x), width:height(%d:%d),"
+ " errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,
+ format.fmt.pix.height, strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+ param->stack[0] = format.fmt.pix.width;
+ param->stack[1] = format.fmt.pix.height;
+ param->stack[2] = format.fmt.pix.field;
+ param->stack[3] = format.fmt.pix.pixelformat;
+ param->stack[4] = format.fmt.pix.bytesperline;
+ param->stack[5] = format.fmt.pix.sizeimage;
+ param->stack[6] = format.fmt.pix.colorspace;
+ param->stack[7] = format.fmt.pix.priv;
+ TRACE("Check the format: w:h(%dx%d), fmt(0x%x), size(%d), "
+ "color(%d), field(%d)\n",
+ format.fmt.pix.width, format.fmt.pix.height,
+ format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,
+ format.fmt.pix.colorspace, format.fmt.pix.field);
+}
+
+void marucam_device_enum_fmt(MaruCamState *state)
+{
+    uint32_t index;
+    MaruCamParam *param = state->param;
+
+    param->top = 0;
+    index = param->stack[0];
+
+    if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {
+        param->errCode = EINVAL;
+        return;
+    }
+    param->stack[1] = 0; /* flags = NONE */
+    param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
+    /* set description */
+    switch (supported_dst_pixfmts[index].fmt) {
+    case V4L2_PIX_FMT_YUYV:
+        memcpy(&param->stack[3], "YUYV", 32);
+        break;
+    case V4L2_PIX_FMT_YUV420:
+        memcpy(&param->stack[3], "YU12", 32);
+        break;
+    case V4L2_PIX_FMT_YVU420:
+        memcpy(&param->stack[3], "YV12", 32);
+        break;
+    default:
+        ERR("Invalid pixel format\n");
+        param->errCode = EINVAL;
+        break;
+    }
+}
+
+void marucam_device_qctrl(MaruCamState *state)
+{
+ uint32_t i;
+ char name[32] = {0,};
+ struct v4l2_queryctrl ctrl;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(ctrl);
+ ctrl.id = param->stack[0];
+
+ switch (ctrl.id) {
+ case V4L2_CID_BRIGHTNESS:
+ TRACE("Query : BRIGHTNESS\n");
+ memcpy((void *)name, (void *)"brightness", 32);
+ i = 0;
+ break;
+ case V4L2_CID_CONTRAST:
+ TRACE("Query : CONTRAST\n");
+ memcpy((void *)name, (void *)"contrast", 32);
+ i = 1;
+ break;
+ case V4L2_CID_SATURATION:
+ TRACE("Query : SATURATION\n");
+ memcpy((void *)name, (void *)"saturation", 32);
+ i = 2;
+ break;
+ case V4L2_CID_SHARPNESS:
+ TRACE("Query : SHARPNESS\n");
+ memcpy((void *)name, (void *)"sharpness", 32);
+ i = 3;
+ break;
+ default:
+ ERR("Invalid control ID\n");
+ param->errCode = EINVAL;
+ return;
+ }
+
+ if (xioctl(v4l2_fd, VIDIOC_QUERYCTRL, &ctrl) < 0) {
+ if (errno != EINVAL) {
+ ERR("Failed to query video controls: %s\n", strerror(errno));
+ }
+ param->errCode = errno;
+ return;
+ } else {
+ struct v4l2_control sctrl;
+ CLEAR(sctrl);
+ sctrl.id = ctrl.id;
+ if ((ctrl.maximum + ctrl.minimum) == 0) {
+ sctrl.value = 0;
+ } else {
+ sctrl.value = (ctrl.maximum + ctrl.minimum) / 2;
+ }
+ if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &sctrl) < 0) {
+ ERR("Failed to set control value: id(0x%x), value(%d), "
+ "errstr(%s)\n", sctrl.id, sctrl.value, strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+ qctrl_tbl[i].hit = 1;
+ qctrl_tbl[i].min = ctrl.minimum;
+ qctrl_tbl[i].max = ctrl.maximum;
+ qctrl_tbl[i].step = ctrl.step;
+ qctrl_tbl[i].init_val = ctrl.default_value;
+ }
+
+ /* set fixed values by FW configuration file */
+ param->stack[0] = ctrl.id;
+ param->stack[1] = MARUCAM_CTRL_VALUE_MIN; /* minimum */
+ param->stack[2] = MARUCAM_CTRL_VALUE_MAX; /* maximum */
+ param->stack[3] = MARUCAM_CTRL_VALUE_STEP; /* step */
+ param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */
+ param->stack[5] = ctrl.flags;
+ /* name field setting */
+ memcpy(¶m->stack[6], (void *)name, sizeof(ctrl.name));
+}
+
+void marucam_device_s_ctrl(MaruCamState *state)
+{
+ uint32_t i;
+ struct v4l2_control ctrl;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(ctrl);
+ ctrl.id = param->stack[0];
+
+ switch (ctrl.id) {
+ case V4L2_CID_BRIGHTNESS:
+ i = 0;
+ TRACE("%d is set to the value of the BRIGHTNESS\n", param->stack[1]);
+ break;
+ case V4L2_CID_CONTRAST:
+ i = 1;
+ TRACE("%d is set to the value of the CONTRAST\n", param->stack[1]);
+ break;
+ case V4L2_CID_SATURATION:
+ i = 2;
+ TRACE("%d is set to the value of the SATURATION\n", param->stack[1]);
+ break;
+ case V4L2_CID_SHARPNESS:
+ i = 3;
+ TRACE("%d is set to the value of the SHARPNESS\n", param->stack[1]);
+ break;
+ default:
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);
+ param->errCode = EINVAL;
+ return;
+ }
+
+ ctrl.value = value_convert_from_guest(qctrl_tbl[i].min,
+ qctrl_tbl[i].max, param->stack[1]);
+ if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {
+ ERR("Failed to set control value: id(0x%x), value(r:%d, c:%d), "
+ "errstr(%s)\n", ctrl.id, param->stack[1], ctrl.value,
+ strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+}
+
+void marucam_device_g_ctrl(MaruCamState *state)
+{
+ uint32_t i;
+ struct v4l2_control ctrl;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ CLEAR(ctrl);
+ ctrl.id = param->stack[0];
+
+ switch (ctrl.id) {
+ case V4L2_CID_BRIGHTNESS:
+ TRACE("Gets the value of the BRIGHTNESS\n");
+ i = 0;
+ break;
+ case V4L2_CID_CONTRAST:
+ TRACE("Gets the value of the CONTRAST\n");
+ i = 1;
+ break;
+ case V4L2_CID_SATURATION:
+ TRACE("Gets the value of the SATURATION\n");
+ i = 2;
+ break;
+ case V4L2_CID_SHARPNESS:
+ TRACE("Gets the value of the SHARPNESS\n");
+ i = 3;
+ break;
+ default:
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);
+ param->errCode = EINVAL;
+ return;
+ }
+
+ if (xioctl(v4l2_fd, VIDIOC_G_CTRL, &ctrl) < 0) {
+ ERR("Failed to get video control value: %s\n", strerror(errno));
+ param->errCode = errno;
+ return;
+ }
+ param->stack[0] = value_convert_to_guest(qctrl_tbl[i].min,
+ qctrl_tbl[i].max, ctrl.value);
+ TRACE("Value: %d\n", param->stack[0]);
+}
+
+void marucam_device_enum_fsizes(MaruCamState *state)
+{
+ uint32_t index, pixfmt, i;
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+ index = param->stack[0];
+ pixfmt = param->stack[1];
+
+ if (index >= ARRAY_SIZE(supported_dst_frames)) {
+ param->errCode = EINVAL;
+ return;
+ }
+ for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
+ if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ break;
+ }
+ }
+
+ if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
+ param->errCode = EINVAL;
+ return;
+ }
+
+ param->stack[0] = supported_dst_frames[index].width;
+ param->stack[1] = supported_dst_frames[index].height;
+}
+
+void marucam_device_enum_fintv(MaruCamState *state)
+{
+ MaruCamParam *param = state->param;
+
+ param->top = 0;
+
+ /* switch by index(param->stack[0]) */
+ switch (param->stack[0]) {
+ case 0:
+ /* we only use 1/30 frame interval */
+ param->stack[1] = 30; /* denominator */
+ break;
+ default:
+ param->errCode = EINVAL;
+ return;
+ }
+ param->stack[0] = 1; /* numerator */
+}
+
+void marucam_device_close(MaruCamState *state)
+{
+ if (!is_stream_paused(state)) {
+ marucam_device_stop_preview(state);
+ }
+
+ marucam_reset_controls();
+
+ v4l2_close(v4l2_fd);
+ v4l2_fd = 0;
+ INFO("Closed\n");
+}