Use device memory directly instead of going through CPU I/O.
Argument memory is separated into the data pointer and the return variable.
For the Windows and Darwin modules, added the minimum v4l2 declarations, copied from videodev2.h.
Changed the log type from ERR to INFO in the marucam_check() function.
Modified the maru_camera_convert module.
Change-Id: I0239158aac07a217b8a47f8df239f96bc38f45ea
Signed-off-by: Jinhyung Jo <jinhyung.jo@samsung.com>
LIBS += -lv4l2 -lv4lconvert
endif
ifdef CONFIG_WIN32
-obj-y += maru_camera_win32.o maru_camera_util.o
+obj-y += maru_camera_win32.o maru_camera_convert.o
LIBS += -lole32 -loleaut32 -luuid -lstrmiids
endif
ifdef CONFIG_DARWIN
-obj-y += maru_camera_darwin.o maru_camera_util.o
+obj-y += maru_camera_darwin.o maru_camera_convert.o
LIBS += -framework Foundation -framework SystemConfiguration
LIBS += -framework Cocoa -framework QTKit -framework CoreVideo
LIBS += -framework AppKit
#define MARU_PCI_CAMERA_DEVICE_NAME "maru-camera"
-#define MARUCAM_MEM_SIZE (4 * 1024 * 1024) /* 4MB */
-#define MARUCAM_REG_SIZE (256) /* 64 * 4Byte */
+/* PCI region size must be pow2. */
+/* set to 1024(2^10) * 1024(2^10) * 4(2^2) = 4,194,304(2^22) */
+#define MARUCAM_MEM_SIZE (1 << 22)
+#define MARUCAM_REG_SIZE (1 << 8) /* 64 * 4Byte = 256(2^8) */
+/* RAM type region must be at least 4KB */
+#define MARUCAM_IOMEM_SIZE (1 << 12) /* 4096Byte(2^12) */
/*
* I/O functions
}
qemu_mutex_unlock(&state->thread_mutex);
break;
- case MARUCAM_CMD_G_DATA:
- ret = state->param->stack[state->param->top++];
- break;
case MARUCAM_CMD_OPEN:
case MARUCAM_CMD_CLOSE:
- case MARUCAM_CMD_START_PREVIEW:
- case MARUCAM_CMD_STOP_PREVIEW:
- case MARUCAM_CMD_S_PARAM:
- case MARUCAM_CMD_G_PARAM:
+ case MARUCAM_CMD_STREAMON:
+ case MARUCAM_CMD_STREAMOFF:
+ case MARUCAM_CMD_S_PARM:
+ case MARUCAM_CMD_G_PARM:
case MARUCAM_CMD_ENUM_FMT:
case MARUCAM_CMD_TRY_FMT:
case MARUCAM_CMD_S_FMT:
case MARUCAM_CMD_G_FMT:
- case MARUCAM_CMD_QCTRL:
+ case MARUCAM_CMD_QUERYCTRL:
case MARUCAM_CMD_S_CTRL:
case MARUCAM_CMD_G_CTRL:
case MARUCAM_CMD_ENUM_FSIZES:
case MARUCAM_CMD_ENUM_FINTV:
- ret = state->param->errCode;
- state->param->errCode = 0;
+ case MARUCAM_CMD_EXIT:
+ ret = state->ret_val;
break;
+ case MARUCAM_CMD_REQFRAME:
+ qemu_mutex_lock(&state->thread_mutex);
+ /* guest reads back the zero-based index of the frame it requested */
+ ret = state->req_frame - 1;
+ qemu_mutex_unlock(&state->thread_mutex);
+ /* break is required here: without it control falls through into the
+  * default case, which logs a bogus "Not supported command" error and
+  * clobbers ret with EINVAL. */
+ break;
default:
ERR("Not supported command: 0x%x\n", offset);
ret = EINVAL;
{
MaruCamState *state = (MaruCamState *)opaque;
+ if (state->backend == NULL) {
+ ERR("There is no camera device\n");
+ state->ret_val = ENODEV;
+ return;
+ }
+
+ state->ret_val = 0;
switch (offset & 0xFF) {
case MARUCAM_CMD_OPEN:
- marucam_device_open(state);
+ state->backend->open(state);
break;
case MARUCAM_CMD_CLOSE:
- marucam_device_close(state);
+ state->backend->close(state);
break;
- case MARUCAM_CMD_START_PREVIEW:
- marucam_device_start_preview(state);
+ case MARUCAM_CMD_STREAMON:
+ state->backend->stream_on(state);
break;
- case MARUCAM_CMD_STOP_PREVIEW:
- marucam_device_stop_preview(state);
- memset(state->vaddr, 0, MARUCAM_MEM_SIZE);
+ case MARUCAM_CMD_STREAMOFF:
+ state->backend->stream_off(state);
+ memset(state->fb_ptr, 0, MARUCAM_MEM_SIZE);
break;
- case MARUCAM_CMD_S_PARAM:
- marucam_device_s_param(state);
+ case MARUCAM_CMD_S_PARM:
+ state->backend->s_parm(state);
break;
- case MARUCAM_CMD_G_PARAM:
- marucam_device_g_param(state);
+ case MARUCAM_CMD_G_PARM:
+ state->backend->g_parm(state);
break;
case MARUCAM_CMD_ENUM_FMT:
- marucam_device_enum_fmt(state);
+ state->backend->enum_fmt(state);
break;
case MARUCAM_CMD_TRY_FMT:
- marucam_device_try_fmt(state);
+ state->backend->try_fmt(state);
break;
case MARUCAM_CMD_S_FMT:
- marucam_device_s_fmt(state);
+ state->backend->s_fmt(state);
break;
case MARUCAM_CMD_G_FMT:
- marucam_device_g_fmt(state);
+ state->backend->g_fmt(state);
break;
- case MARUCAM_CMD_QCTRL:
- marucam_device_qctrl(state);
+ case MARUCAM_CMD_QUERYCTRL:
+ state->backend->query_ctrl(state);
break;
case MARUCAM_CMD_S_CTRL:
- marucam_device_s_ctrl(state);
+ state->backend->s_ctrl(state);
break;
case MARUCAM_CMD_G_CTRL:
- marucam_device_g_ctrl(state);
+ state->backend->g_ctrl(state);
break;
case MARUCAM_CMD_ENUM_FSIZES:
- marucam_device_enum_fsizes(state);
+ state->backend->enum_framesizes(state);
break;
case MARUCAM_CMD_ENUM_FINTV:
- marucam_device_enum_fintv(state);
- break;
- case MARUCAM_CMD_S_DATA:
- state->param->stack[state->param->top++] = value;
- break;
- case MARUCAM_CMD_DATACLR:
- memset(state->param, 0, sizeof(MaruCamParam));
+ state->backend->enum_frameintervals(state);
break;
case MARUCAM_CMD_REQFRAME:
qemu_mutex_lock(&state->thread_mutex);
/* Check available webcam
* If there is not one, you can't use the camera.
*/
- if (!marucam_device_check(1)) {
+ if (!marucam_device_check()) {
s->initialized = false;
- ERR("Failed to check the camera device, "
+ INFO("Failed to check the camera device, "
"You can *not* use the camera\n");
return 0;
}
pci_config_set_interrupt_pin(pci_conf, 0x03);
- memory_region_init_ram(&s->vram, OBJECT(s), "marucamera.ram", MARUCAM_MEM_SIZE,
- &error_abort);
- s->vaddr = memory_region_get_ram_ptr(&s->vram);
- memset(s->vaddr, 0, MARUCAM_MEM_SIZE);
+ memory_region_init_ram(&s->fbmem, OBJECT(s), "marucamera.fbmem",
+ MARUCAM_MEM_SIZE, &error_abort);
+ s->fb_ptr = memory_region_get_ram_ptr(&s->fbmem);
+ memset(s->fb_ptr, 0, MARUCAM_MEM_SIZE);
+
+ memory_region_init_ram(&s->iomem, OBJECT(s), "marucamera.iomem",
+ MARUCAM_IOMEM_SIZE, &error_abort);
+ s->io_ptr = memory_region_get_ram_ptr(&s->iomem);
+ memset(s->io_ptr, 0, MARUCAM_IOMEM_SIZE);
- memory_region_init_io(&s->mmio, OBJECT(s),
+ memory_region_init_io(&s->ioreg, OBJECT(s),
&maru_camera_mmio_ops,
s,
"maru-camera-mmio",
MARUCAM_REG_SIZE);
- pci_register_bar(&s->dev, 0, PCI_BASE_ADDRESS_MEM_PREFETCH, &s->vram);
- pci_register_bar(&s->dev, 1, PCI_BASE_ADDRESS_SPACE_MEMORY, &s->mmio);
+ pci_register_bar(&s->dev, 0, PCI_BASE_ADDRESS_MEM_PREFETCH, &s->fbmem);
+ pci_register_bar(&s->dev, 1, PCI_BASE_ADDRESS_MEM_PREFETCH, &s->iomem);
+ pci_register_bar(&s->dev, 2, PCI_BASE_ADDRESS_SPACE_MEMORY, &s->ioreg);
/* for worker thread */
- s->param = (MaruCamParam *)g_malloc0(sizeof(MaruCamParam));
qemu_cond_init(&s->thread_cond);
qemu_mutex_init(&s->thread_mutex);
- marucam_device_init(s);
+ s->backend = marucam_backend_create(s);
+ s->backend->init(s);
s->tx_bh = qemu_bh_new(marucam_tx_bh, s);
- s->initialized = true;
INFO("initialize maru-camera device\n");
return 0;
OBJECT_CHECK(MaruCamState, pci_dev, MARU_PCI_CAMERA_DEVICE_NAME);
if (s->initialized) {
- marucam_device_exit(s);
- g_free(s->param);
+ s->backend->release(s);
qemu_cond_destroy(&s->thread_cond);
qemu_mutex_destroy(&s->thread_mutex);
}
MaruCamState *s = (MaruCamState *)d;
if (s->initialized) {
- marucam_device_close(s);
+ s->backend->reset(s);
qemu_mutex_lock(&s->thread_mutex);
s->isr = s->streamon = s->req_frame = s->buf_size = 0;
qemu_mutex_unlock(&s->thread_mutex);
- memset(s->vaddr, 0, MARUCAM_MEM_SIZE);
- memset(s->param, 0x00, sizeof(MaruCamParam));
+ memset(s->fb_ptr, 0, MARUCAM_MEM_SIZE);
+ memset(s->io_ptr, 0, MARUCAM_IOMEM_SIZE);
INFO("reset maru-camera device\n");
}
}
#include "hw/pci/pci.h"
#include "qemu/thread.h"
-#define MARUCAM_MAX_PARAM 20
#define MARUCAM_SKIPFRAMES 2
+#define MARUCAM_THREAD_NAME "marucam_worker_thread"
+
/* must sync with GUEST camera_driver */
#define MARUCAM_CMD_INIT 0x00
#define MARUCAM_CMD_OPEN 0x04
#define MARUCAM_CMD_CLOSE 0x08
#define MARUCAM_CMD_ISR 0x0C
-#define MARUCAM_CMD_START_PREVIEW 0x10
-#define MARUCAM_CMD_STOP_PREVIEW 0x14
-#define MARUCAM_CMD_S_PARAM 0x18
-#define MARUCAM_CMD_G_PARAM 0x1C
+#define MARUCAM_CMD_STREAMON 0x10
+#define MARUCAM_CMD_STREAMOFF 0x14
+#define MARUCAM_CMD_S_PARM 0x18
+#define MARUCAM_CMD_G_PARM 0x1C
#define MARUCAM_CMD_ENUM_FMT 0x20
#define MARUCAM_CMD_TRY_FMT 0x24
#define MARUCAM_CMD_S_FMT 0x28
#define MARUCAM_CMD_G_FMT 0x2C
-#define MARUCAM_CMD_QCTRL 0x30
+#define MARUCAM_CMD_QUERYCTRL 0x30
#define MARUCAM_CMD_S_CTRL 0x34
#define MARUCAM_CMD_G_CTRL 0x38
#define MARUCAM_CMD_ENUM_FSIZES 0x3C
#define MARUCAM_CMD_ENUM_FINTV 0x40
-#define MARUCAM_CMD_S_DATA 0x44
-#define MARUCAM_CMD_G_DATA 0x48
-#define MARUCAM_CMD_DATACLR 0x50
-#define MARUCAM_CMD_REQFRAME 0x54
+#define MARUCAM_CMD_REQFRAME 0x44
+#define MARUCAM_CMD_EXIT 0x48
+
+/* Worker-thread streaming states, kept in state->streamon and
+ * read/written under state->thread_mutex. */
+enum {
+ _MC_THREAD_PAUSED,
+ _MC_THREAD_STREAMON,
+ _MC_THREAD_STREAMOFF,
+};
+
+/* Common min/default/max/step values for the emulated camera controls.
+ * NOTE(review): naming suggests these back query_ctrl/s_ctrl/g_ctrl —
+ * confirm against the backend implementations. */
+#define MARUCAM_CTRL_VALUE_MAX 20
+#define MARUCAM_CTRL_VALUE_MIN 1
+#define MARUCAM_CTRL_VALUE_MID 10
+#define MARUCAM_CTRL_VALUE_STEP 1
typedef struct MaruCamState MaruCamState;
-typedef struct MaruCamParam MaruCamParam;
-struct MaruCamParam {
- uint32_t top;
- uint32_t retVal;
- uint32_t errCode;
- uint32_t stack[MARUCAM_MAX_PARAM];
-};
+/* Dispatch table for a platform camera backend.
+ * Each function pointer implements one MARUCAM_CMD_* guest command.
+ * The MMIO write handler clears state->ret_val before dispatching and
+ * the read handler returns it, so an op is expected to set ret_val to
+ * a non-zero error code on failure. */
+typedef struct MaruCamBackend {
+ MaruCamState *state;
+
+ uint32_t type; /* backend type id — TODO confirm the value set used */
+
+ void (*init)(MaruCamState *state);
+ void (*release)(MaruCamState *state);
+ void (*reset)(MaruCamState *state);
+ void (*open)(MaruCamState *state);
+ void (*close)(MaruCamState *state);
+ void (*stream_on)(MaruCamState *state);
+ void (*stream_off)(MaruCamState *state);
+ void (*s_parm)(MaruCamState *state);
+ void (*g_parm)(MaruCamState *state);
+ void (*enum_fmt)(MaruCamState *state);
+ void (*try_fmt)(MaruCamState *state);
+ void (*s_fmt)(MaruCamState *state);
+ void (*g_fmt)(MaruCamState *state);
+ void (*query_ctrl)(MaruCamState *state);
+ void (*s_ctrl)(MaruCamState *state);
+ void (*g_ctrl)(MaruCamState *state);
+ void (*enum_framesizes)(MaruCamState *state);
+ void (*enum_frameintervals)(MaruCamState *state);
+} MaruCamBackend;
struct MaruCamState {
PCIDevice dev;
- MaruCamParam *param;
QemuThread thread_id;
QemuMutex thread_mutex; /* fixed: line was terminated with a stray ";;" */
QemuCond thread_cond;
bool initialized;
bool destroying;
- void *vaddr; /* vram ptr */
+ void *fb_ptr; /* fbmem ptr (frame buffer RAM) */
+ void *io_ptr; /* iomem ptr (argument/IO RAM) */
uint32_t isr;
uint32_t streamon;
uint32_t buf_size;
uint32_t req_frame;
+ uint32_t ret_val; /* result of the last backend command, returned by MMIO read */
- MemoryRegion vram;
- MemoryRegion mmio;
+ MemoryRegion fbmem; /* BAR 0: prefetchable frame buffer RAM */
+ MemoryRegion iomem; /* BAR 1: prefetchable argument RAM */
+ MemoryRegion ioreg; /* BAR 2: MMIO command registers */
+
+ MaruCamBackend *backend;
};
-/* ------------------------------------------------------------------------- */
-/* Fucntion prototype */
-/* ------------------------------------------------------------------------- */
-int marucam_device_check(int log_flag);
-void marucam_device_init(MaruCamState *state);
-void marucam_device_exit(MaruCamState *state);
-void marucam_device_open(MaruCamState *state);
-void marucam_device_close(MaruCamState *state);
-void marucam_device_start_preview(MaruCamState *state);
-void marucam_device_stop_preview(MaruCamState *state);
-void marucam_device_s_param(MaruCamState *state);
-void marucam_device_g_param(MaruCamState *state);
-void marucam_device_s_fmt(MaruCamState *state);
-void marucam_device_g_fmt(MaruCamState *state);
-void marucam_device_try_fmt(MaruCamState *state);
-void marucam_device_enum_fmt(MaruCamState *state);
-void marucam_device_qctrl(MaruCamState *state);
-void marucam_device_s_ctrl(MaruCamState *state);
-void marucam_device_g_ctrl(MaruCamState *state);
-void marucam_device_enum_fsizes(MaruCamState *state);
-void marucam_device_enum_fintv(MaruCamState *state);
+MaruCamBackend *marucam_backend_create(MaruCamState *state);
+
+int marucam_device_check(void);
int maru_camera_pci_init(PCIBus *bus);
*/
#include "qemu-common.h"
+#include "videodev2_min.h"
#include "maru_camera_convert.h"
#include "debug_ch.h"
uint32_t width, uint32_t height);
static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
uint32_t width, uint32_t height);
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height);
-
static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height, uint32_t yvu);
static void rgb24_to_yuv420(const unsigned char *src, unsigned char *dest,
- uint32_t width, uint32_t height, uint32_t yvu);
+ uint32_t width, uint32_t height, uint32_t yvu, bool reverse);
static void rgb24_to_yuyv(unsigned char *src, unsigned char *dest,
- uint32_t width, uint32_t height);
+ uint32_t width, uint32_t height, bool reverse);
static void yuv420_to_yvu420(unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height);
static void yuv420_to_yuyv(unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height);
+/* Return the per-line byte stride reported for a V4L2 pixel format.
+ * For the planar YUV420/YVU420 formats this is the Y-plane stride
+ * (width); packed YUYV — and any unrecognized format — uses width * 2.
+ * Note this deliberately differs from get_sizeimage(), which accounts
+ * for all three planes of the planar formats. */
+uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width)
+{
+ uint32_t bytesperline;
+
+ switch (pixfmt) {
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_YVU420:
+ bytesperline = width;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ default:
+ bytesperline = width * 2;
+ break;
+ }
+
+ return bytesperline;
+}
+
+/* Return the total image size in bytes for a V4L2 pixel format.
+ * YUV420/YVU420 carry 12 bits per pixel, so the effective per-line
+ * byte count is (width * 12) >> 3; packed YUYV (and any unknown
+ * format) uses width * 2 bytes per line. */
+uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height)
+{
+ uint32_t bytesperline; /* effective bytes per line, all planes folded in */
+
+ switch (pixfmt) {
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_YVU420:
+ bytesperline = (width * 12) >> 3;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ default:
+ bytesperline = width * 2;
+ break;
+ }
+
+ return bytesperline * height;
+}
+
/* Convert pixel format to YUV420 */
uint8_t convert_frame(uint32_t src_format, uint32_t dst_format,
uint32_t width, uint32_t height, size_t frame_size,
- void *src_buf, void *dst_buf)
+ void *src_buf, void *dst_buf, bool reverse)
{
switch (src_format) {
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YUYV:
switch (dst_format) {
case V4L2_PIX_FMT_YUV420:
- //YUYVToYUV420(src_buf, dst_buf, width, height);
yuyv_to_yuv420(src_buf, dst_buf, width, height, 0);
break;
case V4L2_PIX_FMT_YVU420:
return 1;
}
break;
- case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_RGB24:
switch (dst_format) {
case V4L2_PIX_FMT_YUV420:
- rgb24_to_yuv420(src_buf, dst_buf, width, height, 0);
+ rgb24_to_yuv420(src_buf, dst_buf, width, height, 0, reverse);
break;
case V4L2_PIX_FMT_YVU420:
- rgb24_to_yuv420(src_buf, dst_buf, width, height, 1);
+ rgb24_to_yuv420(src_buf, dst_buf, width, height, 1, reverse);
break;
case V4L2_PIX_FMT_YUYV:
- rgb24_to_yuyv(src_buf, dst_buf, width, height);
+ rgb24_to_yuyv(src_buf, dst_buf, width, height, reverse);
break;
default:
ERR("Cannot convert from the pixel format (%.4s)...\n",
}
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height)
-{
- uint32_t i, j;
-
- /* Source*/
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1;
- unsigned char *ptrsrccb3;
- unsigned char *ptrsrccr1;
- unsigned char *ptrsrccr3;
- uint32_t srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc ;
- ptrsrcy2 = bufsrc + (width << 1);
- ptrsrcy3 = bufsrc + (width << 1) * 2;
- ptrsrcy4 = bufsrc + (width << 1) * 3;
-
- ptrsrccb1 = bufsrc + 1;
- ptrsrccb3 = bufsrc + (width << 1) * 2 + 1;
-
- ptrsrccr1 = bufsrc + 3;
- ptrsrccr3 = bufsrc + (width << 1) * 2 + 3;
-
- srcystride = (width << 1) * 3;
- srcccstride = (width << 1) * 3;
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- uint32_t destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width * height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width * height) >> 2)
- + (width >> 1);
-
- destystride = width * 3;
- destccstride = (width >> 1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdestcb1++) = (*ptrsrccb1);
- (*ptrdestcb2++) = (*ptrsrccb3);
-
- ptrsrccb1 += 4;
- ptrsrccb3 += 4;
-
- (*ptrdestcr1++) = (*ptrsrccr1);
- (*ptrdestcr2++) = (*ptrsrccr3);
-
- ptrsrccr1 += 4;
- ptrsrccr3 += 4;
-
- }
-
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb3 += srcccstride;
-
- ptrsrccr1 += srcccstride;
- ptrsrccr3 += srcccstride;
-
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
-
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
-
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
- }
-}
-
static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height, uint32_t yvu)
{
(unsigned char)(((color) > 0xFF) ? 0xff : (((color) < 0) ? 0 : (color)))
static void rgb24_to_yuv420(const unsigned char *src, unsigned char *dest,
- uint32_t width, uint32_t height, uint32_t yvu)
+ uint32_t width, uint32_t height, uint32_t yvu,
+ bool reverse)
{
uint32_t x, y;
uint32_t halfWidth;
uline = uplane + ((y >> 1) * halfWidth);
vline = vplane + ((y >> 1) * halfWidth);
- rgbIndex = src + (width * (height - 1 - y) * 3);
- for (x = 0; x < (int)width; x+=2) {
+ if (reverse) {
+ rgbIndex = src + (width * (height - 1 - y) * 3);
+ } else {
+ rgbIndex = src + (width * y * 3);
+ }
+
+ for (x = 0; x < (int)width; x += 2) {
RGB2Y(rgbIndex[2], rgbIndex[1], rgbIndex[0], *yline++);
rgbIndex += 3;
RGB2Y(rgbIndex[2], rgbIndex[1], rgbIndex[0], *yline++);
}
static void rgb24_to_yuyv(unsigned char *src, unsigned char *dest,
- uint32_t width, uint32_t height)
+ uint32_t width, uint32_t height, bool reverse)
{
uint32_t i, j;
uint8_t *ptr;
for (i = 0; i < height; i++) {
- ptr = src + (width * (height - 1 - i) * 3);
+ if (reverse) {
+ ptr = src + (width * (height - 1 - i) * 3);
+ } else {
+ ptr = src + (width * i * 3);
+ }
+
for (j = 0; j < width; j += 2) {
/* y */
*dest++ = CLIP(0.299 * (ptr[2] - 128) +
#ifndef _MARU_CAMERA_CONVERT_H_
#define _MARU_CAMERA_CONVERT_H_
-#define MAKEFOURCC(a, b, c, d) \
- (((uint32_t)(a) << 0) | \
- ((uint32_t)(b) << 8) | \
- ((uint32_t)(c) << 16) | \
- ((uint32_t)(d) << 24))
+uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width);
-/* 16 RGB-5-5-5 */
-#define V4L2_PIX_FMT_RGB555 MAKEFOURCC('R', 'G', 'B', 'O')
-/* 16 RGB-5-6-5 */
-#define V4L2_PIX_FMT_RGB565 MAKEFOURCC('R', 'G', 'B', 'P')
-/* 16 RGB-5-5-5 BE */
-#define V4L2_PIX_FMT_RGB555X MAKEFOURCC('R', 'G', 'B', 'Q')
-/* 16 RGB-5-6-5 BE */
-#define V4L2_PIX_FMT_RGB565X MAKEFOURCC('R', 'G', 'B', 'R')
-/* 24 BGR-8-8-8 */
-#define V4L2_PIX_FMT_BGR24 MAKEFOURCC('B', 'G', 'R', '3')
-/* 24 RGB-8-8-8 */
-#define V4L2_PIX_FMT_RGB24 MAKEFOURCC('R', 'G', 'B', '3')
-/* 32 BGR-8-8-8-8 */
-#define V4L2_PIX_FMT_BGR32 MAKEFOURCC('B', 'G', 'R', '4')
-/* 32 RGB-8-8-8-8 */
-#define V4L2_PIX_FMT_RGB32 MAKEFOURCC('R', 'G', 'B', '4')
-/* 9 YVU 4:1:0 */
-#define V4L2_PIX_FMT_YVU410 MAKEFOURCC('Y', 'V', 'U', '9')
-/* 12 YVU 4:2:0 */
-#define V4L2_PIX_FMT_YVU420 MAKEFOURCC('Y', 'V', '1', '2')
-/* 16 YUV 4:2:2 */
-#define V4L2_PIX_FMT_YUYV MAKEFOURCC('Y', 'U', 'Y', 'V')
-/* 16 YUV 4:2:2 */
-#define V4L2_PIX_FMT_UYVY MAKEFOURCC('U', 'Y', 'V', 'Y')
-/* 16 YVU422 planar */
-#define V4L2_PIX_FMT_YUV422P MAKEFOURCC('4', '2', '2', 'P')
-/* 16 YVU411 planar */
-#define V4L2_PIX_FMT_YUV411P MAKEFOURCC('4', '1', '1', 'P')
-/* 12 YUV 4:1:1 */
-#define V4L2_PIX_FMT_Y41P MAKEFOURCC('Y', '4', '1', 'P')
-/* 16 xxxxyyyy uuuuvvvv */
-#define V4L2_PIX_FMT_YUV444 MAKEFOURCC('Y', '4', '4', '4')
-/* 16 YUV-5-5-5 */
-#define V4L2_PIX_FMT_YUV555 MAKEFOURCC('Y', 'U', 'V', 'O')
-/* 16 YUV-5-6-5 */
-#define V4L2_PIX_FMT_YUV565 MAKEFOURCC('Y', 'U', 'V', 'P')
-/* 32 YUV-8-8-8-8 */
-#define V4L2_PIX_FMT_YUV32 MAKEFOURCC('Y', 'U', 'V', '4')
-/* 9 YUV 4:1:0 */
-#define V4L2_PIX_FMT_YUV410 MAKEFOURCC('Y', 'U', 'V', '9')
-/* 12 YUV 4:2:0 */
-#define V4L2_PIX_FMT_YUV420 MAKEFOURCC('Y', 'U', '1', '2')
-/* 16 YUV 4:2:2 */
-#define V4L2_PIX_FMT_YYUV MAKEFOURCC('Y', 'Y', 'U', 'V')
-
-/* V4L2 defines copy from videodev2.h */
-#define V4L2_CTRL_FLAG_SLIDER 0x0020
-
-#define V4L2_CTRL_CLASS_USER 0x00980000
-#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
-#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE + 0)
-#define V4L2_CID_CONTRAST (V4L2_CID_BASE + 1)
-#define V4L2_CID_SATURATION (V4L2_CID_BASE + 2)
-#define V4L2_CID_SHARPNESS (V4L2_CID_BASE + 27)
-
-inline uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width)
-{
- uint32_t bytesperline;
-
- switch (pixfmt) {
- case V4L2_PIX_FMT_YUV420:
- case V4L2_PIX_FMT_YVU420:
- bytesperline = (width * 12) >> 3;
- break;
- case V4L2_PIX_FMT_YUYV:
- default:
- bytesperline = width * 2;
- break;
- }
-
- return bytesperline;
-}
-
-inline uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height)
-{
- return get_bytesperline(pixfmt, width) * height;
-}
+uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height);
uint8_t convert_frame(uint32_t src_format, uint32_t dst_format,
uint32_t width, uint32_t height, size_t frame_size,
- void *src_buf, void *dst_buf)
+ void *src_buf, void *dst_buf, bool reverse);
#endif /* _MARU_CAMERA_CONVERT_H_ */
*
*/
+/*********************************************/
+/* Must Translate to AVFoundation */
+/*********************************************/
+
+
#import <Cocoa/Cocoa.h>
#import <QTKit/QTKit.h>
#import <CoreAudio/CoreAudio.h>
#include "qemu-common.h"
#include "maru_camera.h"
#include "maru_camera_convert.h"
+#include "videodev2_min.h"
#include "debug_ch.h"
MULTI_DEBUG_CHANNEL(tizen, camera);
-#define MARUCAM_THREAD_NAME "marucam_worker_thread"
-
-typedef struct tagMaruCamConvertPixfmt {
- uint32_t fmt; /* fourcc */
- uint32_t bpp; /* bits per pixel, 0 for compressed formats */
- uint32_t needs_conversion;
-} MaruCamConvertPixfmt;
-
-
-static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
- { V4L2_PIX_FMT_YUYV, 16, 0 },
- { V4L2_PIX_FMT_UYVY, 16, 0 },
- { V4L2_PIX_FMT_YUV420, 12, 0 },
- { V4L2_PIX_FMT_YVU420, 12, 0 },
+/* Destination pixel formats offered to the guest. */
+static uint32_t support_fmts[] = {
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_UYVY,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420,
};
-typedef struct tagMaruCamConvertFrameInfo {
+/* One supported capture frame size. */
+struct MCFrame {
uint32_t width;
uint32_t height;
-} MaruCamConvertFrameInfo;
+};
-static MaruCamConvertFrameInfo supported_dst_frames[] = {
+/* Supported frame sizes — presumably enumerated to the guest via
+ * enum_framesizes; confirm against the backend op. */
+static struct MCFrame support_frames[] = {
{ 640, 480 },
{ 352, 288 },
{ 320, 240 },
{ 160, 120 },
};
-#define MARUCAM_CTRL_VALUE_MAX 20
-#define MARUCAM_CTRL_VALUE_MIN 1
-#define MARUCAM_CTRL_VALUE_MID 10
-#define MARUCAM_CTRL_VALUE_STEP 1
-
-enum {
- _MC_THREAD_PAUSED,
- _MC_THREAD_STREAMON,
- _MC_THREAD_STREAMOFF,
-};
-
-#if 0
-struct marucam_qctrl {
- uint32_t id;
- uint32_t hit;
- long min;
- long max;
- long step;
- long init_val;
-};
-
-static struct marucam_qctrl qctrl_tbl[] = {
- { V4L2_CID_BRIGHTNESS, 0, },
- { V4L2_CID_CONTRAST, 0, },
- { V4L2_CID_SATURATION, 0, },
- { V4L2_CID_SHARPNESS, 0, },
-};
-#endif
-
-static MaruCamState *g_state;
-
-static uint32_t ready_count;
-static uint32_t cur_fmt_idx;
-static uint32_t cur_frame_idx;
-
/***********************************
* Mac camera helper functions
***********************************/
CVImageBufferRef mCurrentImageBuffer;
BOOL mDeviceIsOpened;
BOOL mCaptureIsStarted;
+ uint32_t mRequestFormat;
}
-- (MaruCameraDriver *)init;
-- (int)startCapture:(int)width :(int)height;
+- (id)init;
+- (int)startCapture:(int)width setHeight:(int)height;
- (void)stopCapture;
- (int)readFrame:(void *)video_buf;
-- (int)setCaptureFormat:(int)width :(int)height :(int)pix_format;
-- (int)getCaptureFormat:(int)width :(int)height :(int)pix_format;
+- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format;
+- (int)getCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format;
- (BOOL)deviceStatus;
@end
@implementation MaruCameraDriver
-- (MaruCameraDriver *)init
+- (id)init
{
BOOL success = NO;
NSError *error;
}
}
-- (int)startCapture:(int)width :(int)height
+- (int)startCapture:(int)width setHeight:(int)height
{
int ret = -1;
if (![mCaptureSession isRunning]) {
/* Set width & height, using default pixel format to capture */
NSDictionary *attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- nil];
+ [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
+ nil];
[mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
[mCaptureSession startRunning];
} else {
}
if ([mCaptureSession isRunning]) {
- while(!mCaptureIsStarted) {
+ while (!mCaptureIsStarted) {
/* Wait Until Capture is started */
- [[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.5]];
+ [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.5]];
}
ret = 0;
}
{
if ([mCaptureSession isRunning]) {
[mCaptureSession stopRunning];
- while([mCaptureSession isRunning]) {
+ while ([mCaptureSession isRunning]) {
/* Wait Until Capture is stopped */
- [[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.1]];
+ [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
}
}
const int frame_height = CVPixelBufferGetHeight(mCurrentImageBuffer);
const size_t frame_size = CVPixelBufferGetBytesPerRow(mCurrentImageBuffer) * frame_height;
const void *frame_pixels = CVPixelBufferGetBaseAddress(mCurrentImageBuffer);
- const uint32_t dst_format = supported_dst_pixfmts[cur_fmt_idx].fmt;
TRACE("buffer(%p), pixel_format(%d,%.4s), frame_width(%d), "
"frame_height(%d), frame_size(%d)\n",
frame_height, (int)frame_size);
/* convert frame to v4l2 format */
- convert_frame(pixel_format, dst_format, frame_width, frame_height,
- frame_size, (void *)frame_pixels, video_buf);
+ convert_frame(pixel_format, mRequestFormat, frame_width, frame_height,
+ frame_size, (void *)frame_pixels, video_buf, false);
CVPixelBufferUnlockBaseAddress(mCurrentImageBuffer, 0);
[pool release];
return 1;
return -1;
}
-- (int)setCaptureFormat:(int)width :(int)height :(int)pix_format
+/* Record the requested capture size and format.
+ * The width/height are applied to the preview output immediately; the
+ * pixel format is only remembered in mRequestFormat (readFrame converts
+ * to it later) — the direct kCVPixelBufferPixelFormatTypeKey path is
+ * intentionally left commented out.
+ * Returns 0 on success, -1 if the capture session is already running. */
+- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format
{
int ret = -1;
NSDictionary *attributes;
/* Set the pixel buffer attributes before running the capture session */
if (![mCaptureSession isRunning]) {
- if (pix_format) {
+ attributes = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
+ nil];
+/*
+ if (format) {
attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- [NSNumber numberWithInt: pix_format], (id)kCVPixelBufferPixelFormatTypeKey,
- nil];
+ [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
+ [NSNumber numberWithInt:format], (id)kCVPixelBufferPixelFormatTypeKey,
+ nil];
} else {
attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- nil];
+ [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
+ nil];
}
+*/
[mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
+ mRequestFormat = format; /* consumed by readFrame's convert_frame() call */
ret = 0;
} else {
ERR("Cannot set pixel buffer attributes when it's running.\n");
return ret;
}
-- (int)getCaptureFormat:(int)width :(int)height :(int)pix_format
+- (int)getCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format
{
return 0;
}
** Maru Camera APIs
*****************************************************************/
-typedef struct MaruCameraDevice MaruCameraDevice;
-struct MaruCameraDevice {
+/* Mac (QTKit) backend. Embeds the generic MaruCamBackend as its first
+ * member, so a MaruCamBackend* can be cast to MCBackendMac* (as done in
+ * backend_mac_read_frame). */
+typedef struct MCBackendMac {
+ MaruCamBackend base;
+
 /* Maru camera device object. */
 MaruCameraDriver *driver;
-};
-/* Golbal representation of the Maru camera */
-MaruCameraDevice *mcd = NULL;
+ /* NOTE(review): by naming, src_fmt is the capture-side format and the
+  * dst_* fields the guest-requested geometry/format — confirm against
+  * the s_fmt/g_fmt ops. */
+ uint32_t src_fmt;
+ uint32_t dst_width;
+ uint32_t dst_height;
+ uint32_t dst_fmt;
+} MCBackendMac;
-static int is_streamon()
+/* Return non-zero while the worker thread is in the STREAMON state.
+ * state->streamon is sampled under thread_mutex. */
+static int is_streamon(MaruCamState *state)
{
int st;
- qemu_mutex_lock(&g_state->thread_mutex);
- st = g_state->streamon;
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_lock(&state->thread_mutex);
+ st = state->streamon;
+ qemu_mutex_unlock(&state->thread_mutex);
return (st == _MC_THREAD_STREAMON);
}
-static void __raise_err_intr()
+/* Report a capture error to the guest: clear any pending frame request,
+ * set the error bit (0x08) in the ISR and schedule the interrupt bottom
+ * half. Has no effect unless streaming is currently on. */
+static void __raise_err_intr(MaruCamState *state)
{
- qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon == _MC_THREAD_STREAMON) {
- g_state->req_frame = 0; /* clear request */
- g_state->isr = 0x08; /* set a error flag of rasing a interrupt */
- qemu_bh_schedule(g_state->tx_bh);
+ qemu_mutex_lock(&state->thread_mutex);
+ if (state->streamon == _MC_THREAD_STREAMON) {
+ state->req_frame = 0; /* clear request */
+ state->isr = 0x08; /* set the error flag for raising an interrupt */
+ qemu_bh_schedule(state->tx_bh);
}
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
}
-static int marucam_device_read_frame()
+/* Grab one frame from the Mac capture driver into the frame-buffer slot
+ * the guest requested (req_frame is 1-based; 0 means "no request"),
+ * then flag the ISR and schedule the interrupt bottom half.
+ * Returns 0 on success or when there is nothing to do, -1 on capture
+ * error or when streaming has stopped — the worker loop exits on -1. */
+static int backend_mac_read_frame(MaruCamState *state)
{
int ret;
void *tmp_buf;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
- qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon == _MC_THREAD_STREAMON) {
-#if 0
- if (ready_count < MARUCAM_SKIPFRAMES) {
- /* skip a frame cause first some frame are distorted */
- ++ready_count;
- TRACE("Skip %d frame\n", ready_count);
- qemu_mutex_unlock(&g_state->thread_mutex);
- return 0;
- }
-#endif
- if (g_state->req_frame == 0) {
+ qemu_mutex_lock(&state->thread_mutex);
+ if (state->streamon == _MC_THREAD_STREAMON) {
+ if (state->req_frame == 0) {
TRACE("There is no request\n");
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return 0;
}
/* Grab the camera frame into temp buffer */
- tmp_buf = g_state->vaddr + g_state->buf_size * (g_state->req_frame - 1);
- ret = [mcd->driver readFrame: tmp_buf];
+ tmp_buf = state->fb_ptr + state->buf_size * (state->req_frame - 1);
+ ret = [backend->driver readFrame: tmp_buf];
if (ret < 0) {
ERR("%s, Capture error\n", __func__);
- qemu_mutex_unlock(&g_state->thread_mutex);
- __raise_err_intr();
+ qemu_mutex_unlock(&state->thread_mutex);
+ __raise_err_intr(state);
return -1;
} else if (!ret) {
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return 0;
}
- g_state->req_frame = 0; /* clear request */
- g_state->isr |= 0x01; /* set a flag of rasing a interrupt */
- qemu_bh_schedule(g_state->tx_bh);
+ state->req_frame = 0; /* clear request */
+ state->isr |= 0x01; /* set the frame-ready flag for raising an interrupt */
+ qemu_bh_schedule(state->tx_bh);
} else {
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return -1;
}
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return 0;
}
/* Worker thread to grab frames to the preview window */
static void *marucam_worker_thread(void *thread_param)
{
+ MaruCamState *state = (MaruCamState *)thread_param;
+
while (1) {
- qemu_mutex_lock(&g_state->thread_mutex);
- g_state->streamon = _MC_THREAD_PAUSED;
- qemu_cond_wait(&g_state->thread_cond, &g_state->thread_mutex);
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = _MC_THREAD_PAUSED;
+ qemu_cond_wait(&state->thread_cond, &state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
- if (g_state->destroying) {
+ if (state->destroying) {
break;
}
- ready_count = 0;
- qemu_mutex_lock(&g_state->thread_mutex);
- g_state->streamon = _MC_THREAD_STREAMON;
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = _MC_THREAD_STREAMON;
+ qemu_mutex_unlock(&state->thread_mutex);
INFO("Streaming on ......\n");
/* Loop: capture frame -> convert format -> render to screen */
while (1) {
- if (is_streamon()) {
- if (marucam_device_read_frame() < 0) {
+ if (is_streamon(state)) {
+ if (backend_mac_read_frame(state) < 0) {
INFO("Streaming is off ...\n");
break;
} else {
return NULL;
}
-int marucam_device_check(int log_flag)
+int marucam_device_check(void)
{
/* FIXME: check the device parameters */
SInt32 versMaj, versMin, versBugFix;
/**********************************************
* MARU camera routines
**********************************************/
-void marucam_device_init(MaruCamState *state)
+static void backend_mac_init(MaruCamState *state)
{
- g_state = state;
- g_state->destroying = false;
+ state->destroying = false;
qemu_thread_create(&state->thread_id,
MARUCAM_THREAD_NAME,
marucam_worker_thread,
- NULL,
+ (void *)state,
QEMU_THREAD_JOINABLE);
+ state->initialized = true;
+}
+
+static void backend_mac_reset(MaruCamState *state)
+{
+ state->backend->close(state);
}
-void marucam_device_exit(MaruCamState *state)
+static void backend_mac_release(MaruCamState *state)
{
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
+
state->destroying = true;
qemu_mutex_lock(&state->thread_mutex);
qemu_cond_signal(&state->thread_cond);
qemu_mutex_unlock(&state->thread_mutex);
qemu_thread_join(&state->thread_id);
+ g_free(backend);
+ backend = NULL;
}
-/* MARUCAM_CMD_OPEN */
-void marucam_device_open(MaruCamState *state)
+static void backend_mac_open(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
- mcd = (MaruCameraDevice *)malloc(sizeof(MaruCameraDevice));
- if (mcd == NULL) {
- ERR("%s: MaruCameraDevice allocate failed\n", __func__);
- param->errCode = EINVAL;
+ backend->driver = [[MaruCameraDriver alloc] init];
+ if (backend->driver == nil) {
+ ERR("Camera device open failed\n");
+ [backend->driver dealloc];
+ state->ret_val = EINVAL;
return;
}
- memset(mcd, 0, sizeof(MaruCameraDevice));
- mcd->driver = [[MaruCameraDriver alloc] init];
- if (mcd->driver == nil) {
- ERR("Camera device open failed\n");
- [mcd->driver dealloc];
- free(mcd);
- param->errCode = EINVAL;
+
+ /* Set default values, TODO: can be smart? */
+ backend->dst_height = support_frames[0].height;
+ backend->dst_width = support_frames[0].width;
+ backend->dst_fmt = support_fmts[0];
+ if ([backend->driver setCaptureFormat:backend->dst_width
+ setHeight:backend->dst_height
+ setFormat:backend->dst_fmt] < 0) {
+ ERR("Set pixel format failed\n");
+ state->ret_val = EINVAL;
return;
}
- INFO("Camera opened!\n");
+
+
+ INFO("Opened\n");
}
-/* MARUCAM_CMD_CLOSE */
-void marucam_device_close(MaruCamState *state)
+static void backend_mac_close(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
- if (mcd != NULL) {
- if (is_streamon()) {
- marucam_device_stop_preview(state);
- }
- [mcd->driver dealloc];
- free(mcd);
- mcd = NULL;
+ if (is_streamon(state)) {
+ state->backend->stream_off(state);
+ }
+ if (backend->driver != nil) {
+ [backend->driver dealloc];
+ backend->driver = nil;
}
- /* marucam_reset_controls(); */
- INFO("Camera closed\n");
+ INFO("Closed\n");
}
-/* MARUCAM_CMD_START_PREVIEW */
-void marucam_device_start_preview(MaruCamState *state)
+static void backend_mac_stream_on(MaruCamState *state)
{
- uint32_t width, height, pixfmt;
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- state->buf_size = get_sizeimage(pixfmt, width, height);
+ state->buf_size = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
- INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u), frame idx(%d), fmt idx(%d)\n",
- (char)(pixfmt), (char)(pixfmt >> 8),
- (char)(pixfmt >> 16), (char)(pixfmt >> 24),
- width, height, state->buf_size,
- cur_frame_idx, cur_fmt_idx);
+ INFO("Pixfmt(%c%c%c%c), W:H(%u:%u), buf size(%u)\n",
+ (char)(backend->dst_fmt), (char)(backend->dst_fmt >> 8),
+ (char)(backend->dst_fmt >> 16), (char)(backend->dst_fmt >> 24),
+ backend->dst_width, backend->dst_height, state->buf_size);
+ INFO("Starting preview\n");
- if (mcd->driver == nil) {
- ERR("%s: Start capture failed: vaild device", __func__);
- param->errCode = EINVAL;
+ if (backend->driver == nil) {
+ ERR("%s: Start capture failed: invalid device\n", __func__);
+ state->ret_val = EINVAL;
return;
}
- INFO("Starting preview ...\n");
- [mcd->driver startCapture: width: height];
+ [backend->driver startCapture:backend->dst_width
+ setHeight:backend->dst_height];
/* Enable the condition to capture frames now */
qemu_mutex_lock(&state->thread_mutex);
qemu_cond_signal(&state->thread_cond);
qemu_mutex_unlock(&state->thread_mutex);
- while (!is_streamon()) {
+ while (!is_streamon(state)) {
usleep(10000);
}
+
+ INFO("Streaming on ......\n");
}
-/* MARUCAM_CMD_STOP_PREVIEW */
-void marucam_device_stop_preview(MaruCamState *state)
+static void backend_mac_stream_off(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
- if (is_streamon()) {
+ INFO("...... Streaming off\n");
+ if (is_streamon(state)) {
qemu_mutex_lock(&state->thread_mutex);
state->streamon = _MC_THREAD_STREAMOFF;
qemu_mutex_unlock(&state->thread_mutex);
- while (is_streamon()) {
+ while (is_streamon(state)) {
usleep(10000);
}
}
- if (mcd->driver != nil) {
- [mcd->driver stopCapture];
+ if (backend->driver != nil) {
+ [backend->driver stopCapture];
}
-
state->buf_size = 0;
- INFO("Stopping preview ...\n");
+
+ INFO("Stopping preview\n");
}
-/* MARUCAM_CMD_S_PARAM */
-void marucam_device_s_param(MaruCamState *state)
+static void backend_mac_s_parm(MaruCamState *state)
{
- MaruCamParam *param = state->param;
-
/* We use default FPS of the webcam */
- param->top = 0;
}
-/* MARUCAM_CMD_G_PARAM */
-void marucam_device_g_param(MaruCamState *state)
+static void backend_mac_g_parm(MaruCamState *state)
{
- MaruCamParam *param = state->param;
+ struct v4l2_captureparm *cp =
+ (struct v4l2_captureparm *)state->io_ptr;
/* We use default FPS of the webcam
* return a fixed value on guest ini file (1/30).
*/
- param->top = 0;
- param->stack[0] = 0x1000; /* V4L2_CAP_TIMEPERFRAME */
- param->stack[1] = 1; /* numerator */
- param->stack[2] = 30; /* denominator */
+ cp->capability = V4L2_CAP_TIMEPERFRAME;
+ cp->timeperframe.numerator = 1;
+ cp->timeperframe.denominator = 30;
}
-/* MARUCAM_CMD_S_FMT */
-void marucam_device_s_fmt(MaruCamState *state)
+static void backend_mac_s_fmt(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- uint32_t width, height, pixfmt, pidx, fidx;
-
- param->top = 0;
- width = param->stack[0];
- height = param->stack[1];
- pixfmt = param->stack[2];
+ uint32_t pidx, fidx;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- TRACE("Set format: width(%d), height(%d), pixfmt(%d, %.4s)\n",
- width, height, pixfmt, (const char*)&pixfmt);
+ TRACE("Set format: w:h(%u:%u), pixfmt(%u, %.4s)\n",
+ f->width, f->height, f->pixelformat,
+ (const char*)&(f->pixelformat));
- for (fidx = 0; fidx < ARRAY_SIZE(supported_dst_frames); fidx++) {
- if ((supported_dst_frames[fidx].width == width) &&
- (supported_dst_frames[fidx].height == height)) {
+ for (fidx = 0; fidx < ARRAY_SIZE(support_frames); fidx++) {
+ if ((support_frames[fidx].width == f->width) &&
+ (support_frames[fidx].height == f->height)) {
break;
}
}
- if (fidx == ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (fidx == ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
-
- for (pidx = 0; pidx < ARRAY_SIZE(supported_dst_pixfmts); pidx++) {
- if (supported_dst_pixfmts[pidx].fmt == pixfmt) {
- TRACE("pixfmt index is match: %d\n", pidx);
+ for (pidx = 0; pidx < ARRAY_SIZE(support_fmts); pidx++) {
+ if (support_fmts[pidx] == f->pixelformat) {
+ TRACE("pixfmt index is match: index(%u)\n", pidx);
break;
}
}
- if (pidx == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (pidx == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- if ((supported_dst_frames[cur_frame_idx].width != width) &&
- (supported_dst_frames[cur_frame_idx].height != height)) {
- if (mcd->driver == nil || [mcd->driver setCaptureFormat: width: height: 0] < 0) {
+ if ((backend->dst_width != f->width) &&
+ (backend->dst_height != f->height)) {
+ if ([backend->driver setCaptureFormat:f->width
+ setHeight:f->height
+ setFormat:support_fmts[pidx]] < 0) {
ERR("Set pixel format failed\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
-
- TRACE("cur_frame_idx:%d, supported_dst_frames[cur_frame_idx].width:%d\n",
- cur_frame_idx, supported_dst_frames[cur_frame_idx].width);
}
- cur_frame_idx = fidx;
- cur_fmt_idx = pidx;
-
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
+ f->width = backend->dst_width = support_frames[fidx].width;
+ f->height = backend->dst_height = support_frames[fidx].height;
+ f->pixelformat = backend->dst_fmt = support_fmts[pidx];
- param->stack[0] = width;
- param->stack[1] = height;
- param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
- param->stack[6] = 0;
- param->stack[7] = 0;
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(backend->dst_fmt,
+ backend->dst_width);
+ f->sizeimage = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
+ f->colorspace = 0;
+ f->priv = 0;
- TRACE("Set device pixel format ...\n");
+ TRACE("Set the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
-/* MARUCAM_CMD_G_FMT */
-void marucam_device_g_fmt(MaruCamState *state)
+static void backend_mac_g_fmt(MaruCamState *state)
{
- uint32_t width, height, pixfmt;
- MaruCamParam *param = state->param;
+ MCBackendMac *backend = (MCBackendMac *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- param->top = 0;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
+ f->width = backend->dst_width;
+ f->height = backend->dst_height;
+ f->pixelformat = backend->dst_fmt;
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(backend->dst_fmt,
+ backend->dst_width);
+ f->sizeimage = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
+ f->colorspace = 0;
+ f->priv = 0;
- param->stack[0] = width;
- param->stack[1] = height;
- param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
- param->stack[6] = 0;
- param->stack[7] = 0;
-
- TRACE("Get device frame format ...\n");
+ TRACE("Get the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
-void marucam_device_try_fmt(MaruCamState *state)
+static void backend_mac_try_fmt(MaruCamState *state)
{
- TRACE("Try device frame format, use default setting ...\n");
+ uint32_t i;
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
+
+ for (i = 0; i < ARRAY_SIZE(support_frames); i++) {
+ if ((support_frames[i].width == f->width) &&
+ (support_frames[i].height == f->height)) {
+ break;
+ }
+ }
+ if (i == ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
+ return;
+ }
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == f->pixelformat) {
+ break;
+ }
+ }
+ if (i == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
+ return;
+ }
+
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(f->pixelformat,
+ f->width);
+ f->sizeimage = get_sizeimage(f->pixelformat,
+ f->width,
+ f->height);
+ f->colorspace = 0;
+ f->priv = 0;
+
+ TRACE("Check the format: w:h(%dx%d), pix_fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
/* Get specific pixelformat description */
-void marucam_device_enum_fmt(MaruCamState *state)
+static void backend_mac_enum_fmt(MaruCamState *state)
{
- uint32_t index;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
+ struct v4l2_fmtdesc *f = (struct v4l2_fmtdesc *)state->io_ptr;
- if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (f->index >= ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[1] = 0; /* flags = NONE */
- param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
- switch (supported_dst_pixfmts[index].fmt) {
+
+ f->flags = 0; /* flags = NONE */
+ f->pixelformat = support_fmts[f->index];
+
+ /* set description */
+ switch (support_fmts[f->index]) {
case V4L2_PIX_FMT_YUYV:
- memcpy(¶m->stack[3], "YUYV", 32);
- break;
- case V4L2_PIX_FMT_UYVY:
- memcpy(¶m->stack[3], "UYVY", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YUYV");
break;
case V4L2_PIX_FMT_YUV420:
- memcpy(¶m->stack[3], "YU12", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YU12");
break;
case V4L2_PIX_FMT_YVU420:
- memcpy(¶m->stack[3], "YV12", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YV12");
break;
default:
- param->errCode = EINVAL;
+ ERR("Invalid pixel format\n");
+ state->ret_val = EINVAL;
break;
}
}
/*
* QTKit don't support setting brightness, contrast, saturation & sharpness
*/
-void marucam_device_qctrl(MaruCamState *state)
+static void backend_mac_query_ctrl(MaruCamState *state)
{
- uint32_t id, i;
- /* long property, min, max, step, def_val, set_val; */
- char name[32] = {0,};
- MaruCamParam *param = state->param;
-
- param->top = 0;
- id = param->stack[0];
+ uint32_t i;
+ struct v4l2_queryctrl *qc = (struct v4l2_queryctrl *)state->io_ptr;
- switch (id) {
+ /* NOTICE: Tizen MMFW hardcoded for control name
Do Not Modify the name
*/
+ switch (qc->id) {
case V4L2_CID_BRIGHTNESS:
- TRACE("V4L2_CID_BRIGHTNESS\n");
- memcpy((void *)name, (void *)"brightness", 32);
+ TRACE("Query : BRIGHTNESS\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "brightness");
i = 0;
break;
case V4L2_CID_CONTRAST:
- TRACE("V4L2_CID_CONTRAST\n");
- memcpy((void *)name, (void *)"contrast", 32);
+ TRACE("Query : CONTRAST\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "contrast");
i = 1;
break;
case V4L2_CID_SATURATION:
- TRACE("V4L2_CID_SATURATION\n");
- memcpy((void *)name, (void *)"saturation", 32);
+ TRACE("Query : SATURATION\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "saturation");
i = 2;
break;
case V4L2_CID_SHARPNESS:
- TRACE("V4L2_CID_SHARPNESS\n");
- memcpy((void *)name, (void *)"sharpness", 32);
+ TRACE("Query : SHARPNESS\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "sharpness");
i = 3;
break;
default:
- param->errCode = EINVAL;
+ ERR("Invalid control ID\n");
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = id;
- param->stack[1] = MARUCAM_CTRL_VALUE_MIN; /* minimum */
- param->stack[2] = MARUCAM_CTRL_VALUE_MAX; /* maximum */
- param->stack[3] = MARUCAM_CTRL_VALUE_STEP; /* step */
- param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */
- param->stack[5] = V4L2_CTRL_FLAG_SLIDER;
- /* name field setting */
- memcpy(¶m->stack[6], (void *)name, sizeof(name)/sizeof(name[0]));
+ /* set fixed values by FW configuration file */
+ qc->minimum = MARUCAM_CTRL_VALUE_MIN; /* minimum */
+ qc->maximum = MARUCAM_CTRL_VALUE_MAX; /* maximum */
+ qc->step = MARUCAM_CTRL_VALUE_STEP; /* step */
+ qc->default_value = MARUCAM_CTRL_VALUE_MID; /* default_value */
+ qc->flags = V4L2_CTRL_FLAG_SLIDER;
}
-void marucam_device_s_ctrl(MaruCamState *state)
+static void backend_mac_s_ctrl(MaruCamState *state)
{
- INFO("Set control\n");
+ INFO("[%s][Not Implemented] QTKit doesn't support setting "
+ "brightness, contrast, saturation & sharpness\n", __func__);
}
-void marucam_device_g_ctrl(MaruCamState *state)
+static void backend_mac_g_ctrl(MaruCamState *state)
{
- INFO("Get control\n");
+ INFO("[%s][Not Implemented] QTKit doesn't support getting "
+ "brightness, contrast, saturation & sharpness\n", __func__);
}
/* Get frame width & height */
-void marucam_device_enum_fsizes(MaruCamState *state)
+static void backend_mac_enum_fsizes(MaruCamState *state)
{
- uint32_t index, pixfmt, i;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
- pixfmt = param->stack[1];
+ uint32_t i;
+ struct v4l2_frmsizeenum *fsize =
+ (struct v4l2_frmsizeenum *)state->io_ptr;
- if (index >= ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (fsize->index >= ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == fsize->pixel_format) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (i == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = supported_dst_frames[index].width;
- param->stack[1] = supported_dst_frames[index].height;
+ fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
+ fsize->discrete.width = support_frames[fsize->index].width;
+ fsize->discrete.height = support_frames[fsize->index].height;
}
-void marucam_device_enum_fintv(MaruCamState *state)
+static void backend_mac_enum_fintv(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- param->top = 0;
+ struct v4l2_frmivalenum *fival =
+ (struct v4l2_frmivalenum *)state->io_ptr;
- /* switch by index(param->stack[0]) */
- switch (param->stack[0]) {
+ /* switch by index */
+ switch (fival->index) {
case 0:
- param->stack[1] = 30; /* denominator */
+ /* we only use 1/30 frame interval */
+ fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
+ fival->discrete.numerator = 1;
+ fival->discrete.denominator = 30;
break;
default:
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = 1; /* numerator */
+}
+
+MaruCamBackend *marucam_backend_create(MaruCamState *state)
+{
+ MCBackendMac *backend_mac;
+
+ backend_mac = g_new0(MCBackendMac, 1);
+ if (backend_mac == NULL) {
+ ERR("Failed to allocate memory for the native backend\n");
+ return NULL;
+ }
+
+ backend_mac->base.state = state;
+ backend_mac->base.init = backend_mac_init;
+ backend_mac->base.reset = backend_mac_reset;
+ backend_mac->base.release = backend_mac_release;
+ backend_mac->base.open = backend_mac_open;
+ backend_mac->base.close = backend_mac_close;
+ backend_mac->base.stream_on = backend_mac_stream_on;
+ backend_mac->base.stream_off = backend_mac_stream_off;
+ backend_mac->base.enum_fmt = backend_mac_enum_fmt;
+ backend_mac->base.try_fmt = backend_mac_try_fmt;
+ backend_mac->base.s_fmt = backend_mac_s_fmt;
+ backend_mac->base.g_fmt = backend_mac_g_fmt;
+ backend_mac->base.s_parm = backend_mac_s_parm;
+ backend_mac->base.g_parm = backend_mac_g_parm;
+ backend_mac->base.query_ctrl = backend_mac_query_ctrl;
+ backend_mac->base.s_ctrl = backend_mac_s_ctrl;
+ backend_mac->base.g_ctrl = backend_mac_g_ctrl;
+ backend_mac->base.enum_framesizes = backend_mac_enum_fsizes;
+ backend_mac->base.enum_frameintervals = backend_mac_enum_fintv;
+
+ return &backend_mac->base;
}
MULTI_DEBUG_CHANNEL(tizen, camera);
-#define MARUCAM_THREAD_NAME "marucam_worker_thread"
-
#define CLEAR(x) memset(&(x), 0, sizeof(x))
#define MARUCAM_DEFAULT_BUFFER_COUNT 4
-#define MARUCAM_CTRL_VALUE_MAX 20
-#define MARUCAM_CTRL_VALUE_MIN 1
-#define MARUCAM_CTRL_VALUE_MID 10
-#define MARUCAM_CTRL_VALUE_STEP 1
+static uint32_t support_fmts[] = {
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420,
+};
+
+struct MCFrame {
+ uint32_t width;
+ uint32_t height;
+};
-enum {
- _MC_THREAD_PAUSED,
- _MC_THREAD_STREAMON,
- _MC_THREAD_STREAMOFF,
+static struct MCFrame support_frames[] = {
+ { 640, 480 },
+ { 352, 288 },
+ { 320, 240 },
+ { 176, 144 },
+ { 160, 120 },
+};
+
+struct MCControls {
+ uint32_t id;
+ uint32_t hit;
+ int32_t min;
+ int32_t max;
+ int32_t step;
+ int32_t init_val;
};
-typedef struct marucam_framebuffer {
+static struct MCControls ctrl_tbl[] = {
+ { V4L2_CID_BRIGHTNESS, 0, },
+ { V4L2_CID_CONTRAST, 0, },
+ { V4L2_CID_SATURATION, 0, },
+ { V4L2_CID_SHARPNESS, 0, },
+};
+
+typedef struct MCBuffer {
void *data;
size_t size;
-} marucam_framebuffer;
+} MCBuffer;
-struct marucam_saved_frame {
+struct MCStoredFrame {
void *data;
uint32_t pixelformat;
uint32_t width;
uint32_t size;
};
-static struct marucam_saved_frame saved_frame;
-static char has_success_frame;
-static int n_framebuffer;
-static int previous_frame_index = -1;
-static struct marucam_framebuffer *framebuffer;
+typedef struct MCBackendV4l2 {
+ MaruCamBackend base;
+
+ char dev_name[16];
+ int fd;
+ int convert_trial;
+ int ready_count;
+ int timeout;
+ int has_success;
+ int prev_index;
+ int fb_num;
-static const char *dev_name = "/dev/video0";
-static int v4l2_fd = -1;
-static int convert_trial;
-static int ready_count;
-static int timeout_n;
+ struct v4l2_format src_fmt;
+ struct v4l2_format dst_fmt;
-static struct v4l2_format dst_fmt;
+ struct MCStoredFrame stored;
+ struct MCBuffer *fbs;
+} MCBackendV4l2;
static void ScalePlaneSimple(int src_width, int src_height,
int dst_width, int dst_height,
}
}
-static void make_yu12_black(unsigned char *dest, uint32_t width, uint32_t height)
+static void make_yu12_black(unsigned char *dest,
+ uint32_t width,
+ uint32_t height)
{
uint32_t x, y;
unsigned char *udest, *vdest;
uint8_t *src_v = src_u + (src_width * src_height / 4);
uint8_t *dst_u = dst + (dst_width * dst_height);
- uint8_t *dst_v = dst_u + (dst_width * dst_height /4);
+ uint8_t *dst_v = dst_u + (dst_width * dst_height / 4);
ScalePlaneSimple(src_width, src_height,
dst_width, dst_height,
return r;
}
-typedef struct tagMaruCamConvertPixfmt {
- uint32_t fmt; /* fourcc */
-} MaruCamConvertPixfmt;
-
-static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
- { V4L2_PIX_FMT_YUYV },
- { V4L2_PIX_FMT_YUV420 },
- { V4L2_PIX_FMT_YVU420 },
-};
-
-typedef struct tagMaruCamConvertFrameInfo {
- uint32_t width;
- uint32_t height;
-} MaruCamConvertFrameInfo;
-
-static MaruCamConvertFrameInfo supported_dst_frames[] = {
- { 640, 480 },
- { 352, 288 },
- { 320, 240 },
- { 176, 144 },
- { 160, 120 },
-};
-
-struct marucam_qctrl {
- uint32_t id;
- uint32_t hit;
- int32_t min;
- int32_t max;
- int32_t step;
- int32_t init_val;
-};
-
-static struct marucam_qctrl qctrl_tbl[] = {
- { V4L2_CID_BRIGHTNESS, 0, },
- { V4L2_CID_CONTRAST, 0, },
- { V4L2_CID_SATURATION, 0, },
- { V4L2_CID_SHARPNESS, 0, },
-};
-
-static void marucam_reset_controls(void)
+static void backend_v4l2_reset_controls(MCBackendV4l2 *backend)
{
uint32_t i;
- for (i = 0; i < ARRAY_SIZE(qctrl_tbl); i++) {
- if (qctrl_tbl[i].hit) {
+ for (i = 0; i < ARRAY_SIZE(ctrl_tbl); i++) {
+ if (ctrl_tbl[i].hit) {
struct v4l2_control ctrl = {0,};
- ctrl.id = qctrl_tbl[i].id;
- ctrl.value = qctrl_tbl[i].init_val;
- qctrl_tbl[i].hit = qctrl_tbl[i].init_val = 0;
- qctrl_tbl[i].min = qctrl_tbl[i].max = qctrl_tbl[i].step = 0;
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {
+ ctrl.id = ctrl_tbl[i].id;
+ ctrl.value = ctrl_tbl[i].init_val;
+ ctrl_tbl[i].hit = ctrl_tbl[i].init_val = 0;
+ ctrl_tbl[i].min = ctrl_tbl[i].max = ctrl_tbl[i].step = 0;
+ if (xioctl(backend->fd, VIDIOC_S_CTRL, &ctrl) < 0) {
ERR("Failed to reset control value: id(0x%x), errstr(%s)\n",
ctrl.id, strerror(errno));
}
return ret;
}
-static void set_maxframeinterval(MaruCamState *state, uint32_t pixel_format,
+static void set_maxframeinterval(int fd, uint32_t pixel_format,
uint32_t width, uint32_t height)
{
struct v4l2_frmivalenum fival;
fival.width = width;
fival.height = height;
- if (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) < 0) {
+ if (xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) < 0) {
ERR("Unable to enumerate intervals for pixelformat(0x%x), (%d:%d)\n",
pixel_format, width, height);
return;
TRACE("Discrete frame interval %u/%u supported\n",
fival.discrete.numerator, fival.discrete.denominator);
fival.index++;
- } while (xioctl(v4l2_fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0);
+ } while (xioctl(fd, VIDIOC_ENUM_FRAMEINTERVALS, &fival) >= 0);
} else if ((fival.type == V4L2_FRMIVAL_TYPE_STEPWISE) ||
(fival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS)) {
TRACE("Frame intervals from %u/%u to %u/%u supported",
sp.parm.capture.timeperframe.numerator = min_num;
sp.parm.capture.timeperframe.denominator = min_denom;
- if (xioctl(v4l2_fd, VIDIOC_S_PARM, &sp) < 0) {
+ if (xioctl(fd, VIDIOC_S_PARM, &sp) < 0) {
ERR("Failed to set to minimum FPS(%u/%u)\n", min_num, min_denom);
}
}
-static uint32_t stop_capturing(void)
+static uint32_t stop_capturing(MCBackendV4l2 *backend)
{
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (xioctl(v4l2_fd, VIDIOC_STREAMOFF, &type) < 0) {
+ if (xioctl(backend->fd, VIDIOC_STREAMOFF, &type) < 0) {
ERR("Failed to ioctl() with VIDIOC_STREAMOFF: %s\n", strerror(errno));
return errno;
}
return 0;
}
-static uint32_t start_capturing(void)
+static uint32_t start_capturing(MCBackendV4l2 *backend)
{
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (xioctl(v4l2_fd, VIDIOC_STREAMON, &type) < 0) {
+ if (xioctl(backend->fd, VIDIOC_STREAMON, &type) < 0) {
ERR("Failed to ioctl() with VIDIOC_STREAMON: %s\n", strerror(errno));
return errno;
}
return 0;
}
-static void free_framebuffers(marucam_framebuffer *fb, int buf_num)
+static void backend_v4l2_storedframe_clear(MCBackendV4l2 *backend)
+{
+ if (backend->stored.data) {
+ g_free(backend->stored.data);
+ backend->stored.data = NULL;
+ }
+ memset(&backend->stored, 0x00, sizeof(struct MCStoredFrame));
+}
+
+static void backend_v4l2_storedframe_set(MCBackendV4l2 *backend)
+{
+ backend->stored.width = backend->dst_fmt.fmt.pix.width;
+ backend->stored.height = backend->dst_fmt.fmt.pix.height;
+ backend->stored.pixelformat = backend->dst_fmt.fmt.pix.pixelformat;
+ backend->stored.size = backend->dst_fmt.fmt.pix.sizeimage;
+ if (backend->stored.data) {
+ g_free(backend->stored.data);
+ backend->stored.data = NULL;
+ }
+ backend->stored.data = (void *)g_malloc0(backend->stored.size);
+ memcpy(backend->stored.data,
+ backend->fbs[backend->prev_index].data,
+ backend->stored.size);
+ TRACE("Saves a frame data\n");
+}
+
+static void free_framebuffers(MCBackendV4l2 *backend)
{
int i;
- if (fb == NULL) {
+ if (backend->fbs == NULL) {
ERR("The framebuffer is NULL. Failed to release the framebuffer\n");
return;
- } else if (buf_num == 0) {
+ } else if (backend->fb_num == 0) {
ERR("The buffer count is 0. Failed to release the framebuffer\n");
return;
} else {
- TRACE("[%s]:fb(0x%p), buf_num(%d)\n", __func__, fb, buf_num);
+ TRACE("[%s]: fbs(0x%p), buf_num(%d)\n",
+ __func__, backend->fbs, backend->fb_num);
}
/* Unmap framebuffers. */
- for (i = 0; i < buf_num; i++) {
- if (fb[i].data != NULL) {
- v4l2_munmap(fb[i].data, fb[i].size);
- fb[i].data = NULL;
- fb[i].size = 0;
+ for (i = 0; i < backend->fb_num; i++) {
+ if (backend->fbs[i].data != NULL) {
+ v4l2_munmap(backend->fbs[i].data, backend->fbs[i].size);
+ backend->fbs[i].data = NULL;
+ backend->fbs[i].size = 0;
} else {
ERR("framebuffer[%d].data is NULL.\n", i);
}
}
- previous_frame_index = -1;
+ backend->prev_index = -1;
+ g_free(backend->fbs);
+ backend->fbs = NULL;
+ backend->fb_num = 0;
}
static uint32_t
-mmap_framebuffers(marucam_framebuffer **fb, int *buf_num)
+mmap_framebuffers(MCBackendV4l2 *backend)
{
struct v4l2_requestbuffers req;
+ MCBuffer **fb = &backend->fbs;
+ int *buf_num = &backend->fb_num;
CLEAR(req);
req.count = MARUCAM_DEFAULT_BUFFER_COUNT;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP;
- if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &req) < 0) {
+ if (xioctl(backend->fd, VIDIOC_REQBUFS, &req) < 0) {
if (errno == EINVAL) {
ERR("%s does not support memory mapping: %s\n",
- dev_name, strerror(errno));
+ backend->dev_name, strerror(errno));
} else {
ERR("Failed to request bufs: %s\n", strerror(errno));
}
return errno;
}
if (req.count == 0) {
- ERR("Insufficient buffer memory on %s\n", dev_name);
+ ERR("Insufficient buffer memory on %s\n", backend->dev_name);
return EINVAL;
}
- *fb = g_new0(marucam_framebuffer, req.count);
+ *fb = g_new0(MCBuffer, req.count);
if (*fb == NULL) {
ERR("Not enough memory to allocate framebuffers\n");
return ENOMEM;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = *buf_num;
- if (xioctl(v4l2_fd, VIDIOC_QUERYBUF, &buf) < 0) {
+ if (xioctl(backend->fd, VIDIOC_QUERYBUF, &buf) < 0) {
ERR("Failed to ioctl() with VIDIOC_QUERYBUF: %s\n",
strerror(errno));
return errno;
buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED,
- v4l2_fd, buf.m.offset);
+ backend->fd, buf.m.offset);
if (MAP_FAILED == (*fb)[*buf_num].data) {
ERR("Failed to mmap: %s\n", strerror(errno));
return errno;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = *buf_num;
- if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
+ if (xioctl(backend->fd, VIDIOC_QBUF, &buf) < 0) {
ERR("Failed to ioctl() with VIDIOC_QBUF: %s\n", strerror(errno));
return errno;
}
static void __raise_dummy_intr(MaruCamState *state)
{
void *buf = NULL;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+
qemu_mutex_lock(&state->thread_mutex);
if (state->streamon == _MC_THREAD_STREAMON && state->req_frame) {
- buf = state->vaddr + state->buf_size * (state->req_frame - 1);
- if (saved_frame.data) {
- if (saved_frame.width == dst_fmt.fmt.pix.width &&
- saved_frame.height == dst_fmt.fmt.pix.height) {
+ buf = state->fb_ptr + state->buf_size * (state->req_frame - 1);
+ if (backend->stored.data) {
+ if (backend->stored.width == backend->dst_fmt.fmt.pix.width &&
+ backend->stored.height == backend->dst_fmt.fmt.pix.height) {
TRACE("Copies the previuos frame\n");
- memcpy(buf, saved_frame.data, state->buf_size);
+ memcpy(buf, backend->stored.data, state->buf_size);
} else {
TRACE("Resizes the previous frame\n");
- marucam_scale_yuv420(saved_frame.data, saved_frame.width,
- saved_frame.height,
- buf, dst_fmt.fmt.pix.width,
- dst_fmt.fmt.pix.height);
+ marucam_scale_yuv420(backend->stored.data,
+ backend->stored.width,
+ backend->stored.height,
+ buf,
+ backend->dst_fmt.fmt.pix.width,
+ backend->dst_fmt.fmt.pix.height);
}
} else {
TRACE("Sends a black frame\n");
make_yu12_black(buf,
- dst_fmt.fmt.pix.width,
- dst_fmt.fmt.pix.height);
+ backend->dst_fmt.fmt.pix.width,
+ backend->dst_fmt.fmt.pix.height);
}
state->req_frame = 0; /* clear request */
state->isr |= 0x01; /* set a flag of raising a interrupt */
notify_buffer_ready(MaruCamState *state, uint32_t buf_index)
{
void *buf = NULL;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
qemu_mutex_lock(&state->thread_mutex);
if (state->streamon == _MC_THREAD_STREAMON) {
- if (ready_count < MARUCAM_SKIPFRAMES) {
+ if (backend->ready_count < MARUCAM_SKIPFRAMES) {
/* skip a frame cause first some frame are distorted */
- ++ready_count;
- TRACE("Skip %d frame\n", ready_count);
+ ++backend->ready_count;
+ TRACE("Skip %d frame\n", backend->ready_count);
qemu_mutex_unlock(&state->thread_mutex);
return;
}
qemu_mutex_unlock(&state->thread_mutex);
return;
}
- buf = state->vaddr + state->buf_size * (state->req_frame - 1);
- memcpy(buf, framebuffer[buf_index].data, state->buf_size);
- previous_frame_index = buf_index;
- has_success_frame = 1;
+ buf = state->fb_ptr + state->buf_size * (state->req_frame - 1);
+ memcpy(buf, backend->fbs[buf_index].data, state->buf_size);
+ backend->prev_index = buf_index;
+ backend->has_success = 1;
state->req_frame = 0; /* clear request */
state->isr |= 0x01; /* set a flag of rasing a interrupt */
qemu_bh_schedule(state->tx_bh);
static int read_frame(MaruCamState *state)
{
struct v4l2_buffer buf;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
CLEAR(buf);
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
- if (xioctl(v4l2_fd, VIDIOC_DQBUF, &buf) < 0) {
+ if (xioctl(backend->fd, VIDIOC_DQBUF, &buf) < 0) {
switch (errno) {
case EAGAIN:
case EINTR:
return 0;
case EIO:
ERR("The v4l2_read() met the EIO\n");
- if (convert_trial-- == -1) {
+ if (backend->convert_trial-- == -1) {
ERR("Try count for v4l2_read is exceeded: %s\n",
strerror(errno));
return -1;
notify_buffer_ready(state, buf.index);
- if (xioctl(v4l2_fd, VIDIOC_QBUF, &buf) < 0) {
+ if (xioctl(backend->fd, VIDIOC_QBUF, &buf) < 0) {
ERR("QBUF error: %s\n", strerror(errno));
return -1;
}
fd_set fds;
struct timeval tv;
int ret;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
FD_ZERO(&fds);
- FD_SET(v4l2_fd, &fds);
+ FD_SET(backend->fd, &fds);
tv.tv_sec = 1;
tv.tv_usec = 0;
- ret = select(v4l2_fd + 1, &fds, NULL, NULL, &tv);
+ ret = select(backend->fd + 1, &fds, NULL, NULL, &tv);
if (ret < 0) {
if (errno == EAGAIN || errno == EINTR) {
ERR("Select again: %s\n", strerror(errno));
__raise_err_intr(state);
return -1;
} else if (!ret) {
- timeout_n++;
- ERR("Select timed out: count(%u)\n", timeout_n);
- if (ready_count <= MARUCAM_SKIPFRAMES) {
- switch (timeout_n) {
+ backend->timeout++;
+ ERR("Select timed out: count(%u)\n", backend->timeout);
+ if (backend->ready_count <= MARUCAM_SKIPFRAMES) {
+ switch (backend->timeout) {
case 1:
ERR("Waiting for reading a frame data\n");
return 0;
return -1;
}
}
- if (timeout_n >= 5) {
+ if (backend->timeout >= 5) {
ERR("Webcam is busy, failed to a read frame. Raises an error\n");
__raise_err_intr(state);
return -1;
}
- if (previous_frame_index != -1) {
+ if (backend->prev_index != -1) {
ERR("Sends previous frame data\n");
- notify_buffer_ready(state, previous_frame_index);
+ notify_buffer_ready(state, backend->prev_index);
}
return 0;
}
- if (!v4l2_fd || (v4l2_fd == -1)) {
+ if (backend->fd < 0) {
ERR("The file descriptor is closed or not opened\n");
__raise_err_intr(state);
return -1;
}
/* clear the skip count for select time-out */
- if (timeout_n > 0) {
- timeout_n = 0;
+ if (backend->timeout > 0) {
+ backend->timeout = 0;
}
return 0;
static void *marucam_worker_thread(void *thread_param)
{
MaruCamState *state = (MaruCamState *)thread_param;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
while (1) {
qemu_mutex_lock(&state->thread_mutex);
break;
}
- convert_trial = 10;
- ready_count = 0;
- timeout_n = 0;
- has_success_frame = 0;
+ backend->convert_trial = 10;
+ backend->ready_count = 0;
+ backend->timeout = 0;
+ backend->has_success = 0;
qemu_mutex_lock(&state->thread_mutex);
state->streamon = _MC_THREAD_STREAMON;
qemu_mutex_unlock(&state->thread_mutex);
return NULL;
}
-int marucam_device_check(int log_flag)
+int marucam_device_check(void)
{
int tmp_fd;
struct timeval t1, t2;
struct v4l2_frmsizeenum size;
struct v4l2_capability cap;
int ret = 0;
+ const char *dev_name = "/dev/video0";
gettimeofday(&t1, NULL);
if (stat(dev_name, &st) < 0) {
- INFO("<WARNING> Cannot identify '%s': %s\n",
- dev_name, strerror(errno));
+ INFO("<WARNING> Cannot identify '%s': %s\n", dev_name, strerror(errno));
} else {
if (!S_ISCHR(st.st_mode)) {
- INFO("<WARNING>%s is no character device\n",
- dev_name);
+ INFO("<WARNING>%s is no character device\n", dev_name);
}
}
tmp_fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);
if (tmp_fd < 0) {
- ERR("Camera device open failed: %s\n", dev_name);
+ INFO("Camera device open failed: %s\n", dev_name);
gettimeofday(&t2, NULL);
- ERR("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ INFO("Elapsed time: %lu:%06lu\n",
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
if (ioctl(tmp_fd, VIDIOC_QUERYCAP, &cap) < 0) {
- ERR("Could not qeury video capabilities\n");
+ INFO("Could not qeury video capabilities\n");
close(tmp_fd);
gettimeofday(&t2, NULL);
- ERR("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ INFO("Elapsed time: %lu:%06lu\n",
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||
!(cap.capabilities & V4L2_CAP_STREAMING)) {
- ERR("Not supported video driver\n");
+ INFO("Not supported video driver\n");
close(tmp_fd);
gettimeofday(&t2, NULL);
- ERR("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ INFO("Elapsed time: %lu:%06lu\n",
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
ret = 1;
- if (log_flag) {
- INFO("Driver: %s\n", cap.driver);
- INFO("Card: %s\n", cap.card);
- INFO("Bus info: %s\n", cap.bus_info);
+ INFO("Driver: %s\n", cap.driver);
+ INFO("Card: %s\n", cap.card);
+ INFO("Bus info: %s\n", cap.bus_info);
- CLEAR(format);
- format.index = 0;
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ CLEAR(format);
+ format.index = 0;
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ if (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) < 0) {
+ close(tmp_fd);
+ gettimeofday(&t2, NULL);
+ INFO("Elapsed time: %lu:%06lu\n",
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ return ret;
+ }
+
+ do {
+ CLEAR(size);
+ size.index = 0;
+ size.pixel_format = format.pixelformat;
- if (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) < 0) {
+ INFO("PixelFormat: %c%c%c%c\n",
+ (char)(format.pixelformat),
+ (char)(format.pixelformat >> 8),
+ (char)(format.pixelformat >> 16),
+ (char)(format.pixelformat >> 24));
+
+ if (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) {
close(tmp_fd);
gettimeofday(&t2, NULL);
- ERR("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ INFO("Elapsed time: %lu:%06lu\n",
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
- do {
- CLEAR(size);
- size.index = 0;
- size.pixel_format = format.pixelformat;
-
- INFO("PixelFormat: %c%c%c%c\n",
- (char)(format.pixelformat),
- (char)(format.pixelformat >> 8),
- (char)(format.pixelformat >> 16),
- (char)(format.pixelformat >> 24));
-
- if (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) {
- close(tmp_fd);
- gettimeofday(&t2, NULL);
- ERR("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
- return ret;
- }
-
- if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
- do {
- INFO("\tGot a discrete frame size %dx%d\n",
- size.discrete.width, size.discrete.height);
- size.index++;
- } while (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
- } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
- INFO("We have stepwise frame sizes:\n");
- INFO("\tmin width: %d, min height: %d\n",
- size.stepwise.min_width, size.stepwise.min_height);
- INFO("\tmax width: %d, max height: %d\n",
- size.stepwise.max_width, size.stepwise.max_height);
- INFO("\tstep width: %d, step height: %d\n",
- size.stepwise.step_width, size.stepwise.step_height);
- } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
- INFO("We have continuous frame sizes:\n");
- INFO("\tmin width: %d, min height: %d\n",
- size.stepwise.min_width, size.stepwise.min_height);
- INFO("\tmax width: %d, max height: %d\n",
- size.stepwise.max_width, size.stepwise.max_height);
+ if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
+ do {
+ INFO("\tGot a discrete frame size %dx%d\n",
+ size.discrete.width, size.discrete.height);
+ size.index++;
+ } while (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
+ } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
+ INFO("We have stepwise frame sizes:\n");
+ INFO("\tmin width: %d, min height: %d\n",
+ size.stepwise.min_width, size.stepwise.min_height);
+ INFO("\tmax width: %d, max height: %d\n",
+ size.stepwise.max_width, size.stepwise.max_height);
+ INFO("\tstep width: %d, step height: %d\n",
+ size.stepwise.step_width, size.stepwise.step_height);
+ } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
+ INFO("We have continuous frame sizes:\n");
+ INFO("\tmin width: %d, min height: %d\n",
+ size.stepwise.min_width, size.stepwise.min_height);
+ INFO("\tmax width: %d, max height: %d\n",
+ size.stepwise.max_width, size.stepwise.max_height);
- }
- format.index++;
- } while (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) >= 0);
- }
+ }
+ format.index++;
+ } while (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) >= 0);
close(tmp_fd);
gettimeofday(&t2, NULL);
INFO("Elapsed time: %lu:%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
-void marucam_device_init(MaruCamState *state)
+static void backend_v4l2_init(MaruCamState *state)
{
state->destroying = false;
- memset(&saved_frame, 0x00, sizeof(saved_frame));
-
qemu_thread_create(&state->thread_id,
MARUCAM_THREAD_NAME,
marucam_worker_thread,
(void *)state,
QEMU_THREAD_JOINABLE);
+ state->initialized = true;
}
-void marucam_device_exit(MaruCamState *state)
+static void backend_v4l2_reset(MaruCamState *state)
{
+ state->backend->close(state);
+}
+
+static void backend_v4l2_release(MaruCamState *state)
+{
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+
state->destroying = true;
qemu_mutex_lock(&state->thread_mutex);
qemu_cond_signal(&state->thread_cond);
qemu_mutex_unlock(&state->thread_mutex);
qemu_thread_join(&state->thread_id);
+ g_free(backend);
+ backend = NULL;
}
-void marucam_device_open(MaruCamState *state)
+static void backend_v4l2_open(MaruCamState *state)
{
- MaruCamParam *param = state->param;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+
+ if (backend->dev_name[0] == '\0') {
+ const char *dev_name = "/dev/video0";
+ pstrcpy(backend->dev_name, sizeof(backend->dev_name), dev_name);
+ }
- param->top = 0;
- v4l2_fd = v4l2_open(dev_name, O_RDWR | O_NONBLOCK, 0);
- if (v4l2_fd < 0) {
- ERR("The v4l2 device open failed: %s\n", dev_name);
- param->errCode = EINVAL;
+ backend->fd = v4l2_open(backend->dev_name, O_RDWR | O_NONBLOCK, 0);
+ if (backend->fd < 0) {
+ ERR("The v4l2 device open failed: %s\n", backend->dev_name);
+ state->ret_val = EINVAL;
return;
}
INFO("Opened\n");
- /* FIXME : Do not use fixed values */
+ /* FIXME : Do not use fixed values
CLEAR(dst_fmt);
dst_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
dst_fmt.fmt.pix.width = 640;
dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,
dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,
dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);
+ */
}
-void marucam_device_start_preview(MaruCamState *state)
+static void backend_v4l2_close(MaruCamState *state)
{
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+
+ if (!is_stream_paused(state)) {
+ state->backend->stream_off(state);
+ }
+
+ backend_v4l2_reset_controls(backend);
+ backend_v4l2_storedframe_clear(backend);
+
+ if (backend->fd >= 0) {
+ v4l2_close(backend->fd);
+ backend->fd = -1;
+ INFO("Closed\n");
+ }
+}
+
+static void backend_v4l2_stream_on(MaruCamState *state)
+{
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+
struct timespec req;
- MaruCamParam *param = state->param;
- param->top = 0;
req.tv_sec = 0;
req.tv_nsec = 10000000;
INFO("Pixfmt(%c%c%c%C), W:H(%d:%d), buf size(%u)\n",
- (char)(dst_fmt.fmt.pix.pixelformat),
- (char)(dst_fmt.fmt.pix.pixelformat >> 8),
- (char)(dst_fmt.fmt.pix.pixelformat >> 16),
- (char)(dst_fmt.fmt.pix.pixelformat >> 24),
- dst_fmt.fmt.pix.width,
- dst_fmt.fmt.pix.height,
- dst_fmt.fmt.pix.sizeimage);
-
- param->errCode = mmap_framebuffers(&framebuffer, &n_framebuffer);
- if (param->errCode) {
+ (char)(backend->dst_fmt.fmt.pix.pixelformat),
+ (char)(backend->dst_fmt.fmt.pix.pixelformat >> 8),
+ (char)(backend->dst_fmt.fmt.pix.pixelformat >> 16),
+ (char)(backend->dst_fmt.fmt.pix.pixelformat >> 24),
+ backend->dst_fmt.fmt.pix.width,
+ backend->dst_fmt.fmt.pix.height,
+ backend->dst_fmt.fmt.pix.sizeimage);
+
+ state->ret_val = mmap_framebuffers(backend);
+ if (state->ret_val) {
ERR("Failed to mmap framebuffers\n");
- if (framebuffer != NULL) {
- free_framebuffers(framebuffer, n_framebuffer);
- g_free(framebuffer);
- framebuffer = NULL;
- n_framebuffer = 0;
+ if (backend->fbs != NULL) {
+ free_framebuffers(backend);
}
return;
}
- param->errCode = start_capturing();
- if (param->errCode) {
- if (framebuffer != NULL) {
- free_framebuffers(framebuffer, n_framebuffer);
- g_free(framebuffer);
- framebuffer = NULL;
- n_framebuffer = 0;
+ state->ret_val = start_capturing(backend);
+ if (state->ret_val) {
+ if (backend->fbs != NULL) {
+ free_framebuffers(backend);
}
return;
}
INFO("Starting preview\n");
- state->buf_size = dst_fmt.fmt.pix.sizeimage;
+ state->buf_size = backend->dst_fmt.fmt.pix.sizeimage;
qemu_mutex_lock(&state->thread_mutex);
qemu_cond_signal(&state->thread_cond);
qemu_mutex_unlock(&state->thread_mutex);
}
}
-void marucam_device_stop_preview(MaruCamState *state)
+static void backend_v4l2_stream_off(MaruCamState *state)
{
struct timespec req;
struct v4l2_requestbuffers reqbuf;
- MaruCamParam *param = state->param;
- param->top = 0;
req.tv_sec = 0;
req.tv_nsec = 50000000;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
if (is_streamon(state)) {
qemu_mutex_lock(&state->thread_mutex);
}
}
- if (has_success_frame) {
- saved_frame.width = dst_fmt.fmt.pix.width;
- saved_frame.height = dst_fmt.fmt.pix.height;
- saved_frame.size = dst_fmt.fmt.pix.sizeimage;
- if (saved_frame.data) {
- g_free(saved_frame.data);
- saved_frame.data = NULL;
- }
- saved_frame.data = (void *)g_malloc0(saved_frame.size);
- memcpy(saved_frame.data,
- framebuffer[previous_frame_index].data,
- saved_frame.size);
- TRACE("Saves a frame data\n");
+ if (backend->has_success) {
+ backend_v4l2_storedframe_set(backend);
}
- param->errCode = stop_capturing();
- if (framebuffer != NULL) {
- free_framebuffers(framebuffer, n_framebuffer);
- g_free(framebuffer);
- framebuffer = NULL;
- n_framebuffer = 0;
+ state->ret_val = stop_capturing(backend);
+ if (state->ret_val) {
+ ERR("Try again to turn off streaming\n");
+ state->ret_val = stop_capturing(backend);
+ }
+ if (backend->fbs != NULL) {
+ free_framebuffers(backend);
}
state->buf_size = 0;
reqbuf.count = 0;
reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
reqbuf.memory = V4L2_MEMORY_MMAP;
- if (xioctl(v4l2_fd, VIDIOC_REQBUFS, &reqbuf) < 0) {
+ if (xioctl(backend->fd, VIDIOC_REQBUFS, &reqbuf) < 0) {
ERR("Failed to ioctl() with VIDIOC_REQBUF in stop_preview: %s\n",
strerror(errno));
}
+
INFO("Stopping preview\n");
}
-void marucam_device_s_param(MaruCamState *state)
-{
- MaruCamParam *param = state->param;
- param->top = 0;
+static void backend_v4l2_s_parm(MaruCamState *state)
+{
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
/* If KVM enabled, We use default FPS of the webcam.
* If KVM disabled, we use mininum FPS of the webcam */
if (!kvm_enabled()) {
- set_maxframeinterval(state, dst_fmt.fmt.pix.pixelformat,
- dst_fmt.fmt.pix.width,
- dst_fmt.fmt.pix.height);
+ set_maxframeinterval(backend->fd,
+ backend->dst_fmt.fmt.pix.pixelformat,
+ backend->dst_fmt.fmt.pix.width,
+ backend->dst_fmt.fmt.pix.height);
}
}
-void marucam_device_g_param(MaruCamState *state)
+static void backend_v4l2_g_parm(MaruCamState *state)
{
- MaruCamParam *param = state->param;
-
+ struct v4l2_captureparm *cp =
+ (struct v4l2_captureparm *)state->io_ptr;
/* We use default FPS of the webcam
* return a fixed value on guest ini file (1/30).
*/
- param->top = 0;
- param->stack[0] = 0x1000; /* V4L2_CAP_TIMEPERFRAME */
- param->stack[1] = 1; /* numerator */
- param->stack[2] = 30; /* denominator */
+ cp->capability = V4L2_CAP_TIMEPERFRAME;
+ cp->timeperframe.numerator = 1;
+ cp->timeperframe.denominator = 30;
}
-void marucam_device_s_fmt(MaruCamState *state)
+static void backend_v4l2_s_fmt(MaruCamState *state)
{
- struct v4l2_format format;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- CLEAR(format);
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- format.fmt.pix.width = param->stack[0];
- format.fmt.pix.height = param->stack[1];
- format.fmt.pix.pixelformat = param->stack[2];
- format.fmt.pix.field = V4L2_FIELD_ANY;
-
- if (xioctl(v4l2_fd, VIDIOC_S_FMT, &format) < 0) {
- ERR("Failed to set video format: format(0x%x), width:height(%d:%d), "
- "errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,
- format.fmt.pix.height, strerror(errno));
- param->errCode = errno;
+ struct v4l2_format fmt;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
+
+ CLEAR(fmt);
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ memcpy(&(fmt.fmt.pix), f, sizeof(struct v4l2_pix_format));
+
+ if (xioctl(backend->fd, VIDIOC_S_FMT, &fmt) < 0) {
+ ERR("Failed to set video format: format(0x%x), w:h(%d:%d), "
+ "errstr(%s)\n", fmt.fmt.pix.pixelformat, fmt.fmt.pix.width,
+ fmt.fmt.pix.height, strerror(errno));
+ state->ret_val = errno;
return;
}
-
- memcpy(&dst_fmt, &format, sizeof(format));
- param->stack[0] = dst_fmt.fmt.pix.width;
- param->stack[1] = dst_fmt.fmt.pix.height;
- param->stack[2] = dst_fmt.fmt.pix.field;
- param->stack[3] = dst_fmt.fmt.pix.pixelformat;
- param->stack[4] = dst_fmt.fmt.pix.bytesperline;
- param->stack[5] = dst_fmt.fmt.pix.sizeimage;
- param->stack[6] = dst_fmt.fmt.pix.colorspace;
- param->stack[7] = dst_fmt.fmt.pix.priv;
- TRACE("Set the format: w:h(%dx%d), fmt(0x%x), size(%d), "
- "color(%d), field(%d)\n",
- dst_fmt.fmt.pix.width, dst_fmt.fmt.pix.height,
- dst_fmt.fmt.pix.pixelformat, dst_fmt.fmt.pix.sizeimage,
- dst_fmt.fmt.pix.colorspace, dst_fmt.fmt.pix.field);
+ memcpy(f, &(fmt.fmt.pix), sizeof(struct v4l2_pix_format));
+ memcpy(&backend->dst_fmt, &fmt, sizeof(struct v4l2_format));
+
+ TRACE("Set the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ fmt.fmt.pix.width, fmt.fmt.pix.height,
+ fmt.fmt.pix.pixelformat, fmt.fmt.pix.bytesperline,
+ fmt.fmt.pix.sizeimage, fmt.fmt.pix.colorspace);
}
-void marucam_device_g_fmt(MaruCamState *state)
+static void backend_v4l2_g_fmt(MaruCamState *state)
{
- struct v4l2_format format;
- MaruCamParam *param = state->param;
+ struct v4l2_format fmt;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- param->top = 0;
- CLEAR(format);
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ CLEAR(fmt);
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ memcpy(&(fmt.fmt.pix), f, sizeof(struct v4l2_pix_format));
- if (xioctl(v4l2_fd, VIDIOC_G_FMT, &format) < 0) {
+ if (xioctl(backend->fd, VIDIOC_G_FMT, &fmt) < 0) {
ERR("Failed to get video format: %s\n", strerror(errno));
- param->errCode = errno;
- } else {
- param->stack[0] = format.fmt.pix.width;
- param->stack[1] = format.fmt.pix.height;
- param->stack[2] = format.fmt.pix.field;
- param->stack[3] = format.fmt.pix.pixelformat;
- param->stack[4] = format.fmt.pix.bytesperline;
- param->stack[5] = format.fmt.pix.sizeimage;
- param->stack[6] = format.fmt.pix.colorspace;
- param->stack[7] = format.fmt.pix.priv;
- TRACE("Get the format: w:h(%dx%d), fmt(0x%x), size(%d), "
- "color(%d), field(%d)\n",
- format.fmt.pix.width, format.fmt.pix.height,
- format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,
- format.fmt.pix.colorspace, format.fmt.pix.field);
+ state->ret_val = errno;
+ return;
}
+ memcpy(f, &(fmt.fmt.pix), sizeof(struct v4l2_pix_format));
+
+ TRACE("Get the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ fmt.fmt.pix.width, fmt.fmt.pix.height,
+ fmt.fmt.pix.pixelformat, fmt.fmt.pix.bytesperline,
+ fmt.fmt.pix.sizeimage, fmt.fmt.pix.colorspace);
}
-void marucam_device_try_fmt(MaruCamState *state)
+static void backend_v4l2_try_fmt(MaruCamState *state)
{
- struct v4l2_format format;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- CLEAR(format);
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- format.fmt.pix.width = param->stack[0];
- format.fmt.pix.height = param->stack[1];
- format.fmt.pix.pixelformat = param->stack[2];
- format.fmt.pix.field = V4L2_FIELD_ANY;
-
- if (xioctl(v4l2_fd, VIDIOC_TRY_FMT, &format) < 0) {
- ERR("Failed to check video format: format(0x%x), width:height(%d:%d),"
- " errstr(%s)\n", format.fmt.pix.pixelformat, format.fmt.pix.width,
- format.fmt.pix.height, strerror(errno));
- param->errCode = errno;
+ struct v4l2_format fmt;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
+
+ CLEAR(fmt);
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ memcpy(&(fmt.fmt.pix), f, sizeof(struct v4l2_pix_format));
+
+ if (xioctl(backend->fd, VIDIOC_TRY_FMT, &fmt) < 0) {
+ ERR("Failed to check video format: format(0x%x), w:h(%d:%d),"
+ " errstr(%s)\n", fmt.fmt.pix.pixelformat, fmt.fmt.pix.width,
+ fmt.fmt.pix.height, strerror(errno));
+ state->ret_val = errno;
return;
}
- param->stack[0] = format.fmt.pix.width;
- param->stack[1] = format.fmt.pix.height;
- param->stack[2] = format.fmt.pix.field;
- param->stack[3] = format.fmt.pix.pixelformat;
- param->stack[4] = format.fmt.pix.bytesperline;
- param->stack[5] = format.fmt.pix.sizeimage;
- param->stack[6] = format.fmt.pix.colorspace;
- param->stack[7] = format.fmt.pix.priv;
- TRACE("Check the format: w:h(%dx%d), fmt(0x%x), size(%d), "
- "color(%d), field(%d)\n",
- format.fmt.pix.width, format.fmt.pix.height,
- format.fmt.pix.pixelformat, format.fmt.pix.sizeimage,
- format.fmt.pix.colorspace, format.fmt.pix.field);
+ memcpy(f, &(fmt.fmt.pix), sizeof(struct v4l2_pix_format));
+
+ TRACE("Check the format: w:h(%dx%d), pix_fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ fmt.fmt.pix.width, fmt.fmt.pix.height,
+ fmt.fmt.pix.pixelformat, fmt.fmt.pix.bytesperline,
+ fmt.fmt.pix.sizeimage, fmt.fmt.pix.colorspace);
}
-void marucam_device_enum_fmt(MaruCamState *state)
+static void backend_v4l2_enum_fmt(MaruCamState *state)
{
- uint32_t index;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
+ struct v4l2_fmtdesc *f = (struct v4l2_fmtdesc *)state->io_ptr;
- if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (f->index >= ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[1] = 0; /* flags = NONE */
- param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
+
+ f->flags = 0; /* flags = NONE */
+ f->pixelformat = support_fmts[f->index]; /* pixelformat */
+
/* set description */
- switch (supported_dst_pixfmts[index].fmt) {
+ switch (support_fmts[f->index]) {
case V4L2_PIX_FMT_YUYV:
- strcpy((char *)¶m->stack[3], "YUYV");
+ pstrcpy((char *)f->description, sizeof(f->description), "YUYV");
break;
case V4L2_PIX_FMT_YUV420:
- strcpy((char *)¶m->stack[3], "YU12");
+ pstrcpy((char *)f->description, sizeof(f->description), "YU12");
break;
case V4L2_PIX_FMT_YVU420:
- strcpy((char *)¶m->stack[3], "YV12");
+ pstrcpy((char *)f->description, sizeof(f->description), "YV12");
break;
default:
ERR("Invalid fixel format\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
break;
}
}
-void marucam_device_qctrl(MaruCamState *state)
+static void backend_v4l2_query_ctrl(MaruCamState *state)
{
uint32_t i;
- char name[32] = {0,};
- struct v4l2_queryctrl ctrl;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- CLEAR(ctrl);
- ctrl.id = param->stack[0];
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
+ struct v4l2_queryctrl *qc = (struct v4l2_queryctrl *)state->io_ptr;
/* NOTICE: Tizen MMFW hardcoded for control name
Do Not Modified the name
*/
- switch (ctrl.id) {
+ switch (qc->id) {
case V4L2_CID_BRIGHTNESS:
TRACE("Query : BRIGHTNESS\n");
- strcpy(name, "brightness");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "brightness");
i = 0;
break;
case V4L2_CID_CONTRAST:
TRACE("Query : CONTRAST\n");
- strcpy(name, "contrast");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "contrast");
i = 1;
break;
case V4L2_CID_SATURATION:
TRACE("Query : SATURATION\n");
- strcpy(name, "saturation");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "saturation");
i = 2;
break;
case V4L2_CID_SHARPNESS:
TRACE("Query : SHARPNESS\n");
- strcpy(name, "sharpness");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "sharpness");
i = 3;
break;
default:
ERR("Invalid control ID\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- if (xioctl(v4l2_fd, VIDIOC_QUERYCTRL, &ctrl) < 0) {
+ if (xioctl(backend->fd, VIDIOC_QUERYCTRL, qc) < 0) {
if (errno != EINVAL) {
ERR("Failed to query video controls: %s\n", strerror(errno));
}
- param->errCode = errno;
+ state->ret_val = errno;
return;
} else {
struct v4l2_control sctrl, gctrl;
CLEAR(sctrl);
CLEAR(gctrl);
- sctrl.id = gctrl.id = ctrl.id;
- if (xioctl(v4l2_fd, VIDIOC_G_CTRL, &gctrl) < 0) {
+ sctrl.id = gctrl.id = qc->id;
+ if (xioctl(backend->fd, VIDIOC_G_CTRL, &gctrl) < 0) {
ERR("[%s] Failed to get video control value: id(0x%x), "
"errstr(%s)\n",
__func__, gctrl.id, strerror(errno));
- param->errCode = errno;
+ state->ret_val = errno;
return;
}
- qctrl_tbl[i].hit = 1;
- qctrl_tbl[i].min = ctrl.minimum;
- qctrl_tbl[i].max = ctrl.maximum;
- qctrl_tbl[i].step = ctrl.step;
- qctrl_tbl[i].init_val = gctrl.value;
+ ctrl_tbl[i].hit = 1;
+ ctrl_tbl[i].min = qc->minimum;
+ ctrl_tbl[i].max = qc->maximum;
+ ctrl_tbl[i].step = qc->step;
+ ctrl_tbl[i].init_val = gctrl.value;
- if ((ctrl.maximum + ctrl.minimum) == 0) {
+ if ((qc->maximum + qc->minimum) == 0) {
sctrl.value = 0;
} else {
- sctrl.value = (ctrl.maximum + ctrl.minimum) / 2;
+ sctrl.value = (qc->maximum + qc->minimum) / 2;
}
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &sctrl) < 0) {
+ if (xioctl(backend->fd, VIDIOC_S_CTRL, &sctrl) < 0) {
ERR("[%s] Failed to set control value: id(0x%x), value(%d), "
"errstr(%s)\n",
__func__, sctrl.id, sctrl.value, strerror(errno));
- param->errCode = errno;
+ state->ret_val = errno;
return;
}
- INFO("Query Control: id(0x%x), name(%s), min(%d), max(%d), "
- "step(%d), def_value(%d)\n"
- "flags(0x%x), get_value(%d), set_value(%d)\n",
- ctrl.id, ctrl.name, ctrl.minimum, ctrl.maximum,
- ctrl.step, ctrl.default_value, ctrl.flags,
+ INFO("Query Control: id(0x%x), name(%s), min(%d), max(%d), step(%d), "
+ "def_value(%d), flags(0x%x), get_value(%d), set_value(%d)\n",
+ qc->id, qc->name, qc->minimum, qc->maximum,
+ qc->step, qc->default_value, qc->flags,
gctrl.value, sctrl.value);
}
/* set fixed values by FW configuration file */
- param->stack[0] = ctrl.id;
- param->stack[1] = MARUCAM_CTRL_VALUE_MIN; /* minimum */
- param->stack[2] = MARUCAM_CTRL_VALUE_MAX; /* maximum */
- param->stack[3] = MARUCAM_CTRL_VALUE_STEP; /* step */
- param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */
- param->stack[5] = ctrl.flags;
- /* name field setting */
- memcpy(¶m->stack[6], (void *)name, sizeof(ctrl.name));
+ qc->minimum = MARUCAM_CTRL_VALUE_MIN; /* minimum */
+ qc->maximum = MARUCAM_CTRL_VALUE_MAX; /* maximum */
+ qc->step = MARUCAM_CTRL_VALUE_STEP; /* step */
+ qc->default_value = MARUCAM_CTRL_VALUE_MID; /* default_value */
}
-void marucam_device_s_ctrl(MaruCamState *state)
+static void backend_v4l2_s_ctrl(MaruCamState *state)
{
uint32_t i;
- struct v4l2_control ctrl;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- CLEAR(ctrl);
- ctrl.id = param->stack[0];
+ struct v4l2_control *ctrl = (struct v4l2_control *)state->io_ptr;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
- switch (ctrl.id) {
+ switch (ctrl->id) {
case V4L2_CID_BRIGHTNESS:
i = 0;
- TRACE("%d is set to the value of the BRIGHTNESS\n", param->stack[1]);
+ TRACE("%d is set to the value of the BRIGHTNESS\n", ctrl->value);
break;
case V4L2_CID_CONTRAST:
i = 1;
- TRACE("%d is set to the value of the CONTRAST\n", param->stack[1]);
+ TRACE("%d is set to the value of the CONTRAST\n", ctrl->value);
break;
case V4L2_CID_SATURATION:
i = 2;
- TRACE("%d is set to the value of the SATURATION\n", param->stack[1]);
+ TRACE("%d is set to the value of the SATURATION\n", ctrl->value);
break;
case V4L2_CID_SHARPNESS:
i = 3;
- TRACE("%d is set to the value of the SHARPNESS\n", param->stack[1]);
+ TRACE("%d is set to the value of the SHARPNESS\n", ctrl->value);
break;
default:
- ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);
- param->errCode = EINVAL;
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl->id);
+ state->ret_val = EINVAL;
return;
}
- ctrl.value = value_convert_from_guest(qctrl_tbl[i].min,
- qctrl_tbl[i].max, param->stack[1]);
- if (xioctl(v4l2_fd, VIDIOC_S_CTRL, &ctrl) < 0) {
- ERR("Failed to set control value: id(0x%x), value(r:%d, c:%d), "
- "errstr(%s)\n", ctrl.id, param->stack[1], ctrl.value,
- strerror(errno));
- param->errCode = errno;
+ ctrl->value = value_convert_from_guest(ctrl_tbl[i].min,
+ ctrl_tbl[i].max, ctrl->value);
+ if (xioctl(backend->fd, VIDIOC_S_CTRL, ctrl) < 0) {
+ ERR("Failed to set control value: id(0x%x), value(%d), errstr(%s)\n",
+ ctrl->id, ctrl->value, strerror(errno));
+ state->ret_val = errno;
return;
}
}
-void marucam_device_g_ctrl(MaruCamState *state)
+static void backend_v4l2_g_ctrl(MaruCamState *state)
{
uint32_t i;
- struct v4l2_control ctrl;
- MaruCamParam *param = state->param;
+ struct v4l2_control *ctrl = (struct v4l2_control *)state->io_ptr;
+ MCBackendV4l2 *backend = (MCBackendV4l2 *)(state->backend);
- param->top = 0;
- CLEAR(ctrl);
- ctrl.id = param->stack[0];
-
- switch (ctrl.id) {
+ switch (ctrl->id) {
case V4L2_CID_BRIGHTNESS:
TRACE("Gets the value of the BRIGHTNESS\n");
i = 0;
i = 3;
break;
default:
- ERR("Our emulator does not support this control: 0x%x\n", ctrl.id);
- param->errCode = EINVAL;
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl->id);
+ state->ret_val = EINVAL;
return;
}
- if (xioctl(v4l2_fd, VIDIOC_G_CTRL, &ctrl) < 0) {
+ if (xioctl(backend->fd, VIDIOC_G_CTRL, ctrl) < 0) {
ERR("Failed to get video control value: %s\n", strerror(errno));
- param->errCode = errno;
+ state->ret_val = errno;
return;
}
- param->stack[0] = value_convert_to_guest(qctrl_tbl[i].min,
- qctrl_tbl[i].max, ctrl.value);
- TRACE("Value: %d\n", param->stack[0]);
+ ctrl->value = value_convert_to_guest(ctrl_tbl[i].min,
+ ctrl_tbl[i].max, ctrl->value);
+ TRACE("Value: %d\n", ctrl->value);
}
-void marucam_device_enum_fsizes(MaruCamState *state)
+static void backend_v4l2_enum_fsizes(MaruCamState *state)
{
- uint32_t index, pixfmt, i;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
- pixfmt = param->stack[1];
+ struct v4l2_frmsizeenum *fsize =
+ (struct v4l2_frmsizeenum *)state->io_ptr;
+ uint32_t i;
- if (index >= ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (fsize->index >= ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == fsize->pixel_format) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (i == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = supported_dst_frames[index].width;
- param->stack[1] = supported_dst_frames[index].height;
+ fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
+ fsize->discrete.width = support_frames[fsize->index].width;
+ fsize->discrete.height = support_frames[fsize->index].height;
}
-void marucam_device_enum_fintv(MaruCamState *state)
+static void backend_v4l2_enum_fintv(MaruCamState *state)
{
- MaruCamParam *param = state->param;
-
- param->top = 0;
+ struct v4l2_frmivalenum *fival =
+ (struct v4l2_frmivalenum *)state->io_ptr;
- /* switch by index(param->stack[0]) */
- switch (param->stack[0]) {
+    /* switch by index */
+ switch (fival->index) {
case 0:
/* we only use 1/30 frame interval */
- param->stack[1] = 30; /* denominator */
+ fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
+ fival->discrete.numerator = 1;
+ fival->discrete.denominator = 30;
break;
default:
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = 1; /* numerator */
}
-void marucam_device_close(MaruCamState *state)
+MaruCamBackend *marucam_backend_create(MaruCamState *state)
{
- if (!is_stream_paused(state)) {
- marucam_device_stop_preview(state);
- }
+ MCBackendV4l2 *backend_v4l2;
- marucam_reset_controls();
-
- if (saved_frame.data) {
- g_free(saved_frame.data);
- saved_frame.data = NULL;
+ backend_v4l2 = g_new0(MCBackendV4l2, 1);
+ if (backend_v4l2 == NULL) {
+ ERR("Failed to allocate memory for the native backend\n");
+ return NULL;
}
- memset(&saved_frame, 0x00, sizeof(saved_frame));
- if (v4l2_fd >= 0) {
- v4l2_close(v4l2_fd);
- v4l2_fd = -1;
- }
- INFO("Closed\n");
+ backend_v4l2->base.state = state;
+ backend_v4l2->base.init = backend_v4l2_init;
+ backend_v4l2->base.reset = backend_v4l2_reset;
+ backend_v4l2->base.release = backend_v4l2_release;
+ backend_v4l2->base.open = backend_v4l2_open;
+ backend_v4l2->base.close = backend_v4l2_close;
+ backend_v4l2->base.stream_on = backend_v4l2_stream_on;
+ backend_v4l2->base.stream_off = backend_v4l2_stream_off;
+ backend_v4l2->base.enum_fmt = backend_v4l2_enum_fmt;
+ backend_v4l2->base.try_fmt = backend_v4l2_try_fmt;
+ backend_v4l2->base.s_fmt = backend_v4l2_s_fmt;
+ backend_v4l2->base.g_fmt = backend_v4l2_g_fmt;
+ backend_v4l2->base.s_parm = backend_v4l2_s_parm;
+ backend_v4l2->base.g_parm = backend_v4l2_g_parm;
+ backend_v4l2->base.query_ctrl = backend_v4l2_query_ctrl;
+ backend_v4l2->base.s_ctrl = backend_v4l2_s_ctrl;
+ backend_v4l2->base.g_ctrl = backend_v4l2_g_ctrl;
+ backend_v4l2->base.enum_framesizes = backend_v4l2_enum_fsizes;
+ backend_v4l2->base.enum_frameintervals = backend_v4l2_enum_fintv;
+
+ /* set default values */
+ backend_v4l2->fd = -1;
+
+ return &backend_v4l2->base;
}
#define CINTERFACE
#define COBJMACROS
#include "ocidl.h"
+#include "objidl.h"
#include "errors.h" /* for VFW_E_XXXX */
#include "mmsystem.h" /* for MAKEFOURCC macro */
#include "maru_camera_win32.h"
+#include "videodev2_min.h"
+#include "maru_camera_convert.h"
MULTI_DEBUG_CHANNEL(tizen, camera);
} \
} while (0)
-typedef HRESULT (STDAPICALLTYPE *CallbackFn)(ULONG dwSize, BYTE *pBuffer);
+typedef HRESULT (STDAPICALLTYPE *CallbackFn)(MaruCamState *pState,
+ ULONG dwSize, BYTE *pBuffer);
/*
* HWCGrabCallback
typedef struct HWCGrabCallback {
IGrabCallback IGrabCallback_iface;
long m_cRef;
+ MaruCamState *m_pState;
CallbackFn m_pCallback;
- STDMETHODIMP (*SetCallback)(IGrabCallback *iface, CallbackFn pCallbackFn);
} HWCGrabCallback;
static inline HWCGrabCallback *impl_from_IGrabCallback(IGrabCallback *iface)
if (InterlockedDecrement(&This->m_cRef) == 0) {
This->m_pCallback = NULL;
+ This->m_pState = NULL;
g_free((void *)This);
This = NULL;
return 0;
HWCGrabCallback *This = impl_from_IGrabCallback(iface);
if (This->m_pCallback) {
- HRESULT hr = This->m_pCallback(dwSize, pBuffer);
+ HRESULT hr = This->m_pCallback(This->m_pState, dwSize, pBuffer);
if (FAILED(hr)) {
return E_FAIL;
} else {
return E_FAIL;
}
-static STDMETHODIMP HWCGrabCallback_SetCallback(IGrabCallback *iface,
- CallbackFn pCallbackFn)
-{
- HWCGrabCallback *This = impl_from_IGrabCallback(iface);
-
- This->m_pCallback = pCallbackFn;
- return S_OK;
-}
-
static IGrabCallbackVtbl HWCGrabCallback_Vtbl = {
HWCGrabCallback_QueryInterface,
HWCGrabCallback_AddRef,
This->IGrabCallback_iface.lpVtbl = &HWCGrabCallback_Vtbl;
This->m_cRef = 1;
This->m_pCallback = NULL;
- This->SetCallback = HWCGrabCallback_SetCallback;
+ This->m_pState = NULL;
*ppv = &This->IGrabCallback_iface;
return S_OK;
}
+static STDMETHODIMP HWCGrabCallback_SetCallback(IGrabCallback *iface,
+ CallbackFn pCallbackFn)
+{
+ HWCGrabCallback *This = impl_from_IGrabCallback(iface);
+
+ This->m_pCallback = pCallbackFn;
+ return S_OK;
+}
+
+static STDMETHODIMP HWCGrabCallback_SetState(IGrabCallback *iface,
+ MaruCamState *pState)
+{
+ HWCGrabCallback *This = impl_from_IGrabCallback(iface);
+
+ This->m_pState = pState;
+ return S_OK;
+}
+
/*
* HWCPin
*/
IMemAllocator *m_pAllocator;
BOOL m_bReadOnly;
long m_cRef;
- STDMETHODIMP (*SetGrabCallbackIF)(IPin *iface, IGrabCallback *pCaptureCB);
} HWCInPin;
static inline HWCInPin *impl_from_IPin(IPin *iface)
This->m_pCallback = NULL;
This->m_pAllocator = NULL;
This->m_cRef = 1;
- This->SetGrabCallbackIF = HWCPin_SetCallback;
*ppv = &This->IPin_iface;
return S_OK;
*
**********************************************************/
-
-/*
- * Declaration global variables for Win32 COM Interfaces
- */
-IGraphBuilder *g_pGB ;
-ICaptureGraphBuilder2 *g_pCGB;
-IMediaControl *g_pMediaControl;
-
-IPin *g_pOutputPin;
-IPin *g_pInputPin;
-IBaseFilter *g_pDstFilter;
-IBaseFilter *g_pSrcFilter;
-
-IGrabCallback *g_pCallback;
-
-typedef struct tagMaruCamConvertPixfmt {
- uint32_t fmt; /* fourcc */
- uint32_t bpp; /* bits per pixel, 0 for compressed formats */
- uint32_t needs_conversion;
-} MaruCamConvertPixfmt;
-
-static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
- { V4L2_PIX_FMT_YUYV, 16, 0 },
- { V4L2_PIX_FMT_YUV420, 12, 0 },
- { V4L2_PIX_FMT_YVU420, 12, 0 },
+static uint32_t support_fmts[] = {
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420,
};
-typedef struct tagMaruCamConvertFrameInfo {
+struct MCFrame {
uint32_t width;
uint32_t height;
-} MaruCamConvertFrameInfo;
-
-static MaruCamConvertFrameInfo supported_dst_frames[] = {
- { 640, 480 },
- { 352, 288 },
- { 320, 240 },
- { 176, 144 },
- { 160, 120 },
};
-#define MARUCAM_CTRL_VALUE_MAX 20
-#define MARUCAM_CTRL_VALUE_MIN 1
-#define MARUCAM_CTRL_VALUE_MID 10
-#define MARUCAM_CTRL_VALUE_STEP 1
+static struct MCFrame support_frames[] = {
+ { 640, 480 },
+ { 352, 288 },
+ { 320, 240 },
+ { 176, 144 },
+ { 160, 120 },
+};
-struct marucam_qctrl {
+struct MCControls {
uint32_t id;
uint32_t hit;
long min;
long init_val;
};
-static struct marucam_qctrl qctrl_tbl[] = {
+static struct MCControls ctrl_tbl[] = {
{ V4L2_CID_BRIGHTNESS, 0, },
{ V4L2_CID_CONTRAST, 0, },
{ V4L2_CID_SATURATION, 0, },
{ V4L2_CID_SHARPNESS, 0, },
};
-static MaruCamState *g_state;
-
-static uint32_t ready_count;
-static uint32_t cur_fmt_idx;
-static uint32_t cur_frame_idx;
-static void *grab_buf;
-static uint32_t g_dwSrcFmt;
+typedef struct MCBackendWin {
+ MaruCamBackend base;
+ /*
+ * Declaration variables for Win32 COM Interfaces
+ */
+ IGraphBuilder *pGB;
+ ICaptureGraphBuilder2 *pCGB;
+ IMediaControl *pMC;
+ IPin *pOutPin;
+ IPin *pInPin;
+ IBaseFilter *pDstFilter;
+ IBaseFilter *pSrcFilter;
+ IGrabCallback *pCallback;
+
+ uint32_t ready_count;
+ uint32_t src_fmt;
+ uint32_t dst_width;
+ uint32_t dst_height;
+ uint32_t dst_fmt;
+ void *buf;
+} MCBackendWin;
/*
* Helper functions - converting values
/*
* Callback function for grab frames
*/
-static STDMETHODIMP marucam_device_callbackfn(ULONG dwSize, BYTE *pBuffer)
+static STDMETHODIMP GrabFrameCallback(MaruCamState *state,
+ ULONG dwSize, BYTE *pBuffer)
{
void *tmp_buf;
- uint32_t width, height, fmt, imgsize;
+ uint32_t imgsize;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
- fmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- imgsize = get_sizeimage(fmt, width, height);
+ imgsize = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
if (imgsize > (uint32_t)dwSize) {
ERR("Image size is mismatched\n");
return E_FAIL;
}
- if (convert_frame(g_dwSrcFmt, fmt, width, height,
- (size_t)dwSize, pBuffer, grab_buf) > 0) {
+ if (convert_frame(backend->src_fmt, backend->dst_fmt,
+ backend->dst_width, backend->dst_height,
+ (size_t)dwSize, pBuffer, backend->buf, true) > 0) {
return E_FAIL;
}
- qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon) {
- if (ready_count < MARUCAM_SKIPFRAMES) {
+ qemu_mutex_lock(&state->thread_mutex);
+ if (state->streamon) {
+ if (backend->ready_count < MARUCAM_SKIPFRAMES) {
/* skip a frame cause first some frame are distorted */
- ++ready_count;
- TRACE("skip %d frame\n", ready_count);
- qemu_mutex_unlock(&g_state->thread_mutex);
+ ++backend->ready_count;
+ TRACE("skip %d frame\n", backend->ready_count);
+ qemu_mutex_unlock(&state->thread_mutex);
return S_OK;
}
- if (g_state->req_frame == 0) {
+ if (state->req_frame == 0) {
TRACE("there is no request\n");
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return S_OK;
}
- tmp_buf = g_state->vaddr + g_state->buf_size * (g_state->req_frame - 1);
- memcpy(tmp_buf, grab_buf, g_state->buf_size);
- g_state->req_frame = 0; /* clear request */
- g_state->isr |= 0x01; /* set a flag of rasing a interrupt */
- qemu_bh_schedule(g_state->tx_bh);
+ tmp_buf = state->fb_ptr + state->buf_size * (state->req_frame - 1);
+ memcpy(tmp_buf, backend->buf, state->buf_size);
+ state->req_frame = 0; /* clear request */
+ state->isr |= 0x01; /* set a flag of raising an interrupt */
+ qemu_bh_schedule(state->tx_bh);
}
- qemu_mutex_unlock(&g_state->thread_mutex);
+ qemu_mutex_unlock(&state->thread_mutex);
return S_OK;
}
* Internal functions for manipulate interfaces
*/
-static STDMETHODIMP_(void) CloseInterfaces(void)
+static STDMETHODIMP_(void) CloseInterfaces(MCBackendWin *backend)
{
- if (g_pMediaControl) {
- g_pMediaControl->lpVtbl->Stop(g_pMediaControl);
+ if (backend->pMC) {
+ IMediaControl_Stop(backend->pMC);
}
- if (g_pOutputPin) {
- g_pOutputPin->lpVtbl->Disconnect(g_pOutputPin);
+ if (backend->pOutPin) {
+ IPin_Disconnect(backend->pOutPin);
}
- SAFE_RELEASE(g_pGB);
- SAFE_RELEASE(g_pCGB);
- SAFE_RELEASE(g_pMediaControl);
- SAFE_RELEASE(g_pOutputPin);
- SAFE_RELEASE(g_pInputPin);
- SAFE_RELEASE(g_pDstFilter);
- SAFE_RELEASE(g_pSrcFilter);
- SAFE_RELEASE(g_pCallback);
+ SAFE_RELEASE(backend->pGB);
+ SAFE_RELEASE(backend->pCGB);
+ SAFE_RELEASE(backend->pMC);
+ SAFE_RELEASE(backend->pOutPin);
+ SAFE_RELEASE(backend->pInPin);
+ SAFE_RELEASE(backend->pDstFilter);
+ SAFE_RELEASE(backend->pSrcFilter);
+ SAFE_RELEASE(backend->pCallback);
}
static STDMETHODIMP_(void) DeleteMediaType(AM_MEDIA_TYPE *pmt)
pmt->pbFormat = NULL;
}
if (pmt->pUnk != NULL) {
- pmt->pUnk->lpVtbl->Release(pmt->pUnk);
+ IUnknown_Release(pmt->pUnk);
pmt->pUnk = NULL;
}
return E_POINTER;
}
- hr = pFilter->lpVtbl->EnumPins(pFilter, &pEnum);
+ hr = IBaseFilter_EnumPins(pFilter, &pEnum);
if (FAILED(hr)) {
return hr;
}
- while (pEnum->lpVtbl->Next(pEnum, 1, &pPin, 0) == S_OK) {
+ while (IEnumPins_Next(pEnum, 1, &pPin, 0) == S_OK) {
PIN_DIRECTION PinDirThis;
- hr = pPin->lpVtbl->QueryDirection(pPin, &PinDirThis);
+ hr = IPin_QueryDirection(pPin, &PinDirThis);
if (FAILED(hr)) {
SAFE_RELEASE(pPin);
SAFE_RELEASE(pEnum);
return S_FALSE;
}
-static STDMETHODIMP GraphBuilder_Init(void)
+static STDMETHODIMP GraphBuilder_Init(MCBackendWin *backend)
{
HRESULT hr;
hr = CoCreateInstance(&CLSID_FilterGraph, NULL, CLSCTX_INPROC,
- &IID_IGraphBuilder, (void **)&g_pGB);
+ &IID_IGraphBuilder, (void **)&backend->pGB);
if (FAILED(hr)) {
ERR("Failed to create instance of GraphBuilder, 0x%x\n", hr);
return hr;
}
hr = CoCreateInstance(&CLSID_CaptureGraphBuilder2, NULL, CLSCTX_INPROC,
- &IID_ICaptureGraphBuilder2, (void **)&g_pCGB);
+ &IID_ICaptureGraphBuilder2,
+ (void **)&backend->pCGB);
if (FAILED(hr)) {
ERR("Failed to create instance of CaptureGraphBuilder2, 0x%x\n", hr);
return hr;
}
- hr = g_pCGB->lpVtbl->SetFiltergraph(g_pCGB, g_pGB);
+ hr = ICaptureGraphBuilder2_SetFiltergraph(backend->pCGB, backend->pGB);
if (FAILED(hr)) {
ERR("Failed to SetFiltergraph, 0x%x\n", hr);
return hr;
}
- hr = g_pGB->lpVtbl->QueryInterface(g_pGB, &IID_IMediaControl,
- (void **)&g_pMediaControl);
+ hr = IGraphBuilder_QueryInterface(backend->pGB, &IID_IMediaControl,
+ (void **)&backend->pMC);
if (FAILED(hr)) {
ERR("Failed to QueryInterface for IMediaControl, 0x%x\n", hr);
return hr;
}
- hr = HWCGrabCallback_Construct(&g_pCallback);
- if (g_pCallback == NULL) {
+ hr = HWCGrabCallback_Construct(&backend->pCallback);
+ if (backend->pCallback == NULL) {
hr = E_OUTOFMEMORY;
+ return hr;
}
- hr = ((HWCGrabCallback *)g_pCallback)->SetCallback(g_pCallback,
- (CallbackFn)marucam_device_callbackfn);
+ hr = HWCGrabCallback_SetCallback(backend->pCallback, GrabFrameCallback);
+ hr = HWCGrabCallback_SetState(backend->pCallback, backend->base.state);
return hr;
}
-static STDMETHODIMP BindSourceFilter(void)
+static STDMETHODIMP BindSourceFilter(MCBackendWin *backend)
{
HRESULT hr;
ICreateDevEnum *pCreateDevEnum = NULL;
IEnumMoniker *pEnumMK = NULL;
- IMoniker *pMoniKer;
+ IMoniker *pMoniker;
hr = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
&IID_ICreateDevEnum,
return hr;
}
- hr = pCreateDevEnum->lpVtbl->CreateClassEnumerator(pCreateDevEnum,
- &CLSID_VideoInputDeviceCategory,
- &pEnumMK, 0);
+ hr = ICreateDevEnum_CreateClassEnumerator(pCreateDevEnum,
+ &CLSID_VideoInputDeviceCategory,
+ &pEnumMK, 0);
if (FAILED(hr)) {
ERR("Failed to get VideoInputDeviceCategory, 0x%x\n", hr);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCreateDevEnum);
return E_FAIL;
}
- pEnumMK->lpVtbl->Reset(pEnumMK);
+ IEnumMoniker_Reset(pEnumMK);
- hr = pEnumMK->lpVtbl->Next(pEnumMK, 1, &pMoniKer, NULL);
+ hr = IEnumMoniker_Next(pEnumMK, 1, &pMoniker, NULL);
if (hr == S_FALSE) {
hr = E_FAIL;
}
if (SUCCEEDED(hr)) {
IPropertyBag *pBag = NULL;
- hr = pMoniKer->lpVtbl->BindToStorage(pMoniKer, 0, 0,
- &IID_IPropertyBag,
- (void **)&pBag);
+ hr = IMoniker_BindToStorage(pMoniker, 0, 0,
+ &IID_IPropertyBag,
+ (void **)&pBag);
if (SUCCEEDED(hr)) {
VARIANT var;
var.vt = VT_BSTR;
- hr = pBag->lpVtbl->Read(pBag, L"FriendlyName", &var, NULL);
+ hr = IPropertyBag_Read(pBag, L"FriendlyName", &var, NULL);
if (hr == NOERROR) {
- hr = pMoniKer->lpVtbl->BindToObject(pMoniKer, NULL, NULL,
- &IID_IBaseFilter,
- (void **)&g_pSrcFilter);
+ hr = IMoniker_BindToObject(pMoniker, NULL, NULL,
+ &IID_IBaseFilter,
+ (void **)&backend->pSrcFilter);
if (FAILED(hr)) {
ERR("Counldn't bind moniker to filter object!!\n");
} else {
- g_pSrcFilter->lpVtbl->AddRef(g_pSrcFilter);
+ IBaseFilter_AddRef(backend->pSrcFilter);
}
SysFreeString(var.bstrVal);
}
SAFE_RELEASE(pBag);
}
- SAFE_RELEASE(pMoniKer);
+ SAFE_RELEASE(pMoniker);
}
if (SUCCEEDED(hr)) {
- hr = g_pGB->lpVtbl->AddFilter(g_pGB, g_pSrcFilter, L"Video Capture");
+ hr = IGraphBuilder_AddFilter(backend->pGB,
+ backend->pSrcFilter,
+ L"Video Capture");
if (hr != S_OK && hr != S_FALSE) {
ERR("Counldn't add Video Capture filter to our graph!\n");
- SAFE_RELEASE(g_pSrcFilter);
+ SAFE_RELEASE(backend->pSrcFilter);
}
}
SAFE_RELEASE(pEnumMK);
return hr;
}
-static STDMETHODIMP BindTargetFilter(void)
+static STDMETHODIMP BindTargetFilter(MCBackendWin *backend)
{
HRESULT hr;
- hr = HWCFilter_Construct(&g_pDstFilter);
+ hr = HWCFilter_Construct(&backend->pDstFilter);
- if (SUCCEEDED(hr) && g_pDstFilter) {
- hr = g_pGB->lpVtbl->AddFilter(g_pGB, g_pDstFilter, L"HWCFilter");
+ if (SUCCEEDED(hr) && backend->pDstFilter) {
+ hr = IGraphBuilder_AddFilter(backend->pGB,
+ backend->pDstFilter,
+ L"HWCFilter");
if (FAILED(hr)) {
ERR("Counldn't add HWCFilterr to our graph!\n");
- SAFE_RELEASE(g_pDstFilter);
+ SAFE_RELEASE(backend->pDstFilter);
}
}
return hr;
}
-static STDMETHODIMP ConnectFilters(void)
+static STDMETHODIMP ConnectFilters(MCBackendWin *backend)
{
HRESULT hr;
- hr = GetPin(g_pSrcFilter, PINDIR_OUTPUT , &g_pOutputPin);
+ hr = GetPin(backend->pSrcFilter, PINDIR_OUTPUT , &backend->pOutPin);
if (FAILED(hr)) {
ERR("Failed to get output pin. 0x%x\n", hr);
return hr;
}
- hr = GetPin(g_pDstFilter, PINDIR_INPUT , &g_pInputPin);
+ hr = GetPin(backend->pDstFilter, PINDIR_INPUT , &backend->pInPin);
if (FAILED(hr)) {
ERR("Failed to get input pin. 0x%x\n", hr);
return hr;
}
- hr = g_pGB->lpVtbl->Connect(g_pGB, g_pOutputPin, g_pInputPin);
+ hr = IGraphBuilder_Connect(backend->pGB, backend->pOutPin, backend->pInPin);
if (FAILED(hr)) {
ERR("Failed to connect pins. 0x%x\n", hr);
}
return hr;
}
-static STDMETHODIMP DisconnectPins(void)
+static STDMETHODIMP DisconnectPins(MCBackendWin *backend)
{
HRESULT hr;
- hr = g_pGB->lpVtbl->Disconnect(g_pGB, g_pOutputPin);
+ hr = IGraphBuilder_Disconnect(backend->pGB, backend->pOutPin);
if (FAILED(hr)) {
ERR("Failed to disconnect output pin. 0x%x\n", hr);
return hr;
}
- hr = g_pGB->lpVtbl->Disconnect(g_pGB, g_pInputPin);
+ hr = IGraphBuilder_Disconnect(backend->pGB, backend->pInPin);
if (FAILED(hr)) {
ERR("Failed to disconnect input pin. 0x%x\n", hr);
}
return hr;
}
-static STDMETHODIMP RemoveFilters(void)
+static STDMETHODIMP RemoveFilters(MCBackendWin *backend)
{
HRESULT hr;
- hr = g_pGB->lpVtbl->RemoveFilter(g_pGB, g_pSrcFilter);
+ hr = IGraphBuilder_RemoveFilter(backend->pGB, backend->pSrcFilter);
if (FAILED(hr)) {
ERR("Failed to remove source filer. 0x%x\n", hr);
return hr;
}
- hr = g_pGB->lpVtbl->RemoveFilter(g_pGB, g_pDstFilter);
+ hr = IGraphBuilder_RemoveFilter(backend->pGB, backend->pDstFilter);
if (FAILED(hr)) {
ERR("Failed to remove destination filer. 0x%x\n", hr);
}
/* default fps is 15 */
#define MARUCAM_DEFAULT_FRAMEINTERVAL 666666
-static STDMETHODIMP SetFormat(uint32_t dwWidth, uint32_t dwHeight,
- uint32_t dwDstFmt, uint32_t *dwSrcFmt)
+static STDMETHODIMP SetFormat(MCBackendWin *backend, uint32_t dwWidth,
+ uint32_t dwHeight, uint32_t dwDstFmt)
{
HRESULT hr;
IAMStreamConfig *pSConfig;
DWORD dwYUY2 = MAKEFOURCC('Y', 'U', 'Y', '2');
DWORD dwI420 = MAKEFOURCC('I', '4', '2', '0');
- if (dwSrcFmt == NULL) {
- ERR("invalid the source format pointer\n");
- return E_FAIL;
- }
-
- hr = g_pCGB->lpVtbl->FindInterface(g_pCGB, &PIN_CATEGORY_CAPTURE, 0,
- g_pSrcFilter, &IID_IAMStreamConfig,
- (void **)&pSConfig);
+ hr = ICaptureGraphBuilder2_FindInterface(backend->pCGB,
+ &PIN_CATEGORY_CAPTURE, 0,
+ backend->pSrcFilter,
+ &IID_IAMStreamConfig,
+ (void **)&pSConfig);
if (FAILED(hr)) {
ERR("failed to FindInterface method\n");
return hr;
}
- hr = pSConfig->lpVtbl->GetNumberOfCapabilities(pSConfig, &iCount, &iSize);
+ hr = IAMStreamConfig_GetNumberOfCapabilities(pSConfig, &iCount, &iSize);
if (FAILED(hr)) {
ERR("failed to GetNumberOfCapabilities method\n");
SAFE_RELEASE(pSConfig);
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
- hr = pSConfig->lpVtbl->GetStreamCaps(pSConfig, iFormat,
- &pmtConfig, (BYTE *)&scc);
+ hr = IAMStreamConfig_GetStreamCaps(pSConfig, iFormat,
+ &pmtConfig, (BYTE *)&scc);
if (hr == S_OK) {
if (IsEqualIID(&pmtConfig->formattype, &FORMAT_VideoInfo)) {
VIDEOINFOHEADER *pvi =
if ((pvi->bmiHeader.biWidth == (LONG)dwWidth) &&
(pvi->bmiHeader.biHeight == (LONG)dwHeight)) {
if (pvi->bmiHeader.biCompression == dwYUY2) {
- *dwSrcFmt = V4L2_PIX_FMT_YUYV;
+ backend->src_fmt = V4L2_PIX_FMT_YUYV;
} else if ((pvi->bmiHeader.biCompression == BI_RGB) &&
(pvi->bmiHeader.biBitCount == 24)) {
- *dwSrcFmt = V4L2_PIX_FMT_RGB24;
+ backend->src_fmt = V4L2_PIX_FMT_RGB24;
} else if (pvi->bmiHeader.biCompression == dwI420) {
- *dwSrcFmt = V4L2_PIX_FMT_YUV420;
+ backend->src_fmt = V4L2_PIX_FMT_YUV420;
} else { /* not support format */
DeleteMediaType(pmtConfig);
continue;
pvi->AvgTimePerFrame =
(REFERENCE_TIME)scc.MaxFrameInterval;
#endif
- hr = pSConfig->lpVtbl->SetFormat(pSConfig, pmtConfig);
+ hr = IAMStreamConfig_SetFormat(pSConfig, pmtConfig);
DeleteMediaType(pmtConfig);
break;
}
"resolution or image formats(YUY2, RGB24, I420).\n",
dwWidth, dwHeight);
hr = E_FAIL;
+ } else {
+ TRACE("Source Pixel format = %c%c%c%c\n",
+ (char)(backend->src_fmt),
+ (char)(backend->src_fmt >> 8),
+ (char)(backend->src_fmt >> 16),
+ (char)(backend->src_fmt >> 24));
}
}
SAFE_RELEASE(pSConfig);
return hr;
}
-static STDMETHODIMP QueryVideoProcAmp(long nProperty, long *pMin, long *pMax,
+static STDMETHODIMP QueryVideoProcAmp(MCBackendWin *backend, long nProperty,
+ long *pMin, long *pMax,
long *pStep, long *pDefault)
{
HRESULT hr;
long Flags;
IAMVideoProcAmp *pProcAmp = NULL;
- hr = g_pSrcFilter->lpVtbl->QueryInterface(g_pSrcFilter,
- &IID_IAMVideoProcAmp,
- (void **)&pProcAmp);
+ hr = IBaseFilter_QueryInterface(backend->pSrcFilter,
+ &IID_IAMVideoProcAmp,
+ (void **)&pProcAmp);
if (FAILED(hr)) {
return hr;
}
- hr = pProcAmp->lpVtbl->GetRange(pProcAmp, nProperty, pMin, pMax,
+ hr = IAMVideoProcAmp_GetRange(pProcAmp, nProperty, pMin, pMax,
pStep, pDefault, &Flags);
SAFE_RELEASE(pProcAmp);
return hr;
}
-static STDMETHODIMP GetVideoProcAmp(long nProperty, long *pValue)
+static STDMETHODIMP GetVideoProcAmp(MCBackendWin *backend,
+ long nProperty, long *pValue)
{
HRESULT hr;
long Flags;
IAMVideoProcAmp *pProcAmp = NULL;
- hr = g_pSrcFilter->lpVtbl->QueryInterface(g_pSrcFilter,
- &IID_IAMVideoProcAmp,
- (void **)&pProcAmp);
+ hr = IBaseFilter_QueryInterface(backend->pSrcFilter,
+ &IID_IAMVideoProcAmp,
+ (void **)&pProcAmp);
if (FAILED(hr)) {
return hr;
}
- hr = pProcAmp->lpVtbl->Get(pProcAmp, nProperty, pValue, &Flags);
+ hr = IAMVideoProcAmp_Get(pProcAmp, nProperty, pValue, &Flags);
if (FAILED(hr)) {
ERR("Failed to get property for video\n");
}
return hr;
}
-static STDMETHODIMP SetVideoProcAmp(long nProperty, long value)
+static STDMETHODIMP SetVideoProcAmp(MCBackendWin *backend,
+ long nProperty, long value)
{
HRESULT hr;
IAMVideoProcAmp *pProcAmp = NULL;
- hr = g_pSrcFilter->lpVtbl->QueryInterface(g_pSrcFilter,
- &IID_IAMVideoProcAmp,
- (void **)&pProcAmp);
+ hr = IBaseFilter_QueryInterface(backend->pSrcFilter,
+ &IID_IAMVideoProcAmp,
+ (void **)&pProcAmp);
if (FAILED(hr)) {
return hr;
}
- hr = pProcAmp->lpVtbl->Set(pProcAmp, nProperty, value,
+ hr = IAMVideoProcAmp_Set(pProcAmp, nProperty, value,
VideoProcAmp_Flags_Manual);
if (FAILED(hr)) {
ERR("Failed to set property for video\n");
return pstr;
}
-int marucam_device_check(int log_flag)
+int marucam_device_check(void)
{
struct timeval t1, t2;
int ret = 0;
ICaptureGraphBuilder2 *pCGB = NULL;
IBaseFilter *pSrcFilter = NULL;
IEnumMoniker *pEnumMK = NULL;
- IMoniker *pMoniKer = NULL;
+ IMoniker *pMoniker = NULL;
IAMStreamConfig *pSConfig = NULL;
int iCount = 0, iSize = 0;
gettimeofday(&t1, NULL);
hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
if (FAILED(hr)) {
- ERR("Failed to CoInitailizeEx\n");
+ INFO("Failed to CoInitailizeEx\n");
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
&IID_IGraphBuilder,
(void **)&pGB);
if (FAILED(hr)) {
- ERR("Failed to create GraphBuilder, 0x%x\n", hr);
+ INFO("Failed to create GraphBuilder, 0x%x\n", hr);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
&IID_ICaptureGraphBuilder2,
(void **)&pCGB);
if (FAILED(hr)) {
- ERR("Failed to create CaptureGraphBuilder2, 0x%x\n", hr);
+ INFO("Failed to create CaptureGraphBuilder2, 0x%x\n", hr);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
hr = pCGB->lpVtbl->SetFiltergraph(pCGB, pGB);
if (FAILED(hr)) {
- ERR("Failed to SetFiltergraph, 0x%x\n", hr);
+ INFO("Failed to SetFiltergraph, 0x%x\n", hr);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
&IID_ICreateDevEnum,
(void **)&pCreateDevEnum);
if (FAILED(hr)) {
- ERR("Failed to create instance of CLSID_SystemDeviceEnum\n");
+ INFO("Failed to create instance of CLSID_SystemDeviceEnum\n");
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
- hr = pCreateDevEnum->lpVtbl->CreateClassEnumerator(pCreateDevEnum,
+ hr = ICreateDevEnum_CreateClassEnumerator(pCreateDevEnum,
&CLSID_VideoInputDeviceCategory, &pEnumMK, 0);
if (FAILED(hr)) {
- ERR("Failed to create class enumerator\n");
+ INFO("Failed to create class enumerator\n");
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
if (!pEnumMK) {
- ERR("Class enumerator is NULL!!\n");
+ INFO("Class enumerator is NULL!!\n");
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
- pEnumMK->lpVtbl->Reset(pEnumMK);
+ IEnumMoniker_Reset(pEnumMK);
- hr = pEnumMK->lpVtbl->Next(pEnumMK, 1, &pMoniKer, NULL);
+ hr = IEnumMoniker_Next(pEnumMK, 1, &pMoniker, NULL);
if (FAILED(hr) || (hr == S_FALSE)) {
- ERR("Enum moniker returns a invalid value.\n");
+ INFO("Enum moniker returns a invalid value.\n");
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
IPropertyBag *pBag = NULL;
- hr = pMoniKer->lpVtbl->BindToStorage(pMoniKer, 0, 0,
- &IID_IPropertyBag,
- (void **)&pBag);
+ hr = IMoniker_BindToStorage(pMoniker, 0, 0,
+ &IID_IPropertyBag,
+ (void **)&pBag);
if (FAILED(hr)) {
- ERR("Failed to bind to storage.\n");
+ INFO("Failed to bind to storage.\n");
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
} else {
VARIANT var;
var.vt = VT_BSTR;
- hr = pBag->lpVtbl->Read(pBag, L"FriendlyName", &var, NULL);
+ hr = IPropertyBag_Read(pBag, L"FriendlyName", &var, NULL);
if (hr == S_OK) {
ret = 1;
- if (!log_flag) {
- SysFreeString(var.bstrVal);
- SAFE_RELEASE(pBag);
- SAFE_RELEASE(pMoniKer);
- SAFE_RELEASE(pEnumMK);
- SAFE_RELEASE(pCreateDevEnum);
- SAFE_RELEASE(pCGB);
- SAFE_RELEASE(pGB);
- CoUninitialize();
- gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
- t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
- return ret;
- }
device_name = __wchar_to_char(var.bstrVal);
INFO("Device name : %s\n", device_name);
g_free(device_name);
- hr = pMoniKer->lpVtbl->BindToObject(pMoniKer, NULL, NULL,
- &IID_IBaseFilter,
- (void **)&pSrcFilter);
+ hr = IMoniker_BindToObject(pMoniker, NULL, NULL,
+ &IID_IBaseFilter,
+ (void **)&pSrcFilter);
if (FAILED(hr)) {
- ERR("Counldn't bind moniker to filter object!!\n");
+ INFO("Counldn't bind moniker to filter object!!\n");
SysFreeString(var.bstrVal);
SAFE_RELEASE(pBag);
- SAFE_RELEASE(pMoniKer);
+ SAFE_RELEASE(pMoniker);
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pCGB);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
} else {
- pSrcFilter->lpVtbl->AddRef(pSrcFilter);
+ IBaseFilter_AddRef(pSrcFilter);
}
SysFreeString(var.bstrVal);
}
SAFE_RELEASE(pBag);
}
- SAFE_RELEASE(pMoniKer);
+ SAFE_RELEASE(pMoniker);
- hr = pGB->lpVtbl->AddFilter(pGB, pSrcFilter, L"Video Capture");
+ hr = IGraphBuilder_AddFilter(pGB, pSrcFilter, L"Video Capture");
if (hr != S_OK && hr != S_FALSE) {
- ERR("Counldn't add Video Capture filter to our graph!\n");
+ INFO("Counldn't add Video Capture filter to our graph!\n");
SAFE_RELEASE(pSrcFilter);
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
- hr = pCGB->lpVtbl->FindInterface(pCGB, &PIN_CATEGORY_CAPTURE, 0,
- pSrcFilter, &IID_IAMStreamConfig,
- (void **)&pSConfig);
+ hr = ICaptureGraphBuilder2_FindInterface(pCGB, &PIN_CATEGORY_CAPTURE, 0,
+ pSrcFilter, &IID_IAMStreamConfig,
+ (void **)&pSConfig);
if (FAILED(hr)) {
- ERR("Failed to FindInterface method\n");
+ INFO("Failed to FindInterface method\n");
SAFE_RELEASE(pSrcFilter);
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pCreateDevEnum);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
- hr = pSConfig->lpVtbl->GetNumberOfCapabilities(pSConfig, &iCount, &iSize);
+ hr = IAMStreamConfig_GetNumberOfCapabilities(pSConfig, &iCount, &iSize);
if (FAILED(hr)) {
- ERR("Failed to GetNumberOfCapabilities method\n");
+ INFO("Failed to GetNumberOfCapabilities method\n");
SAFE_RELEASE(pSConfig);
SAFE_RELEASE(pSrcFilter);
SAFE_RELEASE(pEnumMK);
SAFE_RELEASE(pGB);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
VIDEO_STREAM_CONFIG_CAPS scc;
AM_MEDIA_TYPE *pmtConfig;
- hr = pSConfig->lpVtbl->GetStreamCaps(pSConfig, iFormat, &pmtConfig,
- (BYTE *)&scc);
+ hr = IAMStreamConfig_GetStreamCaps(pSConfig, iFormat, &pmtConfig,
+ (BYTE *)&scc);
if (hr == S_OK) {
if (IsEqualIID(&pmtConfig->formattype, &FORMAT_VideoInfo)) {
VIDEOINFOHEADER *pvi =
}
}
- hr = pGB->lpVtbl->RemoveFilter(pGB, pSrcFilter);
+ hr = IGraphBuilder_RemoveFilter(pGB, pSrcFilter);
if (FAILED(hr)) {
- ERR("Failed to remove source filer. 0x%x\n", hr);
+ INFO("Failed to remove source filer. 0x%x\n", hr);
}
SAFE_RELEASE(pSConfig);
SAFE_RELEASE(pCreateDevEnum);
CoUninitialize();
gettimeofday(&t2, NULL);
- ERR("Elapsed time : %lu.%06lu\n",
+ INFO("Elapsed time : %lu.%06lu\n",
t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);
return ret;
}
-/* MARUCAM_CMD_INIT */
-void marucam_device_init(MaruCamState *state)
+static void backend_win_init(MaruCamState *state)
{
- g_state = state;
+ state->initialized = true;
}
-void marucam_device_exit(MaruCamState *state)
+static void backend_win_reset(MaruCamState *state)
{
+ state->backend->close(state);
}
-/* MARUCAM_CMD_OPEN */
-void marucam_device_open(MaruCamState *state)
+static void backend_win_release(MaruCamState *state)
+{
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
+ g_free(backend);
+ backend = NULL;
+}
+
+static void backend_win_open(MaruCamState *state)
{
HRESULT hr;
- uint32_t dwHeight, dwWidth, dwDstFmt;
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
if (FAILED(hr)) {
- ERR("CoInitailizeEx\n");
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
- param->errCode = EINVAL;
+ ERR("CoInitailizeEx failure\n");
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ state->ret_val = EINVAL;
return;
}
- hr = GraphBuilder_Init();
+ hr = GraphBuilder_Init(backend);
if (FAILED(hr)) {
ERR("GraphBuilder_Init\n");
- DisconnectPins();
- RemoveFilters();
- CloseInterfaces();
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ DisconnectPins(backend);
+ RemoveFilters(backend);
+ CloseInterfaces(backend);
CoUninitialize();
- param->errCode = EINVAL;
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
+ state->ret_val = EINVAL;
return;
}
- hr = BindSourceFilter();
+ hr = BindSourceFilter(backend);
if (FAILED(hr)) {
ERR("BindSourceFilter\n");
- DisconnectPins();
- RemoveFilters();
- CloseInterfaces();
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ DisconnectPins(backend);
+ RemoveFilters(backend);
+ CloseInterfaces(backend);
CoUninitialize();
- param->errCode = EINVAL;
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
+ state->ret_val = EINVAL;
return;
}
- hr = BindTargetFilter();
+ hr = BindTargetFilter(backend);
if (FAILED(hr)) {
ERR("BindTargetFilter\n");
- DisconnectPins();
- RemoveFilters();
- CloseInterfaces();
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ DisconnectPins(backend);
+ RemoveFilters(backend);
+ CloseInterfaces(backend);
CoUninitialize();
- param->errCode = EINVAL;
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
+ state->ret_val = EINVAL;
return;
}
- hr = ConnectFilters();
+ hr = ConnectFilters(backend);
if (FAILED(hr)) {
ERR("ConnectFilters\n");
- DisconnectPins();
- RemoveFilters();
- CloseInterfaces();
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ DisconnectPins(backend);
+ RemoveFilters(backend);
+ CloseInterfaces(backend);
CoUninitialize();
- param->errCode = EINVAL;
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
+ state->ret_val = EINVAL;
return;
}
- cur_frame_idx = 0;
- cur_fmt_idx = 0;
-
- dwHeight = supported_dst_frames[cur_frame_idx].height;
- dwWidth = supported_dst_frames[cur_frame_idx].width;
- dwDstFmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- hr = SetFormat(dwWidth, dwHeight, dwDstFmt, &g_dwSrcFmt);
+ /* Set default values. TODO: choose these defaults more intelligently */
+ backend->dst_height = support_frames[0].height;
+ backend->dst_width = support_frames[0].width;
+ backend->dst_fmt = support_fmts[0];
+ hr = SetFormat(backend, backend->dst_width, backend->dst_height,
+ backend->dst_fmt);
if (hr != S_OK) {
ERR("failed to Set default values\n");
- DisconnectPins();
- RemoveFilters();
- CloseInterfaces();
+ ERR("Device open failed: HRESULT(0x%x)\n", hr);
+ DisconnectPins(backend);
+ RemoveFilters(backend);
+ CloseInterfaces(backend);
CoUninitialize();
- param->errCode = EINVAL;
- ERR("camera device open failed!!!, [HRESULT : 0x%x]\n", hr);
+ state->ret_val = EINVAL;
return;
}
return;
}
-/* MARUCAM_CMD_CLOSE */
-void marucam_device_close(MaruCamState *state)
+static void backend_win_close(MaruCamState *state)
{
- MaruCamParam *param = state->param;
- int ret = 0;
- param->top = 0;
+ uint32_t ret;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
qemu_mutex_lock(&state->thread_mutex);
ret = state->streamon;
qemu_mutex_unlock(&state->thread_mutex);
if (ret) {
- marucam_device_stop_preview(state);
+ state->backend->stream_off(state);
}
- if (g_pGB) {
- DisconnectPins();
- RemoveFilters();
+ if (backend->pGB) {
+ DisconnectPins(backend);
+ RemoveFilters(backend);
}
- CloseInterfaces();
+ CloseInterfaces(backend);
CoUninitialize();
INFO("Closed\n");
}
-/* MARUCAM_CMD_START_PREVIEW */
-void marucam_device_start_preview(MaruCamState *state)
+static void backend_win_stream_on(MaruCamState *state)
{
HRESULT hr;
uint32_t pixfmt, width, height;
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
- ready_count = 0;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
+ backend->ready_count = 0;
+ width = backend->dst_width;
+ height = backend->dst_height;
+ pixfmt = backend->dst_fmt;
state->buf_size = get_sizeimage(pixfmt, width, height);
INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u)\n",
width, height, state->buf_size);
INFO("Starting preview\n");
- assert(g_pCallback != NULL);
- hr = ((HWCInPin *)g_pInputPin)->SetGrabCallbackIF(g_pInputPin,
- g_pCallback);
+ assert(backend->pCallback != NULL);
+ hr = HWCPin_SetCallback(backend->pInPin, backend->pCallback);
if (FAILED(hr)) {
ERR("Failed to set IGrabCallback interface.\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- if (grab_buf) {
- g_free(grab_buf);
- grab_buf = NULL;
+ if (backend->buf) {
+ g_free(backend->buf);
+ backend->buf = NULL;
}
- grab_buf = (void *)g_malloc0(state->buf_size);
- if (grab_buf == NULL) {
- param->errCode = ENOMEM;
+ backend->buf = (void *)g_malloc0(state->buf_size);
+ if (backend->buf == NULL) {
+ state->ret_val = ENOMEM;
return;
}
- hr = g_pMediaControl->lpVtbl->Run(g_pMediaControl);
+ hr = IMediaControl_Run(backend->pMC);
if (FAILED(hr)) {
ERR("Failed to run media control. hr=0x%x\n", hr);
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
INFO("Streaming on ......\n");
}
-/* MARUCAM_CMD_STOP_PREVIEW */
-void marucam_device_stop_preview(MaruCamState *state)
+static void backend_win_stream_off(MaruCamState *state)
{
HRESULT hr;
- MaruCamParam *param = state->param;
- param->top = 0;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
INFO("...... Streaming off\n");
qemu_mutex_lock(&state->thread_mutex);
state->streamon = 0;
qemu_mutex_unlock(&state->thread_mutex);
- hr = ((HWCInPin *)g_pInputPin)->SetGrabCallbackIF(g_pInputPin, NULL);
+ hr = HWCPin_SetCallback(backend->pInPin, NULL);
if (FAILED(hr)) {
ERR("Failed to set IGrabCallback interface.\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- hr = g_pMediaControl->lpVtbl->Stop(g_pMediaControl);
+ hr = backend->pMC->lpVtbl->Stop(backend->pMC);
if (FAILED(hr)) {
ERR("Failed to stop media control.\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- if (grab_buf) {
- g_free(grab_buf);
- grab_buf = NULL;
+ if (backend->buf) {
+ g_free(backend->buf);
+ backend->buf = NULL;
}
state->buf_size = 0;
INFO("Stopping preview\n");
}
-/* MARUCAM_CMD_S_PARAM */
-void marucam_device_s_param(MaruCamState *state)
+static void backend_win_s_parm(MaruCamState *state)
{
- MaruCamParam *param = state->param;
-
/* We use default FPS of the webcam */
- param->top = 0;
}
-/* MARUCAM_CMD_G_PARAM */
-void marucam_device_g_param(MaruCamState *state)
+static void backend_win_g_parm(MaruCamState *state)
{
- MaruCamParam *param = state->param;
-
+ struct v4l2_captureparm *cp =
+ (struct v4l2_captureparm *)state->io_ptr;
/* We use default FPS of the webcam
* return a fixed value on guest ini file (1/30).
*/
- param->top = 0;
- param->stack[0] = 0x1000; /* V4L2_CAP_TIMEPERFRAME */
- param->stack[1] = 1; /* numerator */
- param->stack[2] = 30; /* denominator */
+ cp->capability = V4L2_CAP_TIMEPERFRAME;
+ cp->timeperframe.numerator = 1;
+ cp->timeperframe.denominator = 30;
}
-/* MARUCAM_CMD_S_FMT */
-void marucam_device_s_fmt(MaruCamState *state)
+static void backend_win_s_fmt(MaruCamState *state)
{
- uint32_t width, height, pixfmt, pidx, fidx;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- width = param->stack[0];
- height = param->stack[1];
- pixfmt = param->stack[2];
+ uint32_t pidx, fidx;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- for (fidx = 0; fidx < ARRAY_SIZE(supported_dst_frames); fidx++) {
- if ((supported_dst_frames[fidx].width == width) &&
- (supported_dst_frames[fidx].height == height)) {
+ for (fidx = 0; fidx < ARRAY_SIZE(support_frames); fidx++) {
+ if ((support_frames[fidx].width == f->width) &&
+ (support_frames[fidx].height == f->height)) {
break;
}
}
- if (fidx == ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (fidx == ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
- for (pidx = 0; pidx < ARRAY_SIZE(supported_dst_pixfmts); pidx++) {
- if (supported_dst_pixfmts[pidx].fmt == pixfmt) {
+ for (pidx = 0; pidx < ARRAY_SIZE(support_fmts); pidx++) {
+ if (support_fmts[pidx] == f->pixelformat) {
break;
}
}
- if (pidx == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (pidx == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- if ((supported_dst_frames[cur_frame_idx].width != width) &&
- (supported_dst_frames[cur_frame_idx].height != height)) {
- HRESULT hr = SetFormat(width, height, pixfmt, &g_dwSrcFmt);
+ if ((backend->dst_width != f->width) &&
+ (backend->dst_height != f->height)) {
+ HRESULT hr = SetFormat(backend, f->width, f->height, f->pixelformat);
if (FAILED(hr)) {
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
}
- cur_frame_idx = fidx;
- cur_fmt_idx = pidx;
+ f->width = backend->dst_width = support_frames[fidx].width;
+ f->height = backend->dst_height = support_frames[fidx].height;
+ f->pixelformat = backend->dst_fmt = support_fmts[pidx];
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(backend->dst_fmt,
+ backend->dst_width);
+ f->sizeimage = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
+ f->colorspace = 0;
+ f->priv = 0;
- param->stack[0] = width;
- param->stack[1] = height;
- param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
- param->stack[6] = 0;
- param->stack[7] = 0;
-
- TRACE("Set format...\n");
+ TRACE("Set the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
-/* MARUCAM_CMD_G_FMT */
-void marucam_device_g_fmt(MaruCamState *state)
+static void backend_win_g_fmt(MaruCamState *state)
{
- uint32_t width, height, pixfmt;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- param->stack[0] = width;
- param->stack[1] = height;
- param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
- param->stack[6] = 0;
- param->stack[7] = 0;
+ f->width = backend->dst_width;
+ f->height = backend->dst_height;
+ f->pixelformat = backend->dst_fmt;
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(backend->dst_fmt,
+ backend->dst_width);
+ f->sizeimage = get_sizeimage(backend->dst_fmt,
+ backend->dst_width,
+ backend->dst_height);
+ f->colorspace = 0;
+ f->priv = 0;
- TRACE("Get format...\n");
+ TRACE("Get the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
-void marucam_device_try_fmt(MaruCamState *state)
+static void backend_win_try_fmt(MaruCamState *state)
{
- uint32_t width, height, pixfmt, i;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- width = param->stack[0];
- height = param->stack[1];
- pixfmt = param->stack[2];
+ uint32_t i;
+ struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- for (i = 0; i < ARRAY_SIZE(supported_dst_frames); i++) {
- if ((supported_dst_frames[i].width == width) &&
- (supported_dst_frames[i].height == height)) {
+ for (i = 0; i < ARRAY_SIZE(support_frames); i++) {
+ if ((support_frames[i].width == f->width) &&
+ (support_frames[i].height == f->height)) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (i == ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == f->pixelformat) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (i == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = width;
- param->stack[1] = height;
- param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
- param->stack[6] = 0;
- param->stack[7] = 0;
+ f->field = V4L2_FIELD_NONE;
+ f->bytesperline = get_bytesperline(f->pixelformat,
+ f->width);
+ f->sizeimage = get_sizeimage(f->pixelformat,
+ f->width,
+ f->height);
+ f->colorspace = 0;
+ f->priv = 0;
+
+ TRACE("Check the format: w:h(%dx%d), pix_fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ f->width, f->height, f->pixelformat, f->bytesperline,
+ f->sizeimage, f->colorspace);
}
-void marucam_device_enum_fmt(MaruCamState *state)
+static void backend_win_enum_fmt(MaruCamState *state)
{
- uint32_t index;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
+ struct v4l2_fmtdesc *f = (struct v4l2_fmtdesc *)state->io_ptr;
- if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (f->index >= ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[1] = 0; /* flags = NONE */
- param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
+
+ f->flags = 0; /* flags = NONE */
+ f->pixelformat = support_fmts[f->index];
+
/* set description */
- switch (supported_dst_pixfmts[index].fmt) {
+ switch (support_fmts[f->index]) {
case V4L2_PIX_FMT_YUYV:
- memcpy(¶m->stack[3], "YUYV", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YUYV");
break;
case V4L2_PIX_FMT_YUV420:
- memcpy(¶m->stack[3], "YU12", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YU12");
break;
case V4L2_PIX_FMT_YVU420:
- memcpy(¶m->stack[3], "YV12", 32);
+ pstrcpy((char *)f->description, sizeof(f->description), "YV12");
break;
default:
- ERR("Invalid pixel format\n");
- param->errCode = EINVAL;
+ ERR("Invalid pixel format\n");
+ state->ret_val = EINVAL;
break;
}
}
-void marucam_device_qctrl(MaruCamState *state)
+static void backend_win_query_ctrl(MaruCamState *state)
{
HRESULT hr;
- uint32_t id, i;
+ uint32_t i;
long property, min, max, step, def_val, set_val;
- char name[32] = {0,};
- MaruCamParam *param = state->param;
-
- param->top = 0;
- id = param->stack[0];
+ struct v4l2_queryctrl *qc = (struct v4l2_queryctrl *)state->io_ptr;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
- switch (id) {
+ /* NOTICE: Tizen MMFW hardcodes the control name.
+ Do Not Modify the name
+ */
+ switch (qc->id) {
case V4L2_CID_BRIGHTNESS:
- TRACE("V4L2_CID_BRIGHTNESS\n");
- property = VideoProcAmp_Brightness;
- memcpy((void *)name, (void *)"brightness", 32);
+ TRACE("Query : BRIGHTNESS\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "brightness");
i = 0;
+ property = VideoProcAmp_Brightness;
break;
case V4L2_CID_CONTRAST:
- TRACE("V4L2_CID_CONTRAST\n");
- property = VideoProcAmp_Contrast;
- memcpy((void *)name, (void *)"contrast", 32);
+ TRACE("Query : CONTRAST\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "contrast");
i = 1;
+ property = VideoProcAmp_Contrast;
break;
case V4L2_CID_SATURATION:
- TRACE("V4L2_CID_SATURATION\n");
- property = VideoProcAmp_Saturation;
- memcpy((void *)name, (void *)"saturation", 32);
+ TRACE("Query : SATURATION\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "saturation");
i = 2;
+ property = VideoProcAmp_Saturation;
break;
case V4L2_CID_SHARPNESS:
- TRACE("V4L2_CID_SHARPNESS\n");
- property = VideoProcAmp_Sharpness;
- memcpy((void *)name, (void *)"sharpness", 32);
+ TRACE("Query : SHARPNESS\n");
+ pstrcpy((char *)qc->name, sizeof(qc->name), "sharpness");
i = 3;
+ property = VideoProcAmp_Sharpness;
break;
default:
ERR("Invalid control ID\n");
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- hr = QueryVideoProcAmp(property, &min, &max, &step, &def_val);
+ hr = QueryVideoProcAmp(backend, property, &min, &max, &step, &def_val);
if (FAILED(hr)) {
- param->errCode = EINVAL;
- ERR("failed to query video controls [HRESULT : 0x%x]\n", hr);
+ ERR("Failed to query video controls: HRESULT(0x%x)\n", hr);
+ state->ret_val = EINVAL;
return;
} else {
- qctrl_tbl[i].hit = 1;
- qctrl_tbl[i].min = min;
- qctrl_tbl[i].max = max;
- qctrl_tbl[i].step = step;
- qctrl_tbl[i].init_val = def_val;
+ ctrl_tbl[i].hit = 1;
+ ctrl_tbl[i].min = min;
+ ctrl_tbl[i].max = max;
+ ctrl_tbl[i].step = step;
+ ctrl_tbl[i].init_val = def_val;
- if ((qctrl_tbl[i].min + qctrl_tbl[i].max) == 0) {
+ if ((ctrl_tbl[i].min + ctrl_tbl[i].max) == 0) {
set_val = 0;
} else {
- set_val = (qctrl_tbl[i].min + qctrl_tbl[i].max) / 2;
+ set_val = (ctrl_tbl[i].min + ctrl_tbl[i].max) / 2;
}
- hr = SetVideoProcAmp(property, set_val);
+ hr = SetVideoProcAmp(backend, property, set_val);
if (FAILED(hr)) {
- param->errCode = EINVAL;
- ERR("failed to set video control value, [HRESULT : 0x%x]\n", hr);
+ ERR("Failed to set video control value when query controls: "
+ "HRESULT(0x%x)\n", hr);
+ state->ret_val = EINVAL;
return;
}
+ INFO("Query Control: id(0x%x), name(%s), "
+ "min(%ld), max(%ld), step(%ld), "
+ "def_value(%ld), set_value(%ld)\n",
+ qc->id, qc->name, min, max,
+ step, def_val, set_val);
}
- param->stack[0] = id;
- param->stack[1] = MARUCAM_CTRL_VALUE_MIN; /* minimum */
- param->stack[2] = MARUCAM_CTRL_VALUE_MAX; /* maximum */
- param->stack[3] = MARUCAM_CTRL_VALUE_STEP; /* step */
- param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */
- param->stack[5] = V4L2_CTRL_FLAG_SLIDER;
- /* name field setting */
- memcpy(¶m->stack[6], (void *)name, sizeof(name)/sizeof(name[0]));
+ /* set fixed values by FW configuration file */
+ qc->minimum = MARUCAM_CTRL_VALUE_MIN; /* minimum */
+ qc->maximum = MARUCAM_CTRL_VALUE_MAX; /* maximum */
+ qc->step = MARUCAM_CTRL_VALUE_STEP; /* step */
+ qc->default_value = MARUCAM_CTRL_VALUE_MID; /* default_value */
+ qc->flags = V4L2_CTRL_FLAG_SLIDER;
}
-void marucam_device_s_ctrl(MaruCamState *state)
+static void backend_win_s_ctrl(MaruCamState *state)
{
HRESULT hr;
uint32_t i;
long property, set_val;
- MaruCamParam *param = state->param;
-
- param->top = 0;
+ struct v4l2_control *ctrl = (struct v4l2_control *)state->io_ptr;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
- switch (param->stack[0]) {
+ switch (ctrl->id) {
case V4L2_CID_BRIGHTNESS:
+ TRACE("%d is set to the value of the BRIGHTNESS\n", ctrl->value);
i = 0;
property = VideoProcAmp_Brightness;
break;
case V4L2_CID_CONTRAST:
+ TRACE("%d is set to the value of the CONTRAST\n", ctrl->value);
i = 1;
property = VideoProcAmp_Contrast;
break;
case V4L2_CID_SATURATION:
+ TRACE("%d is set to the value of the SATURATION\n", ctrl->value);
i = 2;
property = VideoProcAmp_Saturation;
break;
case V4L2_CID_SHARPNESS:
+ TRACE("%d is set to the value of the SHARPNESS\n", ctrl->value);
i = 3;
property = VideoProcAmp_Sharpness;
break;
default:
- param->errCode = EINVAL;
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl->id);
+ state->ret_val = EINVAL;
return;
}
- set_val = value_convert_from_guest(qctrl_tbl[i].min,
- qctrl_tbl[i].max, (long)param->stack[1]);
- hr = SetVideoProcAmp(property, set_val);
+ set_val = value_convert_from_guest(ctrl_tbl[i].min,
+ ctrl_tbl[i].max, (long)ctrl->value);
+ hr = SetVideoProcAmp(backend, property, set_val);
if (FAILED(hr)) {
- param->errCode = EINVAL;
- ERR("failed to set video control value, [HRESULT : 0x%x]\n", hr);
+ ERR("Failed to set video control value: HRESULT(0x%x)\n", hr);
+ state->ret_val = EINVAL;
return;
}
}
-void marucam_device_g_ctrl(MaruCamState *state)
+static void backend_win_g_ctrl(MaruCamState *state)
{
HRESULT hr;
uint32_t i;
long property, get_val;
- MaruCamParam *param = state->param;
+ struct v4l2_control *ctrl = (struct v4l2_control *)state->io_ptr;
+ MCBackendWin *backend = (MCBackendWin *)(state->backend);
- param->top = 0;
- switch (param->stack[0]) {
+ switch (ctrl->id) {
case V4L2_CID_BRIGHTNESS:
+ TRACE("To get a value of the BRIGHTNESS\n");
i = 0;
property = VideoProcAmp_Brightness;
break;
case V4L2_CID_CONTRAST:
+ TRACE("To get a value of the CONTRAST\n");
i = 1;
property = VideoProcAmp_Contrast;
break;
case V4L2_CID_SATURATION:
+ TRACE("To get a value of the SATURATION\n");
i = 2;
property = VideoProcAmp_Saturation;
break;
case V4L2_CID_SHARPNESS:
+ TRACE("To get a value of the SHARPNESS\n");
i = 3;
property = VideoProcAmp_Sharpness;
break;
default:
- param->errCode = EINVAL;
+ ERR("Our emulator does not support this control: 0x%x\n", ctrl->id);
+ state->ret_val = EINVAL;
return;
}
- hr = GetVideoProcAmp(property, &get_val);
+ hr = GetVideoProcAmp(backend, property, &get_val);
if (FAILED(hr)) {
- param->errCode = EINVAL;
- ERR("failed to get video control value!!!, [HRESULT : 0x%x]\n", hr);
+ ERR("Failed to get video control value: HRESULT(0x%x)\n", hr);
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = (uint32_t)value_convert_to_guest(qctrl_tbl[i].min,
- qctrl_tbl[i].max, get_val);
+
+ ctrl->value = (uint32_t)value_convert_to_guest(ctrl_tbl[i].min,
+ ctrl_tbl[i].max, get_val);
+ TRACE("Value: %u\n", ctrl->value);
}
-void marucam_device_enum_fsizes(MaruCamState *state)
+static void backend_win_enum_fsizes(MaruCamState *state)
{
- uint32_t index, pixfmt, i;
- MaruCamParam *param = state->param;
-
- param->top = 0;
- index = param->stack[0];
- pixfmt = param->stack[1];
+ struct v4l2_frmsizeenum *fsize =
+ (struct v4l2_frmsizeenum *)state->io_ptr;
+ uint32_t i;
- if (index >= ARRAY_SIZE(supported_dst_frames)) {
- param->errCode = EINVAL;
+ if (fsize->index >= ARRAY_SIZE(support_frames)) {
+ state->ret_val = EINVAL;
return;
}
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == fsize->pixel_format) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
- param->errCode = EINVAL;
+ if (i == ARRAY_SIZE(support_fmts)) {
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = supported_dst_frames[index].width;
- param->stack[1] = supported_dst_frames[index].height;
+ fsize->type = V4L2_FRMSIZE_TYPE_DISCRETE;
+ fsize->discrete.width = support_frames[fsize->index].width;
+ fsize->discrete.height = support_frames[fsize->index].height;
}
-void marucam_device_enum_fintv(MaruCamState *state)
+static void backend_win_enum_fintv(MaruCamState *state)
{
- MaruCamParam *param = state->param;
+ struct v4l2_frmivalenum *fival =
+ (struct v4l2_frmivalenum *)state->io_ptr;
- param->top = 0;
-
- /* switch by index(param->stack[0]) */
- switch (param->stack[0]) {
+ /* switch by index */
+ switch (fival->index) {
case 0:
- param->stack[1] = 30; /* denominator */
+ /* we only use 1/30 frame interval */
+ fival->type = V4L2_FRMIVAL_TYPE_DISCRETE;
+ fival->discrete.numerator = 1;
+ fival->discrete.denominator = 30;
break;
default:
- param->errCode = EINVAL;
+ state->ret_val = EINVAL;
return;
}
- param->stack[0] = 1; /* numerator */
+}
+
+MaruCamBackend *marucam_backend_create(MaruCamState *state)
+{
+ MCBackendWin *backend_win;
+
+ backend_win = g_new0(MCBackendWin, 1);
+ if (backend_win == NULL) {
+ ERR("Failed to allocate memory for the native backend\n");
+ return NULL;
+ }
+
+ backend_win->base.state = state;
+ backend_win->base.init = backend_win_init;
+ backend_win->base.reset = backend_win_reset;
+ backend_win->base.release = backend_win_release;
+ backend_win->base.open = backend_win_open;
+ backend_win->base.close = backend_win_close;
+ backend_win->base.stream_on = backend_win_stream_on;
+ backend_win->base.stream_off = backend_win_stream_off;
+ backend_win->base.enum_fmt = backend_win_enum_fmt;
+ backend_win->base.try_fmt = backend_win_try_fmt;
+ backend_win->base.s_fmt = backend_win_s_fmt;
+ backend_win->base.g_fmt = backend_win_g_fmt;
+ backend_win->base.s_parm = backend_win_s_parm;
+ backend_win->base.g_parm = backend_win_g_parm;
+ backend_win->base.query_ctrl = backend_win_query_ctrl;
+ backend_win->base.s_ctrl = backend_win_s_ctrl;
+ backend_win->base.g_ctrl = backend_win_g_ctrl;
+ backend_win->base.enum_framesizes = backend_win_enum_fsizes;
+ backend_win->base.enum_frameintervals = backend_win_enum_fintv;
+
+ return &backend_win->base;
}
--- /dev/null
+/*
+ * Video for Linux Two header file
+ *
+ * Copyright (C) 1999-2012 the contributors
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * Alternatively you can redistribute this file under the terms of the
+ * BSD license as stated below:
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ * 3. The names of its contributors may not be used to endorse or promote
+ * products derived from this software without specific prior written
+ * permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
+ * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+ * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+ * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+ * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * Header file for v4l or V4L2 drivers and applications
+ * with public API.
+ * All kernel-specific stuff were moved to media/v4l2-dev.h, so
+ * no #if __KERNEL tests are allowed here
+ *
+ * See http://linuxtv.org for more info
+ *
+ * Author: Bill Dirks <bill@thedirks.org>
+ * Justin Schoeman
+ * Hans Verkuil <hverkuil@xs4all.nl>
+ * et al.
+ */
+
+ /* This file has been modified from the videodev2.h for the Tizen emulator. */
+#ifndef __VIDEODEV2_MIN_H
+#define __VIDEODEV2_MIN_H
+
+/*
+ * Common stuff for both V4L1 and V4L2
+ * Moved from videodev.h
+ */
+#define VIDEO_MAX_FRAME 32
+#define VIDEO_MAX_PLANES 8
+
+typedef signed char __s8;
+typedef signed short __s16;
+typedef signed int __s32;
+typedef signed long long __s64;
+typedef unsigned char __u8;
+typedef unsigned short __u16;
+typedef unsigned int __u32;
+typedef unsigned long long __u64;
+
+/*
+ * M I S C E L L A N E O U S
+ */
+
+/* V4L2 defines copy from videodev2.h */
+#define V4L2_CTRL_FLAG_SLIDER 0x0020
+
+#define V4L2_CTRL_CLASS_USER 0x00980000
+#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
+#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE + 0)
+#define V4L2_CID_CONTRAST (V4L2_CID_BASE + 1)
+#define V4L2_CID_SATURATION (V4L2_CID_BASE + 2)
+#define V4L2_CID_SHARPNESS (V4L2_CID_BASE + 27)
+
+/* Four-character-code (FOURCC) */
+#define v4l2_fourcc(a, b, c, d)\
+ ((__u32)(a) | ((__u32)(b) << 8) | ((__u32)(c) << 16) | ((__u32)(d) << 24))
+
+/*
+ * E N U M S
+ */
+enum v4l2_field {
+ V4L2_FIELD_ANY = 0, /* driver can choose from none,
+ top, bottom, interlaced
+ depending on whatever it thinks
+ is approximate ... */
+ V4L2_FIELD_NONE = 1, /* this device has no fields ... */
+ V4L2_FIELD_TOP = 2, /* top field only */
+ V4L2_FIELD_BOTTOM = 3, /* bottom field only */
+ V4L2_FIELD_INTERLACED = 4, /* both fields interlaced */
+ V4L2_FIELD_SEQ_TB = 5, /* both fields sequential into one
+ buffer, top-bottom order */
+ V4L2_FIELD_SEQ_BT = 6, /* same as above + bottom-top order */
+ V4L2_FIELD_ALTERNATE = 7, /* both fields alternating into
+ separate buffers */
+ V4L2_FIELD_INTERLACED_TB = 8, /* both fields interlaced, top field
+ first and the top field is
+ transmitted first */
+ V4L2_FIELD_INTERLACED_BT = 9, /* both fields interlaced, top field
+ first and the bottom field is
+ transmitted first */
+};
+#define V4L2_FIELD_HAS_TOP(field) \
+ ((field) == V4L2_FIELD_TOP ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTTOM(field) \
+ ((field) == V4L2_FIELD_BOTTOM ||\
+ (field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+#define V4L2_FIELD_HAS_BOTH(field) \
+ ((field) == V4L2_FIELD_INTERLACED ||\
+ (field) == V4L2_FIELD_INTERLACED_TB ||\
+ (field) == V4L2_FIELD_INTERLACED_BT ||\
+ (field) == V4L2_FIELD_SEQ_TB ||\
+ (field) == V4L2_FIELD_SEQ_BT)
+
+enum v4l2_buf_type {
+ V4L2_BUF_TYPE_VIDEO_CAPTURE = 1,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT = 2,
+ V4L2_BUF_TYPE_VIDEO_OVERLAY = 3,
+ V4L2_BUF_TYPE_VBI_CAPTURE = 4,
+ V4L2_BUF_TYPE_VBI_OUTPUT = 5,
+ V4L2_BUF_TYPE_SLICED_VBI_CAPTURE = 6,
+ V4L2_BUF_TYPE_SLICED_VBI_OUTPUT = 7,
+#if 1
+ /* Experimental */
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY = 8,
+#endif
+ V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE = 9,
+ V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE = 10,
+ /* Deprecated, do not use */
+ V4L2_BUF_TYPE_PRIVATE = 0x80,
+};
+
+#define V4L2_TYPE_IS_MULTIPLANAR(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)
+
+#define V4L2_TYPE_IS_OUTPUT(type) \
+ ((type) == V4L2_BUF_TYPE_VIDEO_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_OVERLAY \
+ || (type) == V4L2_BUF_TYPE_VBI_OUTPUT \
+ || (type) == V4L2_BUF_TYPE_SLICED_VBI_OUTPUT)
+
+enum v4l2_tuner_type {
+ V4L2_TUNER_RADIO = 1,
+ V4L2_TUNER_ANALOG_TV = 2,
+ V4L2_TUNER_DIGITAL_TV = 3,
+};
+
+enum v4l2_memory {
+ V4L2_MEMORY_MMAP = 1,
+ V4L2_MEMORY_USERPTR = 2,
+ V4L2_MEMORY_OVERLAY = 3,
+ V4L2_MEMORY_DMABUF = 4,
+};
+
+/* see also http://vektor.theorem.ca/graphics/ycbcr/ */
+enum v4l2_colorspace {
+ /* ITU-R 601 -- broadcast NTSC/PAL */
+ V4L2_COLORSPACE_SMPTE170M = 1,
+
+ /* 1125-Line (US) HDTV */
+ V4L2_COLORSPACE_SMPTE240M = 2,
+
+ /* HD and modern captures. */
+ V4L2_COLORSPACE_REC709 = 3,
+
+ /* broken BT878 extents (601, luma range 16-253 instead of 16-235) */
+ V4L2_COLORSPACE_BT878 = 4,
+
+ /* These should be useful. Assume 601 extents. */
+ V4L2_COLORSPACE_470_SYSTEM_M = 5,
+ V4L2_COLORSPACE_470_SYSTEM_BG = 6,
+
+ /* I know there will be cameras that send this. So, this is
+ * unspecified chromaticities and full 0-255 on each of the
+ * Y'CbCr components
+ */
+ V4L2_COLORSPACE_JPEG = 7,
+
+ /* For RGB colourspaces, this is probably a good start. */
+ V4L2_COLORSPACE_SRGB = 8,
+};
+
+enum v4l2_priority {
+ V4L2_PRIORITY_UNSET = 0, /* not initialized */
+ V4L2_PRIORITY_BACKGROUND = 1,
+ V4L2_PRIORITY_INTERACTIVE = 2,
+ V4L2_PRIORITY_RECORD = 3,
+ V4L2_PRIORITY_DEFAULT = V4L2_PRIORITY_INTERACTIVE,
+};
+
+struct v4l2_rect {
+ __s32 left;
+ __s32 top;
+ __s32 width;
+ __s32 height;
+};
+
+struct v4l2_fract {
+ __u32 numerator;
+ __u32 denominator;
+};
+
+/*
+ * V I D E O I M A G E F O R M A T
+ */
+struct v4l2_pix_format {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field; /* enum v4l2_field */
+ __u32 bytesperline; /* for padding, zero if unused */
+ __u32 sizeimage;
+ __u32 colorspace; /* enum v4l2_colorspace */
+ __u32 priv; /* private data, depends on pixelformat */
+};
+
+/* Pixel format FOURCC depth Description */
+
+/* RGB formats */
+#define V4L2_PIX_FMT_RGB332 v4l2_fourcc('R', 'G', 'B', '1') /* 8 RGB-3-3-2 */
+#define V4L2_PIX_FMT_RGB444 v4l2_fourcc('R', '4', '4', '4') /* 16 xxxxrrrr ggggbbbb */
+#define V4L2_PIX_FMT_RGB555 v4l2_fourcc('R', 'G', 'B', 'O') /* 16 RGB-5-5-5 */
+#define V4L2_PIX_FMT_RGB565 v4l2_fourcc('R', 'G', 'B', 'P') /* 16 RGB-5-6-5 */
+#define V4L2_PIX_FMT_RGB555X v4l2_fourcc('R', 'G', 'B', 'Q') /* 16 RGB-5-5-5 BE */
+#define V4L2_PIX_FMT_RGB565X v4l2_fourcc('R', 'G', 'B', 'R') /* 16 RGB-5-6-5 BE */
+#define V4L2_PIX_FMT_BGR666 v4l2_fourcc('B', 'G', 'R', 'H') /* 18 BGR-6-6-6 */
+#define V4L2_PIX_FMT_BGR24 v4l2_fourcc('B', 'G', 'R', '3') /* 24 BGR-8-8-8 */
+#define V4L2_PIX_FMT_RGB24 v4l2_fourcc('R', 'G', 'B', '3') /* 24 RGB-8-8-8 */
+#define V4L2_PIX_FMT_BGR32 v4l2_fourcc('B', 'G', 'R', '4') /* 32 BGR-8-8-8-8 */
+#define V4L2_PIX_FMT_RGB32 v4l2_fourcc('R', 'G', 'B', '4') /* 32 RGB-8-8-8-8 */
+
+/* Grey formats */
+#define V4L2_PIX_FMT_GREY v4l2_fourcc('G', 'R', 'E', 'Y') /* 8 Greyscale */
+#define V4L2_PIX_FMT_Y4 v4l2_fourcc('Y', '0', '4', ' ') /* 4 Greyscale */
+#define V4L2_PIX_FMT_Y6 v4l2_fourcc('Y', '0', '6', ' ') /* 6 Greyscale */
+#define V4L2_PIX_FMT_Y10 v4l2_fourcc('Y', '1', '0', ' ') /* 10 Greyscale */
+#define V4L2_PIX_FMT_Y12 v4l2_fourcc('Y', '1', '2', ' ') /* 12 Greyscale */
+#define V4L2_PIX_FMT_Y16 v4l2_fourcc('Y', '1', '6', ' ') /* 16 Greyscale */
+
+/* Grey bit-packed formats */
+#define V4L2_PIX_FMT_Y10BPACK v4l2_fourcc('Y', '1', '0', 'B') /* 10 Greyscale bit-packed */
+
+/* Palette formats */
+#define V4L2_PIX_FMT_PAL8 v4l2_fourcc('P', 'A', 'L', '8') /* 8 8-bit palette */
+
+/* Chrominance formats */
+#define V4L2_PIX_FMT_UV8 v4l2_fourcc('U', 'V', '8', ' ') /* 8 UV 4:4 */
+
+/* Luminance+Chrominance formats */
+#define V4L2_PIX_FMT_YVU410 v4l2_fourcc('Y', 'V', 'U', '9') /* 9 YVU 4:1:0 */
+#define V4L2_PIX_FMT_YVU420 v4l2_fourcc('Y', 'V', '1', '2') /* 12 YVU 4:2:0 */
+#define V4L2_PIX_FMT_YUYV v4l2_fourcc('Y', 'U', 'Y', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YYUV v4l2_fourcc('Y', 'Y', 'U', 'V') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YVYU v4l2_fourcc('Y', 'V', 'Y', 'U') /* 16 YVU 4:2:2 */
+#define V4L2_PIX_FMT_UYVY v4l2_fourcc('U', 'Y', 'V', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_VYUY v4l2_fourcc('V', 'Y', 'U', 'Y') /* 16 YUV 4:2:2 */
+#define V4L2_PIX_FMT_YUV422P v4l2_fourcc('4', '2', '2', 'P') /* 16 YUV422 planar */
+#define V4L2_PIX_FMT_YUV411P v4l2_fourcc('4', '1', '1', 'P') /* 16 YUV411 planar */
+#define V4L2_PIX_FMT_Y41P v4l2_fourcc('Y', '4', '1', 'P') /* 12 YUV 4:1:1 */
+#define V4L2_PIX_FMT_YUV444 v4l2_fourcc('Y', '4', '4', '4') /* 16 xxxxyyyy uuuuvvvv */
+#define V4L2_PIX_FMT_YUV555 v4l2_fourcc('Y', 'U', 'V', 'O') /* 16 YUV-5-5-5 */
+#define V4L2_PIX_FMT_YUV565 v4l2_fourcc('Y', 'U', 'V', 'P') /* 16 YUV-5-6-5 */
+#define V4L2_PIX_FMT_YUV32 v4l2_fourcc('Y', 'U', 'V', '4') /* 32 YUV-8-8-8-8 */
+#define V4L2_PIX_FMT_YUV410 v4l2_fourcc('Y', 'U', 'V', '9') /* 9 YUV 4:1:0 */
+#define V4L2_PIX_FMT_YUV420 v4l2_fourcc('Y', 'U', '1', '2') /* 12 YUV 4:2:0 */
+#define V4L2_PIX_FMT_HI240 v4l2_fourcc('H', 'I', '2', '4') /* 8 8-bit color */
+#define V4L2_PIX_FMT_HM12 v4l2_fourcc('H', 'M', '1', '2') /* 8 YUV 4:2:0 16x16 macroblocks */
+#define V4L2_PIX_FMT_M420 v4l2_fourcc('M', '4', '2', '0') /* 12 YUV 4:2:0 2 lines y, 1 line uv interleaved */
+
+/* two planes -- one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12 v4l2_fourcc('N', 'V', '1', '2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21 v4l2_fourcc('N', 'V', '2', '1') /* 12 Y/CrCb 4:2:0 */
+#define V4L2_PIX_FMT_NV16 v4l2_fourcc('N', 'V', '1', '6') /* 16 Y/CbCr 4:2:2 */
+#define V4L2_PIX_FMT_NV61 v4l2_fourcc('N', 'V', '6', '1') /* 16 Y/CrCb 4:2:2 */
+#define V4L2_PIX_FMT_NV24 v4l2_fourcc('N', 'V', '2', '4') /* 24 Y/CbCr 4:4:4 */
+#define V4L2_PIX_FMT_NV42 v4l2_fourcc('N', 'V', '4', '2') /* 24 Y/CrCb 4:4:4 */
+
+/* two non contiguous planes - one Y, one Cr + Cb interleaved */
+#define V4L2_PIX_FMT_NV12M v4l2_fourcc('N', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 */
+#define V4L2_PIX_FMT_NV21M v4l2_fourcc('N', 'M', '2', '1') /* 12 Y/CrCb 4:2:0 */
+#define V4L2_PIX_FMT_NV16M v4l2_fourcc('N', 'M', '1', '6') /* 16 Y/CbCr 4:2:2 */
+#define V4L2_PIX_FMT_NV61M v4l2_fourcc('N', 'M', '6', '1') /* 16 Y/CrCb 4:2:2 */
+#define V4L2_PIX_FMT_NV12MT v4l2_fourcc('T', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 64x32 macroblocks */
+#define V4L2_PIX_FMT_NV12MT_16X16 v4l2_fourcc('V', 'M', '1', '2') /* 12 Y/CbCr 4:2:0 16x16 macroblocks */
+
+/* three non contiguous planes - Y, Cb, Cr */
+#define V4L2_PIX_FMT_YUV420M v4l2_fourcc('Y', 'M', '1', '2') /* 12 YUV420 planar */
+#define V4L2_PIX_FMT_YVU420M v4l2_fourcc('Y', 'M', '2', '1') /* 12 YVU420 planar */
+
+/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
+#define V4L2_PIX_FMT_SBGGR8 v4l2_fourcc('B', 'A', '8', '1') /* 8 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG8 v4l2_fourcc('G', 'B', 'R', 'G') /* 8 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG8 v4l2_fourcc('G', 'R', 'B', 'G') /* 8 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB8 v4l2_fourcc('R', 'G', 'G', 'B') /* 8 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR10 v4l2_fourcc('B', 'G', '1', '0') /* 10 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG10 v4l2_fourcc('G', 'B', '1', '0') /* 10 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG10 v4l2_fourcc('B', 'A', '1', '0') /* 10 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB10 v4l2_fourcc('R', 'G', '1', '0') /* 10 RGRG.. GBGB.. */
+#define V4L2_PIX_FMT_SBGGR12 v4l2_fourcc('B', 'G', '1', '2') /* 12 BGBG.. GRGR.. */
+#define V4L2_PIX_FMT_SGBRG12 v4l2_fourcc('G', 'B', '1', '2') /* 12 GBGB.. RGRG.. */
+#define V4L2_PIX_FMT_SGRBG12 v4l2_fourcc('B', 'A', '1', '2') /* 12 GRGR.. BGBG.. */
+#define V4L2_PIX_FMT_SRGGB12 v4l2_fourcc('R', 'G', '1', '2') /* 12 RGRG.. GBGB.. */
+ /* 10bit raw bayer a-law compressed to 8 bits */
+#define V4L2_PIX_FMT_SBGGR10ALAW8 v4l2_fourcc('a', 'B', 'A', '8')
+#define V4L2_PIX_FMT_SGBRG10ALAW8 v4l2_fourcc('a', 'G', 'A', '8')
+#define V4L2_PIX_FMT_SGRBG10ALAW8 v4l2_fourcc('a', 'g', 'A', '8')
+#define V4L2_PIX_FMT_SRGGB10ALAW8 v4l2_fourcc('a', 'R', 'A', '8')
+ /* 10bit raw bayer DPCM compressed to 8 bits */
+#define V4L2_PIX_FMT_SBGGR10DPCM8 v4l2_fourcc('b', 'B', 'A', '8')
+#define V4L2_PIX_FMT_SGBRG10DPCM8 v4l2_fourcc('b', 'G', 'A', '8')
+#define V4L2_PIX_FMT_SGRBG10DPCM8 v4l2_fourcc('B', 'D', '1', '0')
+#define V4L2_PIX_FMT_SRGGB10DPCM8 v4l2_fourcc('b', 'R', 'A', '8')
+ /*
+ * 10bit raw bayer, expanded to 16 bits
+ * xxxxrrrrrrrrrrxxxxgggggggggg xxxxggggggggggxxxxbbbbbbbbbb...
+ */
+#define V4L2_PIX_FMT_SBGGR16 v4l2_fourcc('B', 'Y', 'R', '2') /* 16 BGBG.. GRGR.. */
+
+/* compressed formats */
+#define V4L2_PIX_FMT_MJPEG v4l2_fourcc('M', 'J', 'P', 'G') /* Motion-JPEG */
+#define V4L2_PIX_FMT_JPEG v4l2_fourcc('J', 'P', 'E', 'G') /* JFIF JPEG */
+#define V4L2_PIX_FMT_DV v4l2_fourcc('d', 'v', 's', 'd') /* 1394 */
+#define V4L2_PIX_FMT_MPEG v4l2_fourcc('M', 'P', 'E', 'G') /* MPEG-1/2/4 Multiplexed */
+#define V4L2_PIX_FMT_H264 v4l2_fourcc('H', '2', '6', '4') /* H264 with start codes */
+#define V4L2_PIX_FMT_H264_NO_SC v4l2_fourcc('A', 'V', 'C', '1') /* H264 without start codes */
+#define V4L2_PIX_FMT_H264_MVC v4l2_fourcc('M', '2', '6', '4') /* H264 MVC */
+#define V4L2_PIX_FMT_H263 v4l2_fourcc('H', '2', '6', '3') /* H263 */
+#define V4L2_PIX_FMT_MPEG1 v4l2_fourcc('M', 'P', 'G', '1') /* MPEG-1 ES */
+#define V4L2_PIX_FMT_MPEG2 v4l2_fourcc('M', 'P', 'G', '2') /* MPEG-2 ES */
+#define V4L2_PIX_FMT_MPEG4 v4l2_fourcc('M', 'P', 'G', '4') /* MPEG-4 part 2 ES */
+#define V4L2_PIX_FMT_XVID v4l2_fourcc('X', 'V', 'I', 'D') /* Xvid */
+#define V4L2_PIX_FMT_VC1_ANNEX_G v4l2_fourcc('V', 'C', '1', 'G') /* SMPTE 421M Annex G compliant stream */
+#define V4L2_PIX_FMT_VC1_ANNEX_L v4l2_fourcc('V', 'C', '1', 'L') /* SMPTE 421M Annex L compliant stream */
+#define V4L2_PIX_FMT_VP8 v4l2_fourcc('V', 'P', '8', '0') /* VP8 */
+
+/* Vendor-specific formats */
+#define V4L2_PIX_FMT_CPIA1 v4l2_fourcc('C', 'P', 'I', 'A') /* cpia1 YUV */
+#define V4L2_PIX_FMT_WNVA v4l2_fourcc('W', 'N', 'V', 'A') /* Winnov hw compress */
+#define V4L2_PIX_FMT_SN9C10X v4l2_fourcc('S', '9', '1', '0') /* SN9C10x compression */
+#define V4L2_PIX_FMT_SN9C20X_I420 v4l2_fourcc('S', '9', '2', '0') /* SN9C20x YUV 4:2:0 */
+#define V4L2_PIX_FMT_PWC1 v4l2_fourcc('P', 'W', 'C', '1') /* pwc older webcam */
+#define V4L2_PIX_FMT_PWC2 v4l2_fourcc('P', 'W', 'C', '2') /* pwc newer webcam */
+#define V4L2_PIX_FMT_ET61X251 v4l2_fourcc('E', '6', '2', '5') /* ET61X251 compression */
+#define V4L2_PIX_FMT_SPCA501 v4l2_fourcc('S', '5', '0', '1') /* YUYV per line */
+#define V4L2_PIX_FMT_SPCA505 v4l2_fourcc('S', '5', '0', '5') /* YYUV per line */
+#define V4L2_PIX_FMT_SPCA508 v4l2_fourcc('S', '5', '0', '8') /* YUVY per line */
+#define V4L2_PIX_FMT_SPCA561 v4l2_fourcc('S', '5', '6', '1') /* compressed GBRG bayer */
+#define V4L2_PIX_FMT_PAC207 v4l2_fourcc('P', '2', '0', '7') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_MR97310A v4l2_fourcc('M', '3', '1', '0') /* compressed BGGR bayer */
+#define V4L2_PIX_FMT_JL2005BCD v4l2_fourcc('J', 'L', '2', '0') /* compressed RGGB bayer */
+#define V4L2_PIX_FMT_SN9C2028 v4l2_fourcc('S', 'O', 'N', 'X') /* compressed GBRG bayer */
+#define V4L2_PIX_FMT_SQ905C v4l2_fourcc('9', '0', '5', 'C') /* compressed RGGB bayer */
+#define V4L2_PIX_FMT_PJPG v4l2_fourcc('P', 'J', 'P', 'G') /* Pixart 73xx JPEG */
+#define V4L2_PIX_FMT_OV511 v4l2_fourcc('O', '5', '1', '1') /* ov511 JPEG */
+#define V4L2_PIX_FMT_OV518 v4l2_fourcc('O', '5', '1', '8') /* ov518 JPEG */
+#define V4L2_PIX_FMT_STV0680 v4l2_fourcc('S', '6', '8', '0') /* stv0680 bayer */
+#define V4L2_PIX_FMT_TM6000 v4l2_fourcc('T', 'M', '6', '0') /* tm5600/tm60x0 */
+#define V4L2_PIX_FMT_CIT_YYVYUY v4l2_fourcc('C', 'I', 'T', 'V') /* one line of Y then 1 line of VYUY */
+#define V4L2_PIX_FMT_KONICA420 v4l2_fourcc('K', 'O', 'N', 'I') /* YUV420 planar in blocks of 256 pixels */
+#define V4L2_PIX_FMT_JPGL v4l2_fourcc('J', 'P', 'G', 'L') /* JPEG-Lite */
+#define V4L2_PIX_FMT_SE401 v4l2_fourcc('S', '4', '0', '1') /* se401 janggu compressed rgb */
+#define V4L2_PIX_FMT_S5C_UYVY_JPG v4l2_fourcc('S', '5', 'C', 'I') /* S5C73M3 interleaved UYVY/JPEG */
+
+/*
+ * F O R M A T E N U M E R A T I O N
+ */
+struct v4l2_fmtdesc {
+ __u32 index; /* Format number */
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 flags;
+ __u8 description[32]; /* Description string */
+ __u32 pixelformat; /* Format fourcc */
+ __u32 reserved[4];
+};
+
+#define V4L2_FMT_FLAG_COMPRESSED 0x0001
+#define V4L2_FMT_FLAG_EMULATED 0x0002
+
+#if 1
+ /* Experimental Frame Size and frame rate enumeration */
+/*
+ * F R A M E S I Z E E N U M E R A T I O N
+ */
+enum v4l2_frmsizetypes {
+ V4L2_FRMSIZE_TYPE_DISCRETE = 1,
+ V4L2_FRMSIZE_TYPE_CONTINUOUS = 2,
+ V4L2_FRMSIZE_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmsize_discrete {
+ __u32 width; /* Frame width [pixel] */
+ __u32 height; /* Frame height [pixel] */
+};
+
+struct v4l2_frmsize_stepwise {
+ __u32 min_width; /* Minimum frame width [pixel] */
+ __u32 max_width; /* Maximum frame width [pixel] */
+ __u32 step_width; /* Frame width step size [pixel] */
+ __u32 min_height; /* Minimum frame height [pixel] */
+ __u32 max_height; /* Maximum frame height [pixel] */
+ __u32 step_height; /* Frame height step size [pixel] */
+};
+
+struct v4l2_frmsizeenum {
+ __u32 index; /* Frame size number */
+ __u32 pixel_format; /* Pixel format */
+ __u32 type; /* Frame size type the device supports. */
+
+ union { /* Frame size */
+ struct v4l2_frmsize_discrete discrete;
+ struct v4l2_frmsize_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+
+/*
+ * F R A M E R A T E E N U M E R A T I O N
+ */
+enum v4l2_frmivaltypes {
+ V4L2_FRMIVAL_TYPE_DISCRETE = 1,
+ V4L2_FRMIVAL_TYPE_CONTINUOUS = 2,
+ V4L2_FRMIVAL_TYPE_STEPWISE = 3,
+};
+
+struct v4l2_frmival_stepwise {
+ struct v4l2_fract min; /* Minimum frame interval [s] */
+ struct v4l2_fract max; /* Maximum frame interval [s] */
+ struct v4l2_fract step; /* Frame interval step size [s] */
+};
+
+struct v4l2_frmivalenum {
+ __u32 index; /* Frame format index */
+ __u32 pixel_format; /* Pixel format */
+ __u32 width; /* Frame width */
+ __u32 height; /* Frame height */
+ __u32 type; /* Frame interval type the device supports. */
+
+ union { /* Frame interval */
+ struct v4l2_fract discrete;
+ struct v4l2_frmival_stepwise stepwise;
+ };
+
+ __u32 reserved[2]; /* Reserved space for future use */
+};
+#endif
+
+/*
+ * T I M E C O D E
+ */
+struct v4l2_timecode {
+ __u32 type;
+ __u32 flags;
+ __u8 frames;
+ __u8 seconds;
+ __u8 minutes;
+ __u8 hours;
+ __u8 userbits[4];
+};
+
+/* Type */
+#define V4L2_TC_TYPE_24FPS 1
+#define V4L2_TC_TYPE_25FPS 2
+#define V4L2_TC_TYPE_30FPS 3
+#define V4L2_TC_TYPE_50FPS 4
+#define V4L2_TC_TYPE_60FPS 5
+
+/* Flags */
+#define V4L2_TC_FLAG_DROPFRAME 0x0001 /* "drop-frame" mode */
+#define V4L2_TC_FLAG_COLORFRAME 0x0002
+#define V4L2_TC_USERBITS_field 0x000C
+#define V4L2_TC_USERBITS_USERDEFINED 0x0000
+#define V4L2_TC_USERBITS_8BITCHARS 0x0008
+/* The above is based on SMPTE timecodes */
+
+/*
+ * M E M O R Y - M A P P I N G B U F F E R S
+ */
+struct v4l2_requestbuffers {
+ __u32 count;
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 memory; /* enum v4l2_memory */
+ __u32 reserved[2];
+};
+
+/**
+ * struct v4l2_plane - plane info for multi-planar buffers
+ * @bytesused: number of bytes occupied by data in the plane (payload)
+ * @length: size of this plane (NOT the payload) in bytes
+ * @mem_offset: when memory in the associated struct v4l2_buffer is
+ * V4L2_MEMORY_MMAP, equals the offset from the start of
+ * the device memory for this plane (or is a "cookie" that
+ * should be passed to mmap() called on the video node)
+ * @userptr: when memory is V4L2_MEMORY_USERPTR, a userspace pointer
+ * pointing to this plane
+ * @fd: when memory is V4L2_MEMORY_DMABUF, a userspace file
+ * descriptor associated with this plane
+ * @data_offset: offset in the plane to the start of data; usually 0,
+ * unless there is a header in front of the data
+ *
+ * Multi-planar buffers consist of one or more planes, e.g. an YCbCr buffer
+ * with two planes can have one plane for Y, and another for interleaved CbCr
+ * components. Each plane can reside in a separate memory buffer, or even in
+ * a completely separate memory node (e.g. in embedded devices).
+ */
+struct v4l2_plane {
+ __u32 bytesused;
+ __u32 length;
+ union {
+ __u32 mem_offset;
+ unsigned long userptr;
+ __s32 fd;
+ } m;
+ __u32 data_offset;
+ __u32 reserved[11];
+};
+
+/**
+ * struct v4l2_buffer - video buffer info
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @bytesused: number of bytes occupied by data in the buffer (payload);
+ * unused (set to 0) for multiplanar buffers
+ * @flags: buffer informational flags
+ * @field: enum v4l2_field; field order of the image in the buffer
+ * @timestamp: frame timestamp
+ * @timecode: frame timecode
+ * @sequence: sequence count of this frame
+ * @memory: enum v4l2_memory; the method, in which the actual video data is
+ * passed
+ * @offset: for non-multiplanar buffers with memory == V4L2_MEMORY_MMAP;
+ * offset from the start of the device memory for this plane,
+ * (or a "cookie" that should be passed to mmap() as offset)
+ * @userptr: for non-multiplanar buffers with memory == V4L2_MEMORY_USERPTR;
+ * a userspace pointer pointing to this buffer
+ * @fd: for non-multiplanar buffers with memory == V4L2_MEMORY_DMABUF;
+ * a userspace file descriptor associated with this buffer
+ * @planes: for multiplanar buffers; userspace pointer to the array of plane
+ * info structs for this buffer
+ * @length: size in bytes of the buffer (NOT its payload) for single-plane
+ * buffers (when type != *_MPLANE); number of elements in the
+ * planes array for multi-plane buffers
+ * @input: input number from which the video data has been captured
+ *
+ * Contains data exchanged by application and driver using one of the Streaming
+ * I/O methods.
+ */
+struct v4l2_buffer {
+ __u32 index;
+ __u32 type;
+ __u32 bytesused;
+ __u32 flags;
+ __u32 field;
+ struct timeval timestamp;
+ struct v4l2_timecode timecode;
+ __u32 sequence;
+
+ /* memory location */
+ __u32 memory;
+ union {
+ __u32 offset;
+ unsigned long userptr;
+ struct v4l2_plane *planes;
+ __s32 fd;
+ } m;
+ __u32 length;
+ __u32 reserved2;
+ __u32 reserved;
+};
+
+/* Flags for 'flags' field */
+#define V4L2_BUF_FLAG_MAPPED 0x0001 /* Buffer is mapped (flag) */
+#define V4L2_BUF_FLAG_QUEUED 0x0002 /* Buffer is queued for processing */
+#define V4L2_BUF_FLAG_DONE 0x0004 /* Buffer is ready */
+#define V4L2_BUF_FLAG_KEYFRAME 0x0008 /* Image is a keyframe (I-frame) */
+#define V4L2_BUF_FLAG_PFRAME 0x0010 /* Image is a P-frame */
+#define V4L2_BUF_FLAG_BFRAME 0x0020 /* Image is a B-frame */
+/* Buffer is ready, but the data contained within is corrupted. */
+#define V4L2_BUF_FLAG_ERROR 0x0040
+#define V4L2_BUF_FLAG_TIMECODE 0x0100 /* timecode field is valid */
+#define V4L2_BUF_FLAG_PREPARED 0x0400 /* Buffer is prepared for queuing */
+/* Cache handling flags */
+#define V4L2_BUF_FLAG_NO_CACHE_INVALIDATE 0x0800
+#define V4L2_BUF_FLAG_NO_CACHE_CLEAN 0x1000
+/* Timestamp type */
+#define V4L2_BUF_FLAG_TIMESTAMP_MASK 0xe000
+#define V4L2_BUF_FLAG_TIMESTAMP_UNKNOWN 0x0000
+#define V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC 0x2000
+#define V4L2_BUF_FLAG_TIMESTAMP_COPY 0x4000
+
+/**
+ * struct v4l2_exportbuffer - export of video buffer as DMABUF file descriptor
+ *
+ * @index: id number of the buffer
+ * @type: enum v4l2_buf_type; buffer type (type == *_MPLANE for
+ * multiplanar buffers);
+ * @plane: index of the plane to be exported, 0 for single plane queues
+ * @flags: flags for newly created file, currently only O_CLOEXEC is
+ * supported, refer to manual of open syscall for more details
+ * @fd: file descriptor associated with DMABUF (set by driver)
+ *
+ * Contains data used for exporting a video buffer as DMABUF file descriptor.
+ * The buffer is identified by a 'cookie' returned by VIDIOC_QUERYBUF
+ * (identical to the cookie used to mmap() the buffer to userspace). All
+ * reserved fields must be set to zero. The field reserved0 is expected to
+ * become a structure 'type' allowing an alternative layout of the structure
+ * content. Therefore this field should not be used for any other extensions.
+ */
+struct v4l2_exportbuffer {
+ __u32 type; /* enum v4l2_buf_type */
+ __u32 index;
+ __u32 plane;
+ __u32 flags;
+ __s32 fd;
+ __u32 reserved[11];
+};
+
+/*
+ * O V E R L A Y P R E V I E W
+ */
+struct v4l2_framebuffer {
+ __u32 capability;
+ __u32 flags;
+/* FIXME: in theory we should pass something like PCI device + memory
+ * region + offset instead of some physical address */
+ void *base;
+ struct v4l2_pix_format fmt;
+};
+/* Flags for the 'capability' field. Read only */
+#define V4L2_FBUF_CAP_EXTERNOVERLAY 0x0001
+#define V4L2_FBUF_CAP_CHROMAKEY 0x0002
+#define V4L2_FBUF_CAP_LIST_CLIPPING 0x0004
+#define V4L2_FBUF_CAP_BITMAP_CLIPPING 0x0008
+#define V4L2_FBUF_CAP_LOCAL_ALPHA 0x0010
+#define V4L2_FBUF_CAP_GLOBAL_ALPHA 0x0020
+#define V4L2_FBUF_CAP_LOCAL_INV_ALPHA 0x0040
+#define V4L2_FBUF_CAP_SRC_CHROMAKEY 0x0080
+/* Flags for the 'flags' field. */
+#define V4L2_FBUF_FLAG_PRIMARY 0x0001
+#define V4L2_FBUF_FLAG_OVERLAY 0x0002
+#define V4L2_FBUF_FLAG_CHROMAKEY 0x0004
+#define V4L2_FBUF_FLAG_LOCAL_ALPHA 0x0008
+#define V4L2_FBUF_FLAG_GLOBAL_ALPHA 0x0010
+#define V4L2_FBUF_FLAG_LOCAL_INV_ALPHA 0x0020
+#define V4L2_FBUF_FLAG_SRC_CHROMAKEY 0x0040
+
+struct v4l2_clip {
+ struct v4l2_rect c;
+ struct v4l2_clip *next;
+};
+
+struct v4l2_window {
+ struct v4l2_rect w;
+ __u32 field; /* enum v4l2_field */
+ __u32 chromakey;
+ struct v4l2_clip *clips;
+ __u32 clipcount;
+ void *bitmap;
+ __u8 global_alpha;
+};
+
+/*
+ * C A P T U R E P A R A M E T E R S
+ */
+struct v4l2_captureparm {
+ __u32 capability; /* Supported modes */
+ __u32 capturemode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 readbuffers; /* # of buffers for read */
+ __u32 reserved[4];
+};
+
+/* Flags for 'capability' and 'capturemode' fields */
+#define V4L2_MODE_HIGHQUALITY 0x0001 /* High quality imaging mode */
+#define V4L2_CAP_TIMEPERFRAME 0x1000 /* timeperframe field is supported */
+
+struct v4l2_outputparm {
+ __u32 capability; /* Supported modes */
+ __u32 outputmode; /* Current mode */
+ struct v4l2_fract timeperframe; /* Time per frame in seconds */
+ __u32 extendedmode; /* Driver-specific extensions */
+ __u32 writebuffers; /* # of buffers for write */
+ __u32 reserved[4];
+};
+
+/*
+ * C O N T R O L S
+ */
+struct v4l2_control {
+ __u32 id;
+ __s32 value;
+};
+
+struct v4l2_ext_control {
+ __u32 id;
+ __u32 size;
+ __u32 reserved2[1];
+ union {
+ __s32 value;
+ __s64 value64;
+ char *string;
+ };
+} __attribute__ ((packed));
+
+struct v4l2_ext_controls {
+ __u32 ctrl_class;
+ __u32 count;
+ __u32 error_idx;
+ __u32 reserved[2];
+ struct v4l2_ext_control *controls;
+};
+
+#define V4L2_CTRL_ID_MASK (0x0fffffff)
+#define V4L2_CTRL_ID2CLASS(id) ((id) & 0x0fff0000UL)
+#define V4L2_CTRL_DRIVER_PRIV(id) (((id) & 0xffff) >= 0x1000)
+
+enum v4l2_ctrl_type {
+ V4L2_CTRL_TYPE_INTEGER = 1,
+ V4L2_CTRL_TYPE_BOOLEAN = 2,
+ V4L2_CTRL_TYPE_MENU = 3,
+ V4L2_CTRL_TYPE_BUTTON = 4,
+ V4L2_CTRL_TYPE_INTEGER64 = 5,
+ V4L2_CTRL_TYPE_CTRL_CLASS = 6,
+ V4L2_CTRL_TYPE_STRING = 7,
+ V4L2_CTRL_TYPE_BITMASK = 8,
+ V4L2_CTRL_TYPE_INTEGER_MENU = 9,
+};
+
+/* Used in the VIDIOC_QUERYCTRL ioctl for querying controls */
+struct v4l2_queryctrl {
+ __u32 id;
+ __u32 type; /* enum v4l2_ctrl_type */
+ __u8 name[32]; /* Whatever */
+ __s32 minimum; /* Note signedness */
+ __s32 maximum;
+ __s32 step;
+ __s32 default_value;
+ __u32 flags;
+ __u32 reserved[2];
+};
+
+/* Control flags */
+#define V4L2_CTRL_FLAG_DISABLED 0x0001
+#define V4L2_CTRL_FLAG_GRABBED 0x0002
+#define V4L2_CTRL_FLAG_READ_ONLY 0x0004
+#define V4L2_CTRL_FLAG_UPDATE 0x0008
+#define V4L2_CTRL_FLAG_INACTIVE 0x0010
+#define V4L2_CTRL_FLAG_SLIDER 0x0020
+#define V4L2_CTRL_FLAG_WRITE_ONLY 0x0040
+#define V4L2_CTRL_FLAG_VOLATILE 0x0080
+
+/* Query flag, to be ORed with the control ID */
+#define V4L2_CTRL_FLAG_NEXT_CTRL 0x80000000
+
+/* User-class control IDs defined by V4L2 */
+#define V4L2_CID_MAX_CTRLS 1024
+/* IDs reserved for driver specific controls */
+#define V4L2_CID_PRIVATE_BASE 0x08000000
+
+/*
+ * D A T A S E R V I C E S ( V B I )
+ *
+ * Data services API by Michael Schimek
+ */
+
+/* Raw VBI */
+struct v4l2_vbi_format {
+ __u32 sampling_rate; /* in 1 Hz */
+ __u32 offset;
+ __u32 samples_per_line;
+ __u32 sample_format; /* V4L2_PIX_FMT_* */
+ __s32 start[2];
+ __u32 count[2];
+ __u32 flags; /* V4L2_VBI_* */
+ __u32 reserved[2]; /* must be zero */
+};
+
+/* Sliced VBI
+ *
+ * This implements a proposed V4L2 API to allow SLICED VBI,
+ * required for some hardware encoders. It may change without
+ * notice in the definitive implementation.
+ */
+
+struct v4l2_sliced_vbi_format {
+ __u16 service_set;
+ /* service_lines[0][...] specifies lines 0-23 (1-23 used) of the first field
+ service_lines[1][...] specifies lines 0-23 (1-23 used) of the second field
+ (equals frame lines 313-336 for 625 line video
+ standards, 263-286 for 525 line standards) */
+ __u16 service_lines[2][24];
+ __u32 io_size;
+ __u32 reserved[2]; /* must be zero */
+};
+
+/*
+ * A G G R E G A T E S T R U C T U R E S
+ */
+
+/**
+ * struct v4l2_plane_pix_format - additional, per-plane format definition
+ * @sizeimage: maximum size in bytes required for data, for which
+ * this plane will be used
+ * @bytesperline: distance in bytes between the leftmost pixels in two
+ * adjacent lines
+ */
+struct v4l2_plane_pix_format {
+ __u32 sizeimage;
+ __u16 bytesperline;
+ __u16 reserved[7];
+} __attribute__ ((packed));
+
+/**
+ * struct v4l2_pix_format_mplane - multiplanar format definition
+ * @width: image width in pixels
+ * @height: image height in pixels
+ * @pixelformat: little endian four character code (fourcc)
+ * @field: enum v4l2_field; field order (for interlaced video)
+ * @colorspace: enum v4l2_colorspace; supplemental to pixelformat
+ * @plane_fmt: per-plane information
+ * @num_planes: number of planes for this format
+ */
+struct v4l2_pix_format_mplane {
+ __u32 width;
+ __u32 height;
+ __u32 pixelformat;
+ __u32 field;
+ __u32 colorspace;
+
+ struct v4l2_plane_pix_format plane_fmt[VIDEO_MAX_PLANES];
+ __u8 num_planes;
+ __u8 reserved[11];
+} __attribute__ ((packed));
+
+/**
+ * struct v4l2_format - stream data format
+ * @type: enum v4l2_buf_type; type of the data stream
+ * @pix: definition of an image format
+ * @pix_mp: definition of a multiplanar image format
+ * @win: definition of an overlaid image
+ * @vbi: raw VBI capture or output parameters
+ * @sliced: sliced VBI capture or output parameters
+ * @raw_data: placeholder for future extensions and custom formats
+ */
+struct v4l2_format {
+ __u32 type;
+ union {
+ struct v4l2_pix_format pix; /* V4L2_BUF_TYPE_VIDEO_CAPTURE */
+ struct v4l2_pix_format_mplane pix_mp; /* V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE */
+ struct v4l2_window win; /* V4L2_BUF_TYPE_VIDEO_OVERLAY */
+ struct v4l2_vbi_format vbi; /* V4L2_BUF_TYPE_VBI_CAPTURE */
+ struct v4l2_sliced_vbi_format sliced; /* V4L2_BUF_TYPE_SLICED_VBI_CAPTURE */
+ __u8 raw_data[200]; /* user-defined */
+ } fmt;
+};
+
+#endif /* __VIDEODEV2_MIN_H */