#include "qemu-common.h"
#include "maru_camera_darwin.h"
+#include "tizen/src/debug_ch.h"
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height);
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height);
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height);
+MULTI_DEBUG_CHANNEL(tizen, camera_darwin);
+
+static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height);
+static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height);
+static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height);
/* Convert pixel format to YUV420 */
void convert_frame(uint32_t pixel_format, int frame_width, int frame_height,
- size_t frame_size, void* frame_pixels, void* video_buf)
+ size_t frame_size, void *frame_pixels, void *video_buf)
{
switch (pixel_format) {
- case V4L2_PIX_FMT_YUV420:
- memcpy(video_buf, (void*)frame_pixels, (size_t)frame_size);
- break;
- case V4L2_PIX_FMT_YVU420:
- YVU420ToYUV420(frame_pixels, video_buf, frame_width, frame_height);
- break;
- case V4L2_PIX_FMT_YUYV:
- YUYVToYUV420(frame_pixels, video_buf, frame_width, frame_height);
- break;
- case V4L2_PIX_FMT_UYVY: // Mac default format
- UYVYToYUV420(frame_pixels, video_buf, frame_width, frame_height);
- break;
- default:
- printf("Cannot convert the pixel format (%.4s)...\n", (const char*)&pixel_format);
- break;
+ case V4L2_PIX_FMT_YUV420:
+ memcpy(video_buf, (void *)frame_pixels, (size_t)frame_size);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ YVU420ToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ YUYVToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ break;
+ case V4L2_PIX_FMT_UYVY: /* Mac default format */
+ UYVYToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ break;
+ default:
+ ERR("Cannot convert the pixel format (%.4s)...\n",
+ (const char *)&pixel_format);
+ break;
}
}
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height)
+static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height)
{
int i, j;
/* Source */
unsigned char *ptrsrcy1, *ptrsrcy2;
unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1, *ptrsrccb2;
- unsigned char *ptrsrccb3, *ptrsrccb4;
- unsigned char *ptrsrccr1, *ptrsrccr2;
- unsigned char *ptrsrccr3, *ptrsrccr4;
+ unsigned char *ptrsrccb1;
+ unsigned char *ptrsrccb3;
+ unsigned char *ptrsrccr1;
+ unsigned char *ptrsrccr3;
int srcystride, srcccstride;
ptrsrcy1 = bufsrc + 1;
- ptrsrcy2 = bufsrc + (width<<1) + 1;
- ptrsrcy3 = bufsrc + (width<<1)*2 + 1;
- ptrsrcy4 = bufsrc + (width<<1)*3 + 1;
+ ptrsrcy2 = bufsrc + (width << 1) + 1;
+ ptrsrcy3 = bufsrc + (width << 1) * 2 + 1;
+ ptrsrcy4 = bufsrc + (width << 1) * 3 + 1;
ptrsrccb1 = bufsrc;
- ptrsrccb2 = bufsrc + (width<<1);
- ptrsrccb3 = bufsrc + (width<<1)*2;
- ptrsrccb4 = bufsrc + (width<<1)*3;
+ ptrsrccb3 = bufsrc + (width << 1) * 2;
ptrsrccr1 = bufsrc + 2;
- ptrsrccr2 = bufsrc + (width<<1) + 2;
- ptrsrccr3 = bufsrc + (width<<1)*2 + 2;
- ptrsrccr4 = bufsrc + (width<<1)*3 + 2;
+ ptrsrccr3 = bufsrc + (width << 1) * 2 + 2;
- srcystride = (width<<1)*3;
- srcccstride = (width<<1)*3;
+ srcystride = (width << 1) * 3;
+ srcccstride = (width << 1) * 3;
/* Destination */
unsigned char *ptrdesty1, *ptrdesty2;
ptrdesty1 = bufdest;
ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width*2;
- ptrdesty4 = bufdest + width*3;
+ ptrdesty3 = bufdest + width * 2;
+ ptrdesty4 = bufdest + width * 3;
- ptrdestcb1 = bufdest + width*height;
- ptrdestcb2 = bufdest + width*height + (width>>1);
+ ptrdestcb1 = bufdest + width * height;
+ ptrdestcb2 = bufdest + width * height + (width >> 1);
- ptrdestcr1 = bufdest + width*height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width*height + ((width*height) >> 2) + (width>>1);
+ ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
+ ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
+ + (width >> 1);
destystride = (width)*3;
destccstride = (width>>1);
- for(j=0; j<(height/4); j++)
- {
- for(i=0;i<(width/2);i++)
- {
+ for (j = 0; j < (height / 4); j++) {
+ for (i = 0; i < (width / 2); i++) {
(*ptrdesty1++) = (*ptrsrcy1);
(*ptrdesty2++) = (*ptrsrcy2);
(*ptrdesty3++) = (*ptrsrcy3);
}
}
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height)
+static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height)
{
int i, j;
ptrdesty1 = bufdest;
ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width*2;
- ptrdesty4 = bufdest + width*3;
+ ptrdesty3 = bufdest + width * 2;
+ ptrdesty4 = bufdest + width * 3;
- ptrdestcb1 = bufdest + width*height;
- ptrdestcb2 = bufdest + width*height + (width>>1);
+ ptrdestcb1 = bufdest + width * height;
+ ptrdestcb2 = bufdest + width * height + (width >> 1);
- ptrdestcr1 = bufdest + width*height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width*height + ((width*height) >> 2) + (width>>1);
+ ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
+ ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
+ + (width >> 1);
destystride = (width)*3;
destccstride = (width>>1);
- for(j=0; j<(height/4); j++)
- {
- for(i=0;i<(width/2);i++)
- {
+ for (j = 0; j < (height / 4); j++) {
+ for (i = 0; i < (width / 2); i++) {
(*ptrdesty1++) = (*ptrsrcy1++);
(*ptrdesty2++) = (*ptrsrcy2++);
}
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest, int width, int height)
+static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
+ int width, int height)
{
int i, j;
/* Source*/
unsigned char *ptrsrcy1, *ptrsrcy2;
unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1, *ptrsrccb2;
- unsigned char *ptrsrccb3, *ptrsrccb4;
- unsigned char *ptrsrccr1, *ptrsrccr2;
- unsigned char *ptrsrccr3, *ptrsrccr4;
+ unsigned char *ptrsrccb1;
+ unsigned char *ptrsrccb3;
+ unsigned char *ptrsrccr1;
+ unsigned char *ptrsrccr3;
int srcystride, srcccstride;
ptrsrcy1 = bufsrc ;
- ptrsrcy2 = bufsrc + (width<<1) ;
- ptrsrcy3 = bufsrc + (width<<1)*2 ;
- ptrsrcy4 = bufsrc + (width<<1)*3 ;
+ ptrsrcy2 = bufsrc + (width << 1);
+ ptrsrcy3 = bufsrc + (width << 1) * 2;
+ ptrsrcy4 = bufsrc + (width << 1) * 3;
ptrsrccb1 = bufsrc + 1;
- ptrsrccb2 = bufsrc + (width<<1) + 1;
- ptrsrccb3 = bufsrc + (width<<1)*2 + 1;
- ptrsrccb4 = bufsrc + (width<<1)*3 + 1;
+ ptrsrccb3 = bufsrc + (width << 1) * 2 + 1;
ptrsrccr1 = bufsrc + 3;
- ptrsrccr2 = bufsrc + (width<<1) + 3;
- ptrsrccr3 = bufsrc + (width<<1)*2 + 3;
- ptrsrccr4 = bufsrc + (width<<1)*3 + 3;
+ ptrsrccr3 = bufsrc + (width << 1) * 2 + 3;
- srcystride = (width<<1)*3;
- srcccstride = (width<<1)*3;
+ srcystride = (width << 1) * 3;
+ srcccstride = (width << 1) * 3;
/* Destination */
unsigned char *ptrdesty1, *ptrdesty2;
ptrdesty1 = bufdest;
ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width*2;
- ptrdesty4 = bufdest + width*3;
+ ptrdesty3 = bufdest + width * 2;
+ ptrdesty4 = bufdest + width * 3;
- ptrdestcb1 = bufdest + width*height;
- ptrdestcb2 = bufdest + width*height + (width>>1);
+ ptrdestcb1 = bufdest + width * height;
+ ptrdestcb2 = bufdest + width * height + (width >> 1);
- ptrdestcr1 = bufdest + width*height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width*height + ((width*height) >> 2) + (width>>1);
+ ptrdestcr1 = bufdest + width * height + ((width * height) >> 2);
+ ptrdestcr2 = bufdest + width * height + ((width * height) >> 2)
+ + (width >> 1);
- destystride = (width)*3;
- destccstride = (width>>1);
+ destystride = width * 3;
+ destccstride = (width >> 1);
- for(j=0; j<(height/4); j++)
- {
- for(i=0;i<(width/2);i++)
- {
+ for (j = 0; j < (height / 4); j++) {
+ for (i = 0; i < (width / 2); i++) {
(*ptrdesty1++) = (*ptrsrcy1);
(*ptrdesty2++) = (*ptrsrcy2);
(*ptrdesty3++) = (*ptrsrcy3);
#define V4L2_CTRL_CLASS_USER 0x00980000
#define V4L2_CID_BASE (V4L2_CTRL_CLASS_USER | 0x900)
-#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE+0)
-#define V4L2_CID_CONTRAST (V4L2_CID_BASE+1)
-#define V4L2_CID_SATURATION (V4L2_CID_BASE+2)
-#define V4L2_CID_SHARPNESS (V4L2_CID_BASE+27)
+#define V4L2_CID_BRIGHTNESS (V4L2_CID_BASE + 0)
+#define V4L2_CID_CONTRAST (V4L2_CID_BASE + 1)
+#define V4L2_CID_SATURATION (V4L2_CID_BASE + 2)
+#define V4L2_CID_SHARPNESS (V4L2_CID_BASE + 27)
typedef struct tagMaruCamConvertPixfmt {
uint32_t fmt; /* fourcc */
#define MARUCAM_CTRL_VALUE_MID 10
#define MARUCAM_CTRL_VALUE_STEP 1
+enum {
+ _MC_THREAD_PAUSED,
+ _MC_THREAD_STREAMON,
+ _MC_THREAD_STREAMOFF,
+};
+
#if 0
struct marucam_qctrl {
uint32_t id;
static struct marucam_qctrl qctrl_tbl[] = {
{ V4L2_CID_BRIGHTNESS, 0, },
{ V4L2_CID_CONTRAST, 0, },
- { V4L2_CID_SATURATION,0, },
+ { V4L2_CID_SATURATION, 0, },
{ V4L2_CID_SHARPNESS, 0, },
};
#endif
-static MaruCamState *g_state = NULL;
+static MaruCamState *g_state;
-static uint32_t ready_count = 0;
-static uint32_t cur_fmt_idx = 0;
-static uint32_t cur_frame_idx = 0;
-static void *grab_buf;
+static uint32_t ready_count;
+static uint32_t cur_fmt_idx;
+static uint32_t cur_frame_idx;
/***********************************
* Mac camera helper functions
***********************************/
-/* Returns current time in microseconds. */
-static uint64_t camera_get_timestamp(void)
-{
- struct timeval t;
- t.tv_sec = t.tv_usec = 0;
- gettimeofday(&t, NULL);
- return (uint64_t)t.tv_sec * 1000000LL + t.tv_usec;
-}
-
-/* Sleeps for the given amount of milliseconds */
-static void camera_sleep(int millisec)
-{
- struct timeval t;
- const uint64_t wake_at = camera_get_timestamp() + (uint64_t)millisec * 1000;
- do {
- const uint64_t stamp = camera_get_timestamp();
- if ((stamp / 1000) >= (wake_at / 1000)) {
- break;
- }
- t.tv_sec = (wake_at - stamp) / 1000000;
- t.tv_usec = (wake_at - stamp) - (uint64_t)t.tv_sec * 1000000;
- } while (select(0, NULL, NULL, NULL, &t) < 0 && errno == EINTR);
-}
-
-// Convert Core Video format to FOURCC
+/* Convert Core Video format to FOURCC */
static uint32_t corevideo_to_fourcc(uint32_t cv_pix_fmt)
{
- //INFO("%s, cv_pix_fmt:%d, %s\n", __FUNCTION__, cv_pix_fmt, (const char*)&cv_pix_fmt);
switch (cv_pix_fmt) {
- case kCVPixelFormatType_420YpCbCr8Planar:
- return V4L2_PIX_FMT_YVU420;
- case kCVPixelFormatType_422YpCbCr8:
- return V4L2_PIX_FMT_UYVY;
- case kCVPixelFormatType_422YpCbCr8_yuvs:
- return V4L2_PIX_FMT_YUYV;
- case kCVPixelFormatType_32ARGB:
- case kCVPixelFormatType_32RGBA:
- return V4L2_PIX_FMT_RGB32;
- case kCVPixelFormatType_32BGRA:
- case kCVPixelFormatType_32ABGR:
- return V4L2_PIX_FMT_BGR32;
- case kCVPixelFormatType_24RGB:
- return V4L2_PIX_FMT_RGB24;
- case kCVPixelFormatType_24BGR:
- return V4L2_PIX_FMT_BGR32;
- default:
- ERR("Unknown pixel format '%.4s'", (const char*)&cv_pix_fmt);
- return 0;
+ case kCVPixelFormatType_420YpCbCr8Planar:
+ return V4L2_PIX_FMT_YVU420;
+ case kCVPixelFormatType_422YpCbCr8:
+ return V4L2_PIX_FMT_UYVY;
+ case kCVPixelFormatType_422YpCbCr8_yuvs:
+ return V4L2_PIX_FMT_YUYV;
+ case kCVPixelFormatType_32ARGB:
+ case kCVPixelFormatType_32RGBA:
+ return V4L2_PIX_FMT_RGB32;
+ case kCVPixelFormatType_32BGRA:
+ case kCVPixelFormatType_32ABGR:
+ return V4L2_PIX_FMT_BGR32;
+ case kCVPixelFormatType_24RGB:
+ return V4L2_PIX_FMT_RGB24;
+ case kCVPixelFormatType_24BGR:
+ return V4L2_PIX_FMT_BGR32;
+ default:
+ ERR("Unknown pixel format '%.4s'", (const char *)&cv_pix_fmt);
+ return 0;
}
}
uint32_t bytesperline;
switch (pixfmt) {
- case V4L2_PIX_FMT_YUV420:
- case V4L2_PIX_FMT_YVU420:
- bytesperline = (width * 12) >> 3;
- break;
- case V4L2_PIX_FMT_YUYV:
- case V4L2_PIX_FMT_UYVY:
- default:
- bytesperline = width * 2;
- break;
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_YVU420:
+ bytesperline = (width * 12) >> 3;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ case V4L2_PIX_FMT_UYVY:
+ default:
+ bytesperline = width * 2;
+ break;
}
return bytesperline;
static uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height)
{
- return (get_bytesperline(pixfmt, width) * height);
+ return get_bytesperline(pixfmt, width) * height;
}
-///////////////////////////////////////////////////////////////////
-// Maru Camera Implementation
-///////////////////////////////////////////////////////////////////
+/******************************************************************
+ ** Maru Camera Implementation
+ *****************************************************************/
@interface MaruCameraDriver : NSObject {
QTCaptureSession *mCaptureSession;
QTCaptureDeviceInput *mCaptureVideoDeviceInput;
QTCaptureVideoPreviewOutput *mCaptureVideoPreviewOutput;
- CVImageBufferRef mCurrentImageBuffer;
- CVImageBufferRef imageBuffer;
+ CVImageBufferRef mCurrentImageBuffer;
BOOL mDeviceIsOpened;
BOOL mCaptureIsStarted;
}
-- (MaruCameraDriver*)init;
+- (MaruCameraDriver *)init;
- (int)startCapture:(int)width:(int)height;
-- (int)stopCapture;
-- (int)readFrame:(void*)video_buf;
+- (void)stopCapture;
+- (int)readFrame:(void *)video_buf;
- (int)setCaptureFormat:(int)width:(int)height:(int)pix_format;
- (int)getCaptureFormat:(int)width:(int)height:(int)pix_format;
- (BOOL)deviceStatus;
@implementation MaruCameraDriver
-- (MaruCameraDriver*)init
+- (MaruCameraDriver *)init
{
- BOOL success = NO;
- NSError *error;
+ BOOL success = NO;
+ NSError *error;
mDeviceIsOpened = NO;
mCaptureIsStarted = NO;
- NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+ NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
- // Create the capture session
+ /* Create the capture session */
mCaptureSession = [[QTCaptureSession alloc] init];
- // Find a video device
+ /* Find a video device */
QTCaptureDevice *videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
success = [videoDevice open:&error];
- // If a video input device can't be found or opened, try to find and open a muxed input device
- if (!success) {
+ /* If a video input device can't be found or opened, try to find and open a muxed input device */
+ if (!success) {
videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
- success = [videoDevice open:&error];
+ success = [videoDevice open:&error];
[pool release];
return nil;
}
}
if (videoDevice) {
- // Add the video device to the session as a device input
- mCaptureVideoDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:videoDevice];
+ /* Add the video device to the session as a device input */
+ mCaptureVideoDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:videoDevice];
success = [mCaptureSession addInput:mCaptureVideoDeviceInput error:&error];
if (!success) {
[pool release];
return nil;
- }
+ }
mCaptureVideoPreviewOutput = [[QTCaptureVideoPreviewOutput alloc] init];
success = [mCaptureSession addOutput:mCaptureVideoPreviewOutput error:&error];
if ([mCaptureSession isRunning]) {
while(!mCaptureIsStarted) {
- // Wait Until Capture is started
+ /* Wait Until Capture is started */
[[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.5]];
}
ret = 0;
return ret;
}
-- (int)stopCapture
+- (void)stopCapture
{
if ([mCaptureSession isRunning]) {
[mCaptureSession stopRunning];
+ while([mCaptureSession isRunning]) {
+ /* Wait Until Capture is stopped */
+ [[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.1]];
+ }
+
}
+ mCaptureIsStarted = NO;
}
-- (int)readFrame:(void*)video_buf
+- (int)readFrame:(void *)video_buf
{
- NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+ NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
@synchronized (self) {
+ if (mCaptureIsStarted == NO) {
+ [pool release];
+ return 0;
+ }
if (mCurrentImageBuffer != nil) {
CVPixelBufferLockBaseAddress(mCurrentImageBuffer, 0);
const uint32_t pixel_format = corevideo_to_fourcc(CVPixelBufferGetPixelFormatType(mCurrentImageBuffer));
const int frame_width = CVPixelBufferGetWidth(mCurrentImageBuffer);
const int frame_height = CVPixelBufferGetHeight(mCurrentImageBuffer);
- const size_t frame_size =CVPixelBufferGetBytesPerRow(mCurrentImageBuffer) * frame_height;
- const void* frame_pixels = CVPixelBufferGetBaseAddress(mCurrentImageBuffer);
+ const size_t frame_size = CVPixelBufferGetBytesPerRow(mCurrentImageBuffer) * frame_height;
+ const void *frame_pixels = CVPixelBufferGetBaseAddress(mCurrentImageBuffer);
- /*
- INFO("buffer(%p), pixel_format(%d,%.4s), frame_width(%d), frame_height(%d), frame_size(%d)\n",
- mCurrentImageBuffer, (int)pixel_format, (const char*)&pixel_format,
- frame_width, frame_height, (int)frame_size);
- */
+ TRACE("buffer(%p), pixel_format(%d,%.4s), frame_width(%d), "
+ "frame_height(%d), frame_size(%d)\n",
+ mCurrentImageBuffer, (int)pixel_format,
+ (const char *)&pixel_format, frame_width,
+ frame_height, (int)frame_size);
- // convert frame to v4l2 format
+ /* convert frame to v4l2 format */
convert_frame(pixel_format, frame_width, frame_height,
- frame_size, (void*)frame_pixels, video_buf);
+ frame_size, (void *)frame_pixels, video_buf);
CVPixelBufferUnlockBaseAddress(mCurrentImageBuffer, 0);
[pool release];
- return 0;
+ return 1;
}
}
- (void)dealloc
{
- [mCaptureSession release];
- [mCaptureVideoDeviceInput release];
+ [mCaptureSession release];
+ [mCaptureVideoDeviceInput release];
[mCaptureVideoPreviewOutput release];
[super dealloc];
}
@end
-///////////////////////////////////////////////////////////////////
-// Maru Camera APIs
-///////////////////////////////////////////////////////////////////
+/******************************************************************
+ ** Maru Camera APIs
+ *****************************************************************/
typedef struct MaruCameraDevice MaruCameraDevice;
struct MaruCameraDevice {
/* Golbal representation of the Maru camera */
MaruCameraDevice *mcd = NULL;
+static int is_streamon()
+{
+ int st;
+ qemu_mutex_lock(&g_state->thread_mutex);
+ st = g_state->streamon;
+ qemu_mutex_unlock(&g_state->thread_mutex);
+ return (st == _MC_THREAD_STREAMON);
+}
+
static void __raise_err_intr()
{
qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon) {
+ if (g_state->streamon == _MC_THREAD_STREAMON) {
g_state->req_frame = 0; /* clear request */
g_state->isr = 0x08; /* set a error flag of rasing a interrupt */
qemu_bh_schedule(g_state->tx_bh);
static int marucam_device_read_frame()
{
int ret;
- void* tmp_buf;
+ void *tmp_buf;
qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon) {
+ if (g_state->streamon == _MC_THREAD_STREAMON) {
+#if 0
if (ready_count < MARUCAM_SKIPFRAMES) {
/* skip a frame cause first some frame are distorted */
++ready_count;
- INFO("Skip %d frame\n", ready_count);
+ TRACE("Skip %d frame\n", ready_count);
qemu_mutex_unlock(&g_state->thread_mutex);
return 0;
}
-
+#endif
if (g_state->req_frame == 0) {
- //INFO("There is no request\n");
+ TRACE("There is no request\n");
qemu_mutex_unlock(&g_state->thread_mutex);
return 0;
}
- // Grab the camera frame into temp buffer
- ret = [mcd->driver readFrame: grab_buf];
+ /* Grab the camera frame into temp buffer */
+ tmp_buf = g_state->vaddr + g_state->buf_size * (g_state->req_frame - 1);
+ ret = [mcd->driver readFrame: tmp_buf];
if (ret < 0) {
- INFO("%s, Capture error\n", __FUNCTION__);
+ ERR("%s, Capture error\n", __func__);
qemu_mutex_unlock(&g_state->thread_mutex);
__raise_err_intr();
return -1;
+ } else if (!ret) {
+ qemu_mutex_unlock(&g_state->thread_mutex);
+ return 0;
}
- tmp_buf = g_state->vaddr + g_state->buf_size * (g_state->req_frame - 1);
- memcpy(tmp_buf, grab_buf, g_state->buf_size);
- g_state->req_frame = 0; // clear request
- g_state->isr |= 0x01; // set a flag of rasing a interrupt
+ g_state->req_frame = 0; /* clear request */
+ g_state->isr |= 0x01; /* set a flag of raising an interrupt */
qemu_bh_schedule(g_state->tx_bh);
+ } else {
+ qemu_mutex_unlock(&g_state->thread_mutex);
+ return -1;
}
qemu_mutex_unlock(&g_state->thread_mutex);
return 0;
{
while (1) {
qemu_mutex_lock(&g_state->thread_mutex);
+ g_state->streamon = _MC_THREAD_PAUSED;
qemu_cond_wait(&g_state->thread_cond, &g_state->thread_mutex);
qemu_mutex_unlock(&g_state->thread_mutex);
break;
}
- //convert_trial = 10;
ready_count = 0;
+ qemu_mutex_lock(&g_state->thread_mutex);
+ g_state->streamon = _MC_THREAD_STREAMON;
+ qemu_mutex_unlock(&g_state->thread_mutex);
+ INFO("Streaming on ......\n");
/* Loop: capture frame -> convert format -> render to screen */
while (1) {
- if (g_state->streamon) {
+ if (is_streamon()) {
if (marucam_device_read_frame() < 0) {
INFO("Streaming is off ...\n");
break;
+ } else {
+ /* wait until next frame is available */
+ usleep(22000);
}
} else {
INFO("Streaming is off ...\n");
int marucam_device_check(int log_flag)
{
- // FIXME: check the device parameters
+ /* FIXME: check the device parameters */
INFO("Checking camera device\n");
return 1;
}
* MARU camera routines
**********************************************/
-void marucam_device_init(MaruCamState* state)
+void marucam_device_init(MaruCamState *state)
{
g_state = state;
g_state->destroying = false;
- qemu_thread_create(&state->thread_id, marucam_worker_thread, (void*)g_state, QEMU_THREAD_JOINABLE);
+ qemu_thread_create(&state->thread_id, marucam_worker_thread,
+ NULL, QEMU_THREAD_JOINABLE);
}
void marucam_device_exit(MaruCamState *state)
}
/* MARUCAM_CMD_OPEN */
-void marucam_device_open(MaruCamState* state)
+void marucam_device_open(MaruCamState *state)
{
MaruCamParam *param = state->param;
param->top = 0;
- mcd = (MaruCameraDevice*)malloc(sizeof(MaruCameraDevice));
+ mcd = (MaruCameraDevice *)malloc(sizeof(MaruCameraDevice));
if (mcd == NULL) {
- ERR("%s: MaruCameraDevice allocate failed\n", __FUNCTION__);
+ ERR("%s: MaruCameraDevice allocate failed\n", __func__);
+ param->errCode = EINVAL;
return;
}
memset(mcd, 0, sizeof(MaruCameraDevice));
mcd->driver = [[MaruCameraDriver alloc] init];
if (mcd->driver == nil) {
- INFO("Camera device open failed\n");
+ ERR("Camera device open failed\n");
+ [mcd->driver dealloc];
+ free(mcd);
+ param->errCode = EINVAL;
return;
}
INFO("Camera opened!\n");
}
/* MARUCAM_CMD_CLOSE */
-void marucam_device_close(MaruCamState* state)
+void marucam_device_close(MaruCamState *state)
{
MaruCamParam *param = state->param;
param->top = 0;
free(mcd);
}
- //marucam_reset_controls();
+ /* marucam_reset_controls(); */
INFO("Camera closed\n");
}
/* MARUCAM_CMD_START_PREVIEW */
-void marucam_device_start_preview(MaruCamState* state)
+void marucam_device_start_preview(MaruCamState *state)
{
uint32_t width, height, pixfmt;
MaruCamParam *param = state->param;
param->top = 0;
- //ready_count = 0;
width = supported_dst_frames[cur_frame_idx].width;
height = supported_dst_frames[cur_frame_idx].height;
pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
state->buf_size = get_sizeimage(pixfmt, width, height);
- /*INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u), frame idx(%d), fmt idx(%d)\n",
+ INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u), frame idx(%d), fmt idx(%d)\n",
(char)(pixfmt), (char)(pixfmt >> 8),
(char)(pixfmt >> 16), (char)(pixfmt >> 24),
width, height, state->buf_size,
cur_frame_idx, cur_fmt_idx);
- */
if (mcd->driver == nil) {
- ERR("%s: Start capture failed: vaild device", __FUNCTION__);
+ ERR("%s: Start capture failed: vaild device", __func__);
+ param->errCode = EINVAL;
return;
}
+
+ INFO("Starting preview ...\n");
[mcd->driver startCapture: width: height];
- if (grab_buf) {
- g_free(grab_buf);
- grab_buf = NULL;
- }
- grab_buf = (void *)malloc(state->buf_size);
- if (grab_buf == NULL) {
- param->errCode = ENOMEM;
- return;
- }
-
- // Enable the condition to capture frames now
+ /* Enable the condition to capture frames now */
qemu_mutex_lock(&state->thread_mutex);
- state->streamon = 1;
qemu_cond_signal(&state->thread_cond);
qemu_mutex_unlock(&state->thread_mutex);
- INFO("Starting preview ...\n");
+ while (!is_streamon()) {
+ usleep(10000);
+ }
}
/* MARUCAM_CMD_STOP_PREVIEW */
-void marucam_device_stop_preview(MaruCamState* state)
+void marucam_device_stop_preview(MaruCamState *state)
{
MaruCamParam *param = state->param;
param->top = 0;
- INFO("Stopping preview ...\n");
+ if (is_streamon()) {
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = _MC_THREAD_STREAMOFF;
+ qemu_mutex_unlock(&state->thread_mutex);
+
+ while (is_streamon()) {
+ usleep(10000);
+ }
+ }
+
if (mcd->driver != nil) {
[mcd->driver stopCapture];
}
- qemu_mutex_lock(&state->thread_mutex);
- state->streamon = 0;
- qemu_mutex_unlock(&state->thread_mutex);
-
- if (grab_buf) {
- g_free(grab_buf);
- grab_buf = NULL;
- }
state->buf_size = 0;
+ INFO("Stopping preview ...\n");
}
/* MARUCAM_CMD_S_PARAM */
-void marucam_device_s_param(MaruCamState* state)
+void marucam_device_s_param(MaruCamState *state)
{
MaruCamParam *param = state->param;
}
/* MARUCAM_CMD_G_PARAM */
-void marucam_device_g_param(MaruCamState* state)
+void marucam_device_g_param(MaruCamState *state)
{
MaruCamParam *param = state->param;
/* We use default FPS of the webcam
}
/* MARUCAM_CMD_S_FMT */
-void marucam_device_s_fmt(MaruCamState* state)
+void marucam_device_s_fmt(MaruCamState *state)
{
MaruCamParam *param = state->param;
uint32_t width, height, pixfmt, pidx, fidx;
height = param->stack[1];
pixfmt = param->stack[2];
- INFO("Set format: width(%d), height(%d), pixfmt(%d, %.4s)\n",
+ TRACE("Set format: width(%d), height(%d), pixfmt(%d, %.4s)\n",
width, height, pixfmt, (const char*)&pixfmt);
for (fidx = 0; fidx < ARRAY_SIZE(supported_dst_frames); fidx++) {
for (pidx = 0; pidx < ARRAY_SIZE(supported_dst_pixfmts); pidx++) {
if (supported_dst_pixfmts[pidx].fmt == pixfmt) {
- INFO("pixfmt index is match: %d\n", pidx);
+ TRACE("pixfmt index is match: %d\n", pidx);
break;
}
}
(supported_dst_frames[cur_frame_idx].height != height)) {
if (mcd->driver == nil || [mcd->driver setCaptureFormat: width: height: 0] < 0) {
ERR("Set pixel format failed\n");
+ param->errCode = EINVAL;
return;
}
- INFO("cur_frame_idx:%d, supported_dst_frames[cur_frame_idx].width:%d\n",
+ TRACE("cur_frame_idx:%d, supported_dst_frames[cur_frame_idx].width:%d\n",
cur_frame_idx, supported_dst_frames[cur_frame_idx].width);
}
param->stack[6] = 0;
param->stack[7] = 0;
- INFO("Set device pixel format ...\n");
+ TRACE("Set device pixel format ...\n");
}
/* MARUCAM_CMD_G_FMT */
-void marucam_device_g_fmt(MaruCamState* state)
+void marucam_device_g_fmt(MaruCamState *state)
{
uint32_t width, height, pixfmt;
MaruCamParam *param = state->param;
param->stack[6] = 0;
param->stack[7] = 0;
- INFO("Get device frame format ...\n");
+ TRACE("Get device frame format ...\n");
}
-void marucam_device_try_fmt(MaruCamState* state)
+void marucam_device_try_fmt(MaruCamState *state)
{
- //INFO("Try device frame format, use default setting ...\n");
+ TRACE("Try device frame format, use default setting ...\n");
}
/* Get specific pixelformat description */
-void marucam_device_enum_fmt(MaruCamState* state)
+void marucam_device_enum_fmt(MaruCamState *state)
{
uint32_t index;
MaruCamParam *param = state->param;
param->stack[1] = 0; /* flags = NONE */
param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
switch (supported_dst_pixfmts[index].fmt) {
- case V4L2_PIX_FMT_YUYV:
- memcpy(¶m->stack[3], "YUYV", 32);
- break;
- case V4L2_PIX_FMT_UYVY:
- memcpy(¶m->stack[3], "UYVY", 32);
- break;
- case V4L2_PIX_FMT_YUV420:
- memcpy(¶m->stack[3], "YU12", 32);
- break;
- case V4L2_PIX_FMT_YVU420:
- memcpy(¶m->stack[3], "YV12", 32);
- break;
- default:
- param->errCode = EINVAL;
- break;
+ case V4L2_PIX_FMT_YUYV:
+ memcpy(¶m->stack[3], "YUYV", 32);
+ break;
+ case V4L2_PIX_FMT_UYVY:
+ memcpy(¶m->stack[3], "UYVY", 32);
+ break;
+ case V4L2_PIX_FMT_YUV420:
+ memcpy(¶m->stack[3], "YU12", 32);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ memcpy(¶m->stack[3], "YV12", 32);
+ break;
+ default:
+ param->errCode = EINVAL;
+ break;
}
}
/*
* QTKit don't support setting brightness, contrast, saturation & sharpness
*/
-void marucam_device_qctrl(MaruCamState* state)
+void marucam_device_qctrl(MaruCamState *state)
{
uint32_t id, i;
- //long property, min, max, step, def_val, set_val;
+ /* long property, min, max, step, def_val, set_val; */
char name[32] = {0,};
MaruCamParam *param = state->param;
id = param->stack[0];
switch (id) {
- case V4L2_CID_BRIGHTNESS:
- INFO("V4L2_CID_BRIGHTNESS\n");
- memcpy((void*)name, (void*)"brightness", 32);
- i = 0;
- break;
- case V4L2_CID_CONTRAST:
- INFO("V4L2_CID_CONTRAST\n");
- memcpy((void*)name, (void*)"contrast", 32);
- i = 1;
- break;
- case V4L2_CID_SATURATION:
- INFO("V4L2_CID_SATURATION\n");
- memcpy((void*)name, (void*)"saturation", 32);
- i = 2;
- break;
- case V4L2_CID_SHARPNESS:
- INFO("V4L2_CID_SHARPNESS\n");
- memcpy((void*)name, (void*)"sharpness", 32);
- i = 3;
- break;
- default:
- param->errCode = EINVAL;
- return;
+ case V4L2_CID_BRIGHTNESS:
+ TRACE("V4L2_CID_BRIGHTNESS\n");
+ memcpy((void *)name, (void *)"brightness", 32);
+ i = 0;
+ break;
+ case V4L2_CID_CONTRAST:
+ TRACE("V4L2_CID_CONTRAST\n");
+ memcpy((void *)name, (void *)"contrast", 32);
+ i = 1;
+ break;
+ case V4L2_CID_SATURATION:
+ TRACE("V4L2_CID_SATURATION\n");
+ memcpy((void *)name, (void *)"saturation", 32);
+ i = 2;
+ break;
+ case V4L2_CID_SHARPNESS:
+ TRACE("V4L2_CID_SHARPNESS\n");
+ memcpy((void *)name, (void *)"sharpness", 32);
+ i = 3;
+ break;
+ default:
+ param->errCode = EINVAL;
+ return;
}
param->stack[0] = id;
param->stack[4] = MARUCAM_CTRL_VALUE_MID; /* default_value */
param->stack[5] = V4L2_CTRL_FLAG_SLIDER;
/* name field setting */
- memcpy(¶m->stack[6], (void*)name, sizeof(name)/sizeof(name[0]));
+ memcpy(¶m->stack[6], (void *)name, sizeof(name)/sizeof(name[0]));
}
-void marucam_device_s_ctrl(MaruCamState* state)
+void marucam_device_s_ctrl(MaruCamState *state)
{
INFO("Set control\n");
}
-void marucam_device_g_ctrl(MaruCamState* state)
+void marucam_device_g_ctrl(MaruCamState *state)
{
INFO("Get control\n");
}
/* Get frame width & height */
-void marucam_device_enum_fsizes(MaruCamState* state)
+void marucam_device_enum_fsizes(MaruCamState *state)
{
uint32_t index, pixfmt, i;
MaruCamParam *param = state->param;
return;
}
for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt)
+ if (supported_dst_pixfmts[i].fmt == pixfmt) {
break;
+ }
}
if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
param->stack[1] = supported_dst_frames[index].height;
}
-void marucam_device_enum_fintv(MaruCamState* state)
+void marucam_device_enum_fintv(MaruCamState *state)
{
MaruCamParam *param = state->param;
param->top = 0;
/* switch by index(param->stack[0]) */
switch (param->stack[0]) {
- case 0:
- param->stack[1] = 30; /* denominator */
- break;
- default:
- param->errCode = EINVAL;
- return;
+ case 0:
+ param->stack[1] = 30; /* denominator */
+ break;
+ default:
+ param->errCode = EINVAL;
+ return;
}
param->stack[0] = 1; /* numerator */
}