MULTI_DEBUG_CHANNEL(tizen, camera);
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height);
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height);
static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height, uint32_t yvu);
static void rgb24_to_yuv420(const unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height);
static void yuv420_to_yuyv(unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height);
+static void uyvy_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool is_yvu);
+static void uyvy_to_yuyv(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height);
uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width)
{
case V4L2_PIX_FMT_YUV420:
switch (dst_format) {
case V4L2_PIX_FMT_YUV420:
- memcpy(dst_buf, (void *)src_buf, (size_t)frame_size);
+ memcpy(dst_buf, src_buf, frame_size);
break;
case V4L2_PIX_FMT_YVU420:
yuv420_to_yvu420(src_buf, dst_buf, width, height);
yuv420_to_yuyv(src_buf, dst_buf, width, height);
break;
default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
- return 1;
- }
- break;
- case V4L2_PIX_FMT_YVU420:
- switch (dst_format) {
- case V4L2_PIX_FMT_YUV420:
- YVU420ToYUV420(src_buf, dst_buf, width, height);
- break;
- default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
return 1;
}
break;
yuyv_to_yuv420(src_buf, dst_buf, width, height, 1);
break;
case V4L2_PIX_FMT_YUYV:
- memcpy(dst_buf, (void *)src_buf, (size_t)frame_size);
+ memcpy(dst_buf, src_buf, frame_size);
break;
default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
return 1;
}
break;
case V4L2_PIX_FMT_UYVY: /* Mac default format */
switch (dst_format) {
case V4L2_PIX_FMT_YUV420:
- UYVYToYUV420(src_buf, dst_buf, width, height);
+ uyvy_to_yuv420(src_buf, dst_buf, width, height, 0);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ uyvy_to_yuv420(src_buf, dst_buf, width, height, 1);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ uyvy_to_yuyv(src_buf, dst_buf, width, height);
break;
default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
return 1;
}
break;
rgb24_to_yuyv(src_buf, dst_buf, width, height, reverse);
break;
default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
return 1;
}
break;
default:
- ERR("Cannot convert from the pixel format (%.4s)...\n",
- (const char *)&src_format);
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
return 1;
}
return 0;
}
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height)
-{
- uint32_t i, j;
-
- /* Source */
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1;
- unsigned char *ptrsrccb3;
- unsigned char *ptrsrccr1;
- unsigned char *ptrsrccr3;
- uint32_t srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc + 1;
- ptrsrcy2 = bufsrc + (width << 1) + 1;
- ptrsrcy3 = bufsrc + (width << 1) * 2 + 1;
- ptrsrcy4 = bufsrc + (width << 1) * 3 + 1;
-
- ptrsrccb1 = bufsrc;
- ptrsrccb3 = bufsrc + (width << 1) * 2;
-
- ptrsrccr1 = bufsrc + 2;
- ptrsrccr3 = bufsrc + (width << 1) * 2 + 2;
-
- srcystride = (width << 1) * 3;
- srcccstride = (width << 1) * 3;
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- uint32_t destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
- + (width >> 1);
-
- destystride = (width)*3;
- destccstride = (width>>1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdestcb1++) = (*ptrsrccb1);
- (*ptrdestcb2++) = (*ptrsrccb3);
-
- ptrsrccb1 += 4;
- ptrsrccb3 += 4;
-
- (*ptrdestcr1++) = (*ptrsrccr1);
- (*ptrdestcr2++) = (*ptrsrccr3);
-
- ptrsrccr1 += 4;
- ptrsrccr3 += 4;
-
- }
-
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb3 += srcccstride;
-
- ptrsrccr1 += srcccstride;
- ptrsrccr3 += srcccstride;
-
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
-
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
-
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
- }
-}
-
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- uint32_t width, uint32_t height)
-{
- uint32_t i, j;
-
- /* Source*/
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1, *ptrsrccb2;
- unsigned char *ptrsrccr1, *ptrsrccr2;
- uint32_t srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc;
- ptrsrcy2 = bufsrc + width;
- ptrsrcy3 = bufsrc + width*2;
- ptrsrcy4 = bufsrc + width*3;
-
- ptrsrccr1 = bufsrc + width*height;
- ptrsrccr2 = bufsrc + width*height + (width>>1);
-
- ptrsrccb1 = bufsrc + width*height + ((width*height) >> 2);
- ptrsrccb2 = bufsrc + width*height + ((width*height) >> 2) + (width>>1);
-
- srcystride = (width)*3;
- srcccstride = (width>>1);
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- uint32_t destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
- + (width >> 1);
-
- destystride = (width)*3;
- destccstride = (width>>1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
-
- (*ptrdesty1++) = (*ptrsrcy1++);
- (*ptrdesty2++) = (*ptrsrcy2++);
- (*ptrdesty3++) = (*ptrsrcy3++);
- (*ptrdesty4++) = (*ptrsrcy4++);
- (*ptrdesty1++) = (*ptrsrcy1++);
- (*ptrdesty2++) = (*ptrsrcy2++);
- (*ptrdesty3++) = (*ptrsrcy3++);
- (*ptrdesty4++) = (*ptrsrcy4++);
-
- (*ptrdestcb1++) = (*ptrsrccb1++);
- (*ptrdestcr1++) = (*ptrsrccr1++);
- (*ptrdestcb2++) = (*ptrsrccb2++);
- (*ptrdestcr2++) = (*ptrsrccr2++);
-
- }
-
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb2 += srcccstride;
-
- ptrsrccr1 += srcccstride;
- ptrsrccr2 += srcccstride;
-
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
-
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
-
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
-
- }
-
-}
-
static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
uint32_t width, uint32_t height, uint32_t yvu)
{
huv++;
}
}
+
+/*
+ * Convert one packed UYVY frame (macropixel U0 Y0 V0 Y1) into planar
+ * YUV420 (full-size Y plane followed by quarter-size U and V planes).
+ * Chroma samples are averaged over each vertical pair of source lines.
+ * When is_yvu is true the V plane is written before the U plane, i.e.
+ * the destination layout is YVU420.
+ * NOTE(review): assumes width and height are even -- an odd height would
+ * make the chroma loop read one line past the source buffer; confirm callers.
+ */
+static void uyvy_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool is_yvu)
+{
+ uint32_t i, j;
+ const unsigned char *src1;
+ unsigned char *udest, *vdest;
+
+ /* copy the Y values; dest ends up at the start of the chroma planes */
+ src1 = src;
+ for (i = 0; i < height; i++) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *dest++ = src1[1];
+ *dest++ = src1[3];
+ src1 += 4;
+ }
+ }
+
+ /* copy the U and V values, averaging two source lines per output line */
+ src1 = src + width * 2; /* next line */
+ if (is_yvu) {
+ vdest = dest;
+ udest = dest + width * height / 4;
+ } else {
+ udest = dest;
+ vdest = dest + width * height / 4;
+ }
+ for (i = 0; i < height; i += 2) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *udest++ = ((int) src[0] + src1[0]) / 2; /* U */
+ *vdest++ = ((int) src[2] + src1[2]) / 2; /* V */
+ src += 4;
+ src1 += 4;
+ }
+ src = src1;
+ src1 += width * 2;
+ }
+}
+
+/*
+ * Convert packed UYVY (U0 Y0 V0 Y1) to packed YUYV (Y0 U0 Y1 V0) by
+ * swapping the luma and chroma byte positions within each 4-byte
+ * macropixel. Source and destination frames have identical sizes.
+ */
+static void uyvy_to_yuyv(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height)
+{
+ uint32_t i, j;
+ const unsigned char *src1;
+
+ src1 = src;
+ for (i = 0; i < height; i++) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *dest++ = src1[1]; /* Y0 */
+ *dest++ = src1[0]; /* U */
+ *dest++ = src1[3]; /* Y1 */
+ *dest++ = src1[2]; /* V */
+ src1 += 4;
+ }
+ }
+}
#import <Cocoa/Cocoa.h>
-#import <QTKit/QTKit.h>
-#import <CoreAudio/CoreAudio.h>
+#import <AVFoundation/AVFoundation.h>
-#include <pthread.h>
#include "qemu-common.h"
#include "maru_camera.h"
#include "maru_camera_convert.h"
static uint32_t support_fmts[] = {
V4L2_PIX_FMT_YUYV,
- V4L2_PIX_FMT_UYVY,
V4L2_PIX_FMT_YUV420,
V4L2_PIX_FMT_YVU420,
};
***********************************/
/* Convert Core Video format to FOURCC */
-static uint32_t corevideo_to_fourcc(uint32_t cv_pix_fmt)
+static uint32_t cv_to_fourcc(uint32_t cv_pix_fmt)
{
switch (cv_pix_fmt) {
case kCVPixelFormatType_420YpCbCr8Planar:
case kCVPixelFormatType_24RGB:
return V4L2_PIX_FMT_RGB24;
case kCVPixelFormatType_24BGR:
- return V4L2_PIX_FMT_BGR32;
+ return V4L2_PIX_FMT_BGR24;
default:
- ERR("Unknown pixel format '%.4s'", (const char *)&cv_pix_fmt);
+ ERR("Unknown pixel format '%.4s'\n", (const char *)&cv_pix_fmt);
return 0;
}
}
** Maru Camera Implementation
*****************************************************************/
-@interface MaruCameraDriver : NSObject {
- QTCaptureSession *mCaptureSession;
- QTCaptureDeviceInput *mCaptureVideoDeviceInput;
- QTCaptureVideoPreviewOutput *mCaptureVideoPreviewOutput;
+@interface MaruCameraDriver : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
+ MaruCamState *mBase;
+
+ AVCaptureDevice *mDevice;
+ AVCaptureSession *mSession;
+ AVCaptureDeviceInput *mInput;
+ AVCaptureVideoDataOutput *mOutput;
+
+ dispatch_queue_t mOutputQueue;
- CVImageBufferRef mCurrentImageBuffer;
- BOOL mDeviceIsOpened;
- BOOL mCaptureIsStarted;
- uint32_t mRequestFormat;
+ uint32_t mFormat;
}
-- (id)init;
-- (int)startCapture:(int)width setHeight:(int)height;
+- (id)init:(MaruCamState *)setBase;
+- (int)startCapture;
- (void)stopCapture;
-- (int)readFrame:(void *)video_buf;
- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format;
-- (int)getCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format;
-- (BOOL)deviceStatus;
+- (void)deviceWillClose;
@end
@implementation MaruCameraDriver
+/*
+ * Designated initializer: builds the AVFoundation capture pipeline
+ * (session -> device input -> video data output) and registers self as
+ * the sample-buffer delegate on a private serial queue.
+ * setBase is the emulated camera state frames are delivered into.
+ * Returns nil if no capture device is available or any pipeline stage
+ * cannot be assembled.
+ */
-- (id)init
+- (id)init:(MaruCamState *)setBase
{
BOOL success = NO;
- NSError *error;
- mDeviceIsOpened = NO;
- mCaptureIsStarted = NO;
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
+ NSError *error = nil;
- /* Create the capture session */
- mCaptureSession = [[QTCaptureSession alloc] init];
-
- /* Find a video device */
- QTCaptureDevice *videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
- success = [videoDevice open:&error];
-
- /* If a video input device can't be found or opened, try to find and open a muxed input device */
- if (!success) {
- videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
- success = [videoDevice open:&error];
- [pool release];
+ self = [super init];
+ if (self) {
+ /* Create the capture session */
+ mSession = [[AVCaptureSession alloc] init];
+ } else {
+ ERR("Failed to initialize NSObject\n");
return nil;
}
- if (!success) {
- videoDevice = nil;
- [pool release];
- return nil;
+ /* Find a video device */
+ mDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+ if (!mDevice) {
+ /* If a video input device can't be found or opened,
+ try to find and open a muxed input device. */
+ mDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed];
}
- if (videoDevice) {
+ if (mDevice) {
/* Add the video device to the session as a device input */
- mCaptureVideoDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:videoDevice];
- success = [mCaptureSession addInput:mCaptureVideoDeviceInput error:&error];
-
- if (!success) {
- [pool release];
+ mInput = [AVCaptureDeviceInput deviceInputWithDevice:mDevice error:&error];
+ if (mInput == nil) {
+ ERR("Failed to get AVCaptureDeviceInput instance\n");
+ return nil;
+ }
+ success = [mSession canAddInput:mInput];
+ if (success) {
+ [mSession addInput:mInput];
+ } else {
+ ERR("Failed to add AVCaptureInput to the session\n");
return nil;
}
- mCaptureVideoPreviewOutput = [[QTCaptureVideoPreviewOutput alloc] init];
- success = [mCaptureSession addOutput:mCaptureVideoPreviewOutput error:&error];
- if (!success) {
- [pool release];
+ mOutput = [[AVCaptureVideoDataOutput alloc] init];
+ success = [mSession canAddOutput:mOutput];
+ if (success) {
+ [mSession addOutput:mOutput];
+ mOutputQueue = dispatch_queue_create("VideoDataOutputQueue",
+ DISPATCH_QUEUE_SERIAL);
+
+ [mOutput setSampleBufferDelegate:self queue:mOutputQueue];
+ } else {
+ ERR("Failed to add AVCaptureOutput to the session\n");
return nil;
}
- mDeviceIsOpened = YES;
- [mCaptureVideoPreviewOutput setDelegate:self];
+ mBase = setBase;
INFO("Camera session bundling successfully!\n");
- [pool release];
return self;
} else {
- [pool release];
+ ERR("No available capture devices\n");
return nil;
}
}
-- (int)startCapture:(int)width setHeight:(int)height
+/*
+ * Start the capture session; frames then arrive asynchronously via the
+ * sample-buffer delegate. Returns 0 on success, -1 if already running.
+ */
+- (int)startCapture
{
- int ret = -1;
-
- if (![mCaptureSession isRunning]) {
- /* Set width & height, using default pixel format to capture */
- NSDictionary *attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
- nil];
- [mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
- [mCaptureSession startRunning];
+ if (![mSession isRunning]) {
+ [mSession startRunning];
} else {
ERR("Capture session is already running, exit\n");
- return ret;
+ return -1;
}
- if ([mCaptureSession isRunning]) {
- while (!mCaptureIsStarted) {
- /* Wait Until Capture is started */
- [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.5]];
- }
- ret = 0;
- }
- return ret;
+ return 0;
}
- (void)stopCapture
{
- if ([mCaptureSession isRunning]) {
- [mCaptureSession stopRunning];
- while ([mCaptureSession isRunning]) {
+ if ([mSession isRunning]) {
+ [mSession stopRunning];
+ while ([mSession isRunning]) {
/* Wait Until Capture is stopped */
[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
}
-
}
- mCaptureIsStarted = NO;
-}
-
-- (int)readFrame:(void *)video_buf
-{
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
-
- @synchronized (self) {
- if (mCaptureIsStarted == NO) {
- [pool release];
- return 0;
- }
- if (mCurrentImageBuffer != nil) {
- CVPixelBufferLockBaseAddress(mCurrentImageBuffer, 0);
- const uint32_t pixel_format = corevideo_to_fourcc(CVPixelBufferGetPixelFormatType(mCurrentImageBuffer));
- const int frame_width = CVPixelBufferGetWidth(mCurrentImageBuffer);
- const int frame_height = CVPixelBufferGetHeight(mCurrentImageBuffer);
- const size_t frame_size = CVPixelBufferGetBytesPerRow(mCurrentImageBuffer) * frame_height;
- const void *frame_pixels = CVPixelBufferGetBaseAddress(mCurrentImageBuffer);
-
- TRACE("buffer(%p), pixel_format(%d,%.4s), frame_width(%d), "
- "frame_height(%d), frame_size(%d)\n",
- mCurrentImageBuffer, (int)pixel_format,
- (const char *)&pixel_format, frame_width,
- frame_height, (int)frame_size);
-
- /* convert frame to v4l2 format */
- convert_frame(pixel_format, mRequestFormat, frame_width, frame_height,
- frame_size, (void *)frame_pixels, video_buf, false);
- CVPixelBufferUnlockBaseAddress(mCurrentImageBuffer, 0);
- [pool release];
- return 1;
- }
- }
-
- [pool release];
- return -1;
}
- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format
{
int ret = -1;
- NSDictionary *attributes;
- if (mCaptureSession == nil || mCaptureVideoPreviewOutput == nil) {
+ if (mSession == nil) {
ERR("Capture session is not initiated.\n");
return ret;
}
- /* Set the pixel buffer attributes before running the capture session */
- if (![mCaptureSession isRunning]) {
- attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
- nil];
-/*
- if (format) {
- attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
- [NSNumber numberWithInt:format], (id)kCVPixelBufferPixelFormatTypeKey,
- nil];
- } else {
- attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt:width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt:height], (id)kCVPixelBufferHeightKey,
- nil];
- }
-*/
- [mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
- mRequestFormat = format;
+ if (![mSession isRunning]) {
+ /* Use default pixel format, UYVY */
+ NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:width],
+ kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height],
+ kCVPixelBufferHeightKey,
+ nil];
+ [mOutput setVideoSettings:settings];
+ mFormat = format;
ret = 0;
} else {
ERR("Cannot set pixel buffer attributes when it's running.\n");
return ret;
}
-- (int)getCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format
+/*
+ * Detach the sample-buffer delegate and release its dispatch queue;
+ * must be called before the driver object is deallocated.
+ */
+- (void)deviceWillClose
{
- return 0;
-}
-
-/* Get the device bundling status */
-- (BOOL)deviceStatus
-{
- return mDeviceIsOpened;
+ [mOutput setSampleBufferDelegate:nil queue:nil];
+ dispatch_release(mOutputQueue);
}
/* Handle deallocation of memory for your capture objects */
-
- (void)dealloc
{
- [mCaptureSession release];
- [mCaptureVideoDeviceInput release];
- [mCaptureVideoPreviewOutput release];
+ [mSession release];
+ [mInput release];
+ [mOutput release];
+ [mDevice release];
+
[super dealloc];
}
-/* Receive this method whenever the output decompresses and outputs a new video frame */
-- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame
- withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
+/*
+ * AVCaptureVideoDataOutput delegate callback: a new frame was captured.
+ * If the guest has requested a frame and streaming is on, convert the
+ * pixel buffer into the guest frame buffer and schedule the completion
+ * interrupt bottom-half.
+ */
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
{
- CVImageBufferRef imageBufferToRelease;
- CVBufferRetain(videoFrame);
+ /* Get a CMSampleBuffer's Core Video image buffer for the media data */
+ CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
- @synchronized (self)
- {
- imageBufferToRelease = mCurrentImageBuffer;
- mCurrentImageBuffer = videoFrame;
- mCaptureIsStarted = YES;
+ @synchronized (self) {
+ /* Lock the base address of the pixel buffer */
+ CVPixelBufferLockBaseAddress(imageBuffer, 0);
+
+ const uint32_t src_fmt =
+ cv_to_fourcc(CVPixelBufferGetPixelFormatType(imageBuffer));
+ const uint32_t width = CVPixelBufferGetWidth(imageBuffer);
+ const uint32_t height = CVPixelBufferGetHeight(imageBuffer);
+ const size_t imgsize = CVPixelBufferGetBytesPerRow(imageBuffer) * height;
+ const void *pixels = CVPixelBufferGetBaseAddress(imageBuffer);
+
+ qemu_mutex_lock(&mBase->thread_mutex);
+ if (mBase->streamon) {
+ if (mBase->req_frame) {
+ /* req_frame is 1-based; index into the guest frame buffer */
+ void *tmp_buf = mBase->fb_ptr +
+ mBase->buf_size *
+ (mBase->req_frame - 1);
+ /* convert frame to v4l2 format */
+ convert_frame(src_fmt, mFormat, width, height, imgsize,
+ (void *)pixels, tmp_buf, false);
+ mBase->req_frame = 0; /* clear request */
+ mBase->isr |= 0x01; /* set a flag to raise an interrupt */
+ qemu_bh_schedule(mBase->tx_bh);
+ }
+ }
+ qemu_mutex_unlock(&mBase->thread_mutex);
+
+ CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
- CVBufferRelease(imageBufferToRelease);
}
@end
/* Maru camera device object. */
MaruCameraDriver *driver;
- uint32_t src_fmt;
uint32_t dst_width;
uint32_t dst_height;
uint32_t dst_fmt;
} MCBackendMac;
-static int is_streamon(MaruCamState *state)
+/* Thread-safe check: nonzero while the stream state is _MC_THREAD_STREAMON. */
+static uint32_t is_streamon(MaruCamState *state)
{
- int st;
+ uint32_t st;
qemu_mutex_lock(&state->thread_mutex);
st = state->streamon;
qemu_mutex_unlock(&state->thread_mutex);
return (st == _MC_THREAD_STREAMON);
}
-static void __raise_err_intr(MaruCamState *state)
+/* Update the stream on/off state under the thread mutex. */
+static void set_stream_state(MaruCamState *state, uint32_t set_state)
{
qemu_mutex_lock(&state->thread_mutex);
- if (state->streamon == _MC_THREAD_STREAMON) {
- state->req_frame = 0; /* clear request */
- state->isr = 0x08; /* set a error flag of rasing a interrupt */
- qemu_bh_schedule(state->tx_bh);
- }
+ state->streamon = set_state;
qemu_mutex_unlock(&state->thread_mutex);
}
-static int backend_mac_read_frame(MaruCamState *state)
-{
- int ret;
- void *tmp_buf;
- MCBackendMac *backend = (MCBackendMac *)(state->backend);
-
- qemu_mutex_lock(&state->thread_mutex);
- if (state->streamon == _MC_THREAD_STREAMON) {
- if (state->req_frame == 0) {
- TRACE("There is no request\n");
- qemu_mutex_unlock(&state->thread_mutex);
- return 0;
- }
-
- /* Grab the camera frame into temp buffer */
- tmp_buf = state->fb_ptr + state->buf_size * (state->req_frame - 1);
- ret = [backend->driver readFrame: tmp_buf];
- if (ret < 0) {
- ERR("%s, Capture error\n", __func__);
- qemu_mutex_unlock(&state->thread_mutex);
- __raise_err_intr(state);
- return -1;
- } else if (!ret) {
- qemu_mutex_unlock(&state->thread_mutex);
- return 0;
- }
-
- state->req_frame = 0; /* clear request */
- state->isr |= 0x01; /* set a flag of rasing a interrupt */
- qemu_bh_schedule(state->tx_bh);
- } else {
- qemu_mutex_unlock(&state->thread_mutex);
- return -1;
- }
- qemu_mutex_unlock(&state->thread_mutex);
- return 0;
-}
-
-/* Worker thread to grab frames to the preview window */
-static void *marucam_worker_thread(void *thread_param)
-{
- MaruCamState *state = (MaruCamState *)thread_param;
-
- while (1) {
- qemu_mutex_lock(&state->thread_mutex);
- state->streamon = _MC_THREAD_PAUSED;
- qemu_cond_wait(&state->thread_cond, &state->thread_mutex);
- qemu_mutex_unlock(&state->thread_mutex);
-
- if (state->destroying) {
- break;
- }
-
- qemu_mutex_lock(&state->thread_mutex);
- state->streamon = _MC_THREAD_STREAMON;
- qemu_mutex_unlock(&state->thread_mutex);
- INFO("Streaming on ......\n");
-
- /* Loop: capture frame -> convert format -> render to screen */
- while (1) {
- if (is_streamon(state)) {
- if (backend_mac_read_frame(state) < 0) {
- INFO("Streaming is off ...\n");
- break;
- } else {
- /* wait until next frame is avalilable */
- usleep(22000);
- }
- } else {
- INFO("Streaming is off ...\n");
- break;
- }
- }
- }
-
- return NULL;
-}
-
int marucam_device_check()
{
/* FIXME: check the device parameters */
INFO("Checking camera device: OS X Version: %d.%d.%d\n",
versMaj, versMin, versBugFix);
- if (versMaj == 10 && versMin >= 8) {
- INFO("QTKit is deprecated, camera is not supported\n");
- return 0;
- } else if (versMaj == 10 && versMin >= 5) {
- return 1;
- } else {
- ERR("No available\n");
- return 0;
- }
+ return 1;
}
/**********************************************
**********************************************/
static void backend_mac_init(MaruCamState *state)
{
- state->destroying = false;
- qemu_thread_create(&state->thread_id,
- MARUCAM_THREAD_NAME,
- marucam_worker_thread,
- (void *)state,
- QEMU_THREAD_JOINABLE);
+ /* Frames are now pushed by the AVFoundation delegate callback,
+ so no worker thread is created here. */
state->initialized = true;
}
static void backend_mac_release(MaruCamState *state)
{
MCBackendMac *backend = (MCBackendMac *)(state->backend);
-
- state->destroying = true;
- qemu_mutex_lock(&state->thread_mutex);
- qemu_cond_signal(&state->thread_cond);
- qemu_mutex_unlock(&state->thread_mutex);
- qemu_thread_join(&state->thread_id);
+ /* No capture thread to signal or join anymore; just free the backend. */
g_free(backend);
backend = NULL;
}
{
MCBackendMac *backend = (MCBackendMac *)(state->backend);
- backend->driver = [[MaruCameraDriver alloc] init];
+ backend->driver = [[MaruCameraDriver alloc] init:state];
if (backend->driver == nil) {
ERR("Camera device open failed\n");
[backend->driver dealloc];
return;
}
-
INFO("Opend\n");
}
state->backend->stream_off(state);
}
if (backend->driver != nil) {
+ [backend->driver deviceWillClose];
[backend->driver dealloc];
backend->driver = nil;
INFO("Closed\n");
return;
}
- [backend->driver startCapture:backend->dst_width
- setHeight:backend->dst_height];
-
- /* Enable the condition to capture frames now */
- qemu_mutex_lock(&state->thread_mutex);
- qemu_cond_signal(&state->thread_cond);
- qemu_mutex_unlock(&state->thread_mutex);
-
- while (!is_streamon(state)) {
- usleep(10000);
- }
+ [backend->driver startCapture];
+ set_stream_state(state, _MC_THREAD_STREAMON);
INFO("Streaming on ......\n");
}
MCBackendMac *backend = (MCBackendMac *)(state->backend);
INFO("...... Streaming off\n");
- if (is_streamon(state)) {
- qemu_mutex_lock(&state->thread_mutex);
- state->streamon = _MC_THREAD_STREAMOFF;
- qemu_mutex_unlock(&state->thread_mutex);
-
- while (is_streamon(state)) {
- usleep(10000);
- }
- }
-
if (backend->driver != nil) {
[backend->driver stopCapture];
}
state->buf_size = 0;
+ set_stream_state(state, _MC_THREAD_STREAMOFF);
INFO("Stopping preview\n");
}
MCBackendMac *backend = (MCBackendMac *)(state->backend);
struct v4l2_pix_format *f = (struct v4l2_pix_format *)state->io_ptr;
- TRACE("Set format: w:h(%u:%u), pixfmt(%u, %.4s)\n",
- f->width, f->height, f->pixelformat,
- (const char*)&(f->pixelformat));
-
for (fidx = 0; fidx < ARRAY_SIZE(support_frames); fidx++) {
if ((support_frames[fidx].width == f->width) &&
(support_frames[fidx].height == f->height)) {
}
for (pidx = 0; pidx < ARRAY_SIZE(support_fmts); pidx++) {
if (support_fmts[pidx] == f->pixelformat) {
- TRACE("pixfmt index is match: index(%u)\n", pidx);
break;
}
}
return;
}
- if ((backend->dst_width != f->width) &&
- (backend->dst_height != f->height)) {
+ if ((backend->dst_width != f->width) ||
+ (backend->dst_height != f->height) ||
+ (backend->dst_fmt != f->pixelformat)) {
if ([backend->driver setCaptureFormat:f->width
setHeight:f->height
- setFormat:support_fmts[pidx]] < 0) {
+ setFormat:f->pixelformat] < 0) {
ERR("Set pixel format failed\n");
state->ret_val = EINVAL;
return;
static void backend_mac_s_ctrl(MaruCamState *state)
{
- INFO("[%s][Not Implemented] QTKit don't support setting "
+ INFO("[%s][Not Implemented] AVFoundation don't support setting "
" brightness, contrast, saturation & sharpness\n", __func__);
}
static void backend_mac_g_ctrl(MaruCamState *state)
{
- INFO("[%s][Not Implemented] QTKit don't support getting "
+ INFO("[%s][Not Implemented] AVFoundation don't support getting "
" brightness, contrast, saturation & sharpness\n", __func__);
}