#include "maru_camera_darwin.h"
#include "debug_ch.h"
-MULTI_DEBUG_CHANNEL(tizen, camera_darwin);
+MULTI_DEBUG_CHANNEL(tizen, camera_convert);
+
+static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, uint32_t yvu);
+static void rgb24_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, uint32_t yvu, bool reverse);
+static void rgb24_to_yuyv(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool reverse);
+static void yuv420_to_yvu420(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height);
+static void yuv420_to_yuyv(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height);
+static void uyvy_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool is_yvu);
+static void uyvy_to_yuyv(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height);
+
+uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width)
+{
+ uint32_t bytesperline;
+
+ switch (pixfmt) {
+ case V4L2_PIX_FMT_YUV420:
+ case V4L2_PIX_FMT_YVU420:
+ bytesperline = width;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ default:
+ bytesperline = width * 2;
+ break;
+ }
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height);
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height);
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height);
+ return bytesperline;
+}
-/* Convert pixel format to YUV420 */
-void convert_frame(uint32_t pixel_format, int frame_width, int frame_height,
- size_t frame_size, void *frame_pixels, void *video_buf)
+uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height)
{
- switch (pixel_format) {
+ uint32_t bytesperline;
+
+ switch (pixfmt) {
case V4L2_PIX_FMT_YUV420:
- memcpy(video_buf, (void *)frame_pixels, (size_t)frame_size);
- break;
case V4L2_PIX_FMT_YVU420:
- YVU420ToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ bytesperline = (width * 12) >> 3;
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ default:
+ bytesperline = width * 2;
+ break;
+ }
+
+ return bytesperline * height;
+}
+
+/* Convert pixel format to YUV420 */
+uint8_t convert_frame(uint32_t src_format, uint32_t dst_format,
+ uint32_t width, uint32_t height, size_t frame_size,
+ void *src_buf, void *dst_buf, bool reverse)
+{
+ switch (src_format) {
+ case V4L2_PIX_FMT_YUV420:
+ switch (dst_format) {
+ case V4L2_PIX_FMT_YUV420:
+ memcpy(dst_buf, src_buf, frame_size);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ yuv420_to_yvu420(src_buf, dst_buf, width, height);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ yuv420_to_yuyv(src_buf, dst_buf, width, height);
+ break;
+ default:
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
+ return 1;
+ }
break;
case V4L2_PIX_FMT_YUYV:
- YUYVToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ switch (dst_format) {
+ case V4L2_PIX_FMT_YUV420:
+ yuyv_to_yuv420(src_buf, dst_buf, width, height, 0);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ yuyv_to_yuv420(src_buf, dst_buf, width, height, 1);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ memcpy(dst_buf, src_buf, frame_size);
+ break;
+ default:
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
+ return 1;
+ }
break;
case V4L2_PIX_FMT_UYVY: /* Mac default format */
- UYVYToYUV420(frame_pixels, video_buf, frame_width, frame_height);
+ switch (dst_format) {
+ case V4L2_PIX_FMT_YUV420:
+ uyvy_to_yuv420(src_buf, dst_buf, width, height, 0);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ uyvy_to_yuv420(src_buf, dst_buf, width, height, 1);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ uyvy_to_yuyv(src_buf, dst_buf, width, height);
+ break;
+ default:
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
+ return 1;
+ }
break;
- default:
- ERR("Cannot convert the pixel format (%.4s)...\n",
- (const char *)&pixel_format);
+ case V4L2_PIX_FMT_RGB24:
+ switch (dst_format) {
+ case V4L2_PIX_FMT_YUV420:
+ rgb24_to_yuv420(src_buf, dst_buf, width, height, 0, reverse);
+ break;
+ case V4L2_PIX_FMT_YVU420:
+ rgb24_to_yuv420(src_buf, dst_buf, width, height, 1, reverse);
+ break;
+ case V4L2_PIX_FMT_YUYV:
+ rgb24_to_yuyv(src_buf, dst_buf, width, height, reverse);
+ break;
+ default:
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
+ return 1;
+ }
break;
+ default:
+ ERR("Cannot convert from (%.4s) to (%.4s)\n",
+ (const char *)&src_format, (const char *)&dst_format);
+ return 1;
}
+
+ return 0;
}
-static void UYVYToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height)
+static void yuyv_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, uint32_t yvu)
{
- int i, j;
-
- /* Source */
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1;
- unsigned char *ptrsrccb3;
- unsigned char *ptrsrccr1;
- unsigned char *ptrsrccr3;
- int srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc + 1;
- ptrsrcy2 = bufsrc + (width << 1) + 1;
- ptrsrcy3 = bufsrc + (width << 1) * 2 + 1;
- ptrsrcy4 = bufsrc + (width << 1) * 3 + 1;
-
- ptrsrccb1 = bufsrc;
- ptrsrccb3 = bufsrc + (width << 1) * 2;
-
- ptrsrccr1 = bufsrc + 2;
- ptrsrccr3 = bufsrc + (width << 1) * 2 + 2;
-
- srcystride = (width << 1) * 3;
- srcccstride = (width << 1) * 3;
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- int destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
- + (width >> 1);
-
- destystride = (width)*3;
- destccstride = (width>>1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdestcb1++) = (*ptrsrccb1);
- (*ptrdestcb2++) = (*ptrsrccb3);
-
- ptrsrccb1 += 4;
- ptrsrccb3 += 4;
-
- (*ptrdestcr1++) = (*ptrsrccr1);
- (*ptrdestcr2++) = (*ptrsrccr3);
-
- ptrsrccr1 += 4;
- ptrsrccr3 += 4;
+ uint32_t i, j;
+ const unsigned char *src1;
+ unsigned char *udest, *vdest;
+
+ /* copy the Y values */
+ src1 = src;
+ for (i = 0; i < height; i++) {
+ for (j = 0; j < width; j += 2) {
+ *dest++ = src1[0];
+ *dest++ = src1[2];
+ src1 += 4;
+ }
+ }
+ /* copy the U and V values */
+    src++;      /* point to U (YUYV byte layout: Y0 U Y1 V) */
+ src1 = src + width * 2; /* next line */
+ if (yvu) {
+ vdest = dest;
+ udest = dest + width * height / 4;
+ } else {
+ udest = dest;
+ vdest = dest + width * height / 4;
+ }
+ for (i = 0; i < height; i += 2) {
+ for (j = 0; j < width; j += 2) {
+ *udest++ = ((int) src[0] + src1[0]) / 2; /* U */
+ *vdest++ = ((int) src[2] + src1[2]) / 2; /* V */
+ src += 4;
+ src1 += 4;
}
+ src = src1;
+ src1 += width * 2;
+ }
+}
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
+#define RGB2Y(r, g, b, y) \
+ (y) = ((8453 * (r) + 16594 * (g) + 3223 * (b) + 524288) >> 15)
- ptrsrccb1 += srcccstride;
- ptrsrccb3 += srcccstride;
+#define RGB2UV(r, g, b, u, v) \
+ do { \
+ (u) = ((-4878 * (r) - 9578 * (g) + 14456 * (b) + 4210688) >> 15); \
+ (v) = ((14456 * (r) - 12105 * (g) - 2351 * (b) + 4210688) >> 15); \
+ } while (0)
- ptrsrccr1 += srcccstride;
- ptrsrccr3 += srcccstride;
+#define CLIP(color) \
+ (unsigned char)(((color) > 0xFF) ? 0xff : (((color) < 0) ? 0 : (color)))
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
+static void rgb24_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, uint32_t yvu,
+ bool reverse)
+{
+ uint32_t x, y;
+ uint32_t halfWidth;
+ uint8_t *yplane, *uplane, *vplane;
+ uint8_t *yline, *uline, *vline;
+ const uint8_t *rgbIndex;
+
+ halfWidth = width >> 1;
+ yplane = dest;
+
+ if (yvu) {
+ vplane = dest + width * height;
+ uplane = vplane + ((width * height) >> 2);
+ } else {
+ uplane = dest + width * height;
+ vplane = uplane + ((width * height) >> 2);
+ }
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
+ for (y = 0; y < height; y++) {
+ yline = yplane + (y * width);
+ uline = uplane + ((y >> 1) * halfWidth);
+ vline = vplane + ((y >> 1) * halfWidth);
+
+ if (reverse) {
+ rgbIndex = src + (width * (height - 1 - y) * 3);
+ } else {
+ rgbIndex = src + (width * y * 3);
+ }
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
+ for (x = 0; x < (int)width; x += 2) {
+ RGB2Y(rgbIndex[2], rgbIndex[1], rgbIndex[0], *yline++);
+ rgbIndex += 3;
+ RGB2Y(rgbIndex[2], rgbIndex[1], rgbIndex[0], *yline++);
+ RGB2UV(rgbIndex[2], rgbIndex[1], rgbIndex[0], *uline++, *vline++);
+ rgbIndex += 3;
+ }
}
}
-static void YVU420ToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height)
+static void rgb24_to_yuyv(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool reverse)
{
- int i, j;
-
- /* Source*/
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1, *ptrsrccb2;
- unsigned char *ptrsrccr1, *ptrsrccr2;
- int srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc;
- ptrsrcy2 = bufsrc + width;
- ptrsrcy3 = bufsrc + width*2;
- ptrsrcy4 = bufsrc + width*3;
-
- ptrsrccr1 = bufsrc + width*height;
- ptrsrccr2 = bufsrc + width*height + (width>>1);
-
- ptrsrccb1 = bufsrc + width*height + ((width*height) >> 2);
- ptrsrccb2 = bufsrc + width*height + ((width*height) >> 2) + (width>>1);
-
- srcystride = (width)*3;
- srcccstride = (width>>1);
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- int destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width*height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width*height) >> 2)
- + (width >> 1);
-
- destystride = (width)*3;
- destccstride = (width>>1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
-
- (*ptrdesty1++) = (*ptrsrcy1++);
- (*ptrdesty2++) = (*ptrsrcy2++);
- (*ptrdesty3++) = (*ptrsrcy3++);
- (*ptrdesty4++) = (*ptrsrcy4++);
- (*ptrdesty1++) = (*ptrsrcy1++);
- (*ptrdesty2++) = (*ptrsrcy2++);
- (*ptrdesty3++) = (*ptrsrcy3++);
- (*ptrdesty4++) = (*ptrsrcy4++);
-
- (*ptrdestcb1++) = (*ptrsrccb1++);
- (*ptrdestcr1++) = (*ptrsrccr1++);
- (*ptrdestcb2++) = (*ptrsrccb2++);
- (*ptrdestcr2++) = (*ptrsrccr2++);
-
+ uint32_t i, j;
+ uint8_t *ptr;
+
+ for (i = 0; i < height; i++) {
+ if (reverse) {
+ ptr = src + (width * (height - 1 - i) * 3);
+ } else {
+ ptr = src + (width * i * 3);
}
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb2 += srcccstride;
+ for (j = 0; j < width; j += 2) {
+ /* y */
+ *dest++ = CLIP(0.299 * (ptr[2] - 128) +
+ 0.587 * (ptr[1] - 128) +
+ 0.114 * (ptr[0] - 128) + 128);
+ /* u */
+ *dest++ = CLIP(((-0.147 * (ptr[2] - 128) -
+ 0.289 * (ptr[1] - 128) +
+ 0.436 * (ptr[0] - 128) + 128) +
+ (-0.147 * (ptr[5] - 128) -
+ 0.289 * (ptr[4] - 128) +
+ 0.436 * (ptr[3] - 128) + 128)) / 2);
+ /* y1 */
+ *dest++ = CLIP(0.299 * (ptr[5] - 128) +
+ 0.587 * (ptr[4] - 128) +
+ 0.114 * (ptr[3] - 128) + 128);
+ /* v */
+ *dest++ = CLIP(((0.615 * (ptr[2] - 128) -
+ 0.515 * (ptr[1] - 128) -
+ 0.100 * (ptr[0] - 128) + 128) +
+ (0.615 * (ptr[5] - 128) -
+ 0.515 * (ptr[4] - 128) -
+ 0.100 * (ptr[3] - 128) + 128)) / 2);
+ ptr += 6;
+ }
+ }
+}
- ptrsrccr1 += srcccstride;
- ptrsrccr2 += srcccstride;
+static void yuv420_to_yvu420(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height)
+{
+ unsigned char *psrc_y, *pdst_y;
+ unsigned char *psrc_u, *pdst_u;
+ unsigned char *psrc_v, *pdst_v;
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
+ psrc_y = src;
+ psrc_u = psrc_y + (width * height);
+ psrc_v = psrc_u + (width * height / 4);
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
+ pdst_y = dest;
+ pdst_v = pdst_y + (width * height);
+ pdst_u = pdst_v + (width * height / 4);
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
+ memcpy(pdst_y, psrc_y, width * height);
+ memcpy(pdst_v, psrc_v, width * height / 4);
+ memcpy(pdst_u, psrc_u, width * height / 4);
+}
+static void yuv420_to_yuyv(unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height)
+{
+ unsigned char *py;
+ unsigned char *pu;
+ unsigned char *pv;
+
+ uint32_t linesize = width * 2;
+ uint32_t uvlinesize = width / 2;
+ uint32_t offset = 0;
+ uint32_t offset1 = 0;
+ uint32_t offsety = 0;
+ uint32_t offsety1 = 0;
+ uint32_t offsetuv = 0;
+ uint32_t h = 0;
+ uint32_t w = 0;
+ uint32_t wy = 0;
+ uint32_t huv = 0;
+ uint32_t wuv = 0;
+
+ py = src;
+ pu = py + (width * height);
+ pv = pu + (width * height / 4);
+
+ for (h = 0; h < height; h += 2) {
+ wy = 0;
+ wuv = 0;
+ offset = h * linesize;
+ offset1 = (h + 1) * linesize;
+ offsety = h * width;
+ offsety1 = (h + 1) * width;
+ offsetuv = huv * uvlinesize;
+
+ for (w = 0; w < linesize; w += 4) {
+ /* y00 */
+ dest[w + offset] = py[wy + offsety];
+ /* u0 */
+ dest[(w + 1) + offset] = pu[wuv + offsetuv];
+ /* y01 */
+ dest[(w + 2) + offset] = py[(wy + 1) + offsety];
+ /* v0 */
+ dest[(w + 3) + offset] = pv[wuv + offsetuv];
+
+ /* y10 */
+ dest[w + offset1] = py[wy + offsety1];
+ /* u0 */
+ dest[(w + 1) + offset1] = pu[wuv + offsetuv];
+ /* y11 */
+ dest[(w + 2) + offset1] = py[(wy + 1) + offsety1];
+ /* v0 */
+ dest[(w + 3) + offset1] = pv[wuv + offsetuv];
+
+ wuv++;
+ wy += 2;
+ }
+ huv++;
}
-
}
-static void YUYVToYUV420(unsigned char *bufsrc, unsigned char *bufdest,
- int width, int height)
+static void uyvy_to_yuv420(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height, bool is_yvu)
{
- int i, j;
-
- /* Source*/
- unsigned char *ptrsrcy1, *ptrsrcy2;
- unsigned char *ptrsrcy3, *ptrsrcy4;
- unsigned char *ptrsrccb1;
- unsigned char *ptrsrccb3;
- unsigned char *ptrsrccr1;
- unsigned char *ptrsrccr3;
- int srcystride, srcccstride;
-
- ptrsrcy1 = bufsrc ;
- ptrsrcy2 = bufsrc + (width << 1);
- ptrsrcy3 = bufsrc + (width << 1) * 2;
- ptrsrcy4 = bufsrc + (width << 1) * 3;
-
- ptrsrccb1 = bufsrc + 1;
- ptrsrccb3 = bufsrc + (width << 1) * 2 + 1;
-
- ptrsrccr1 = bufsrc + 3;
- ptrsrccr3 = bufsrc + (width << 1) * 2 + 3;
-
- srcystride = (width << 1) * 3;
- srcccstride = (width << 1) * 3;
-
- /* Destination */
- unsigned char *ptrdesty1, *ptrdesty2;
- unsigned char *ptrdesty3, *ptrdesty4;
- unsigned char *ptrdestcb1, *ptrdestcb2;
- unsigned char *ptrdestcr1, *ptrdestcr2;
- int destystride, destccstride;
-
- ptrdesty1 = bufdest;
- ptrdesty2 = bufdest + width;
- ptrdesty3 = bufdest + width * 2;
- ptrdesty4 = bufdest + width * 3;
-
- ptrdestcb1 = bufdest + width * height;
- ptrdestcb2 = bufdest + width * height + (width >> 1);
-
- ptrdestcr1 = bufdest + width * height + ((width * height) >> 2);
- ptrdestcr2 = bufdest + width * height + ((width * height) >> 2)
- + (width >> 1);
-
- destystride = width * 3;
- destccstride = (width >> 1);
-
- for (j = 0; j < (height / 4); j++) {
- for (i = 0; i < (width / 2); i++) {
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdesty1++) = (*ptrsrcy1);
- (*ptrdesty2++) = (*ptrsrcy2);
- (*ptrdesty3++) = (*ptrsrcy3);
- (*ptrdesty4++) = (*ptrsrcy4);
-
- ptrsrcy1 += 2;
- ptrsrcy2 += 2;
- ptrsrcy3 += 2;
- ptrsrcy4 += 2;
-
- (*ptrdestcb1++) = (*ptrsrccb1);
- (*ptrdestcb2++) = (*ptrsrccb3);
-
- ptrsrccb1 += 4;
- ptrsrccb3 += 4;
-
- (*ptrdestcr1++) = (*ptrsrccr1);
- (*ptrdestcr2++) = (*ptrsrccr3);
-
- ptrsrccr1 += 4;
- ptrsrccr3 += 4;
-
+ uint32_t i, j;
+ const unsigned char *src1;
+ unsigned char *udest, *vdest;
+
+ /* copy the Y values */
+ src1 = src;
+ for (i = 0; i < height; i++) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *dest++ = src1[1];
+ *dest++ = src1[3];
+ src1 += 4;
}
+ }
- /* Update src pointers */
- ptrsrcy1 += srcystride;
- ptrsrcy2 += srcystride;
- ptrsrcy3 += srcystride;
- ptrsrcy4 += srcystride;
-
- ptrsrccb1 += srcccstride;
- ptrsrccb3 += srcccstride;
-
- ptrsrccr1 += srcccstride;
- ptrsrccr3 += srcccstride;
-
- /* Update dest pointers */
- ptrdesty1 += destystride;
- ptrdesty2 += destystride;
- ptrdesty3 += destystride;
- ptrdesty4 += destystride;
-
- ptrdestcb1 += destccstride;
- ptrdestcb2 += destccstride;
+ /* copy the U and V values */
+ src1 = src + width * 2; /* next line */
+ if (is_yvu) {
+ vdest = dest;
+ udest = dest + width * height / 4;
+ } else {
+ udest = dest;
+ vdest = dest + width * height / 4;
+ }
+ for (i = 0; i < height; i += 2) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *udest++ = ((int) src[0] + src1[0]) / 2; /* U */
+ *vdest++ = ((int) src[2] + src1[2]) / 2; /* V */
+ src += 4;
+ src1 += 4;
+ }
+ src = src1;
+ src1 += width * 2;
+ }
+}
- ptrdestcr1 += destccstride;
- ptrdestcr2 += destccstride;
+static void uyvy_to_yuyv(const unsigned char *src, unsigned char *dest,
+ uint32_t width, uint32_t height)
+{
+ uint32_t i, j;
+ const unsigned char *src1;
+
+ src1 = src;
+ for (i = 0; i < height; i++) {
+ for (j = 0; j + 1 < width; j += 2) {
+ *dest++ = src1[1];
+ *dest++ = src1[0];
+ *dest++ = src1[3];
+ *dest++ = src1[2];
+ src1 += 4;
+ }
}
}
/*
- * Implementation of MARU Virtual Camera device by PCI bus on MacOS.
+ * Implementation of MARU Virtual Camera device by PCI bus on macOS.
*
* Copyright (c) 2011 - 2013 Samsung Electronics Co., Ltd All Rights Reserved
*
* Contact:
- * Jun Tian <jun.j.tian@intel.com>
* JinHyung Jo <jinhyung.jo@samsung.com>
- * YeongKyoon Lee <yeongkyoon.lee@samsung.com>
+ * SeokYeon Hwang <syeon.hwang@samsung.com>
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
*/
#import <Cocoa/Cocoa.h>
-#import <QTKit/QTKit.h>
-#import <CoreAudio/CoreAudio.h>
+#import <AVFoundation/AVFoundation.h>
-#include <pthread.h>
#include "qemu-common.h"
#include "maru_camera_common.h"
#include "maru_camera_darwin.h"
MULTI_DEBUG_CHANNEL(tizen, maru-camera);
-#define MARUCAM_THREAD_NAME "marucam_worker_thread"
-
/* V4L2 defines copy from videodev2.h */
#define V4L2_CTRL_FLAG_SLIDER 0x0020
#define V4L2_CID_SATURATION (V4L2_CID_BASE + 2)
#define V4L2_CID_SHARPNESS (V4L2_CID_BASE + 27)
-typedef struct tagMaruCamConvertPixfmt {
- uint32_t fmt; /* fourcc */
- uint32_t bpp; /* bits per pixel, 0 for compressed formats */
- uint32_t needs_conversion;
-} MaruCamConvertPixfmt;
-
-
-static MaruCamConvertPixfmt supported_dst_pixfmts[] = {
- { V4L2_PIX_FMT_YUYV, 16, 0 },
- { V4L2_PIX_FMT_UYVY, 16, 0 },
- { V4L2_PIX_FMT_YUV420, 12, 0 },
- { V4L2_PIX_FMT_YVU420, 12, 0 },
+static uint32_t support_fmts[] = {
+ V4L2_PIX_FMT_YUYV,
+ V4L2_PIX_FMT_YUV420,
+ V4L2_PIX_FMT_YVU420,
};
-typedef struct tagMaruCamConvertFrameInfo {
+struct MCFrame {
uint32_t width;
uint32_t height;
-} MaruCamConvertFrameInfo;
+};
-static MaruCamConvertFrameInfo supported_dst_frames[] = {
+static struct MCFrame support_frames[] = {
{ 640, 480 },
{ 352, 288 },
{ 320, 240 },
_MC_THREAD_STREAMOFF,
};
-#if 0
-struct marucam_qctrl {
- uint32_t id;
- uint32_t hit;
- long min;
- long max;
- long step;
- long init_val;
-};
-
-static struct marucam_qctrl qctrl_tbl[] = {
- { V4L2_CID_BRIGHTNESS, 0, },
- { V4L2_CID_CONTRAST, 0, },
- { V4L2_CID_SATURATION, 0, },
- { V4L2_CID_SHARPNESS, 0, },
-};
-#endif
-
-static MaruCamState *g_state;
-
-static uint32_t ready_count;
-static uint32_t cur_fmt_idx;
-static uint32_t cur_frame_idx;
-
/***********************************
- * Mac camera helper functions
+ * macOS camera helper functions
***********************************/
/* Convert Core Video format to FOURCC */
-static uint32_t corevideo_to_fourcc(uint32_t cv_pix_fmt)
+static uint32_t cv_to_fourcc(uint32_t cv_pix_fmt)
{
switch (cv_pix_fmt) {
case kCVPixelFormatType_420YpCbCr8Planar:
}
}
-static uint32_t get_bytesperline(uint32_t pixfmt, uint32_t width)
-{
- uint32_t bytesperline;
-
- switch (pixfmt) {
- case V4L2_PIX_FMT_YUV420:
- case V4L2_PIX_FMT_YVU420:
- bytesperline = (width * 12) >> 3;
- break;
- case V4L2_PIX_FMT_YUYV:
- case V4L2_PIX_FMT_UYVY:
- default:
- bytesperline = width * 2;
- break;
- }
-
- return bytesperline;
-}
-
-static uint32_t get_sizeimage(uint32_t pixfmt, uint32_t width, uint32_t height)
-{
- return get_bytesperline(pixfmt, width) * height;
-}
-
/******************************************************************
** Maru Camera Implementation
*****************************************************************/
+@interface MaruCameraDriver : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> {
+ MaruCamState *mBase;
-@interface MaruCameraDriver : NSObject {
- QTCaptureSession *mCaptureSession;
- QTCaptureDeviceInput *mCaptureVideoDeviceInput;
- QTCaptureVideoPreviewOutput *mCaptureVideoPreviewOutput;
+ AVCaptureDevice *mDevice;
+ AVCaptureSession *mSession;
+ AVCaptureDeviceInput *mInput;
+ AVCaptureVideoDataOutput *mOutput;
- CVImageBufferRef mCurrentImageBuffer;
- BOOL mDeviceIsOpened;
- BOOL mCaptureIsStarted;
+ dispatch_queue_t mOutputQueue;
+
+ uint32_t mFormat;
}
-- (MaruCameraDriver *)init;
-- (int)startCapture:(int)width :(int)height;
+- (id)init:(MaruCamState *)setBase;
+- (int)startCapture;
- (void)stopCapture;
-- (int)readFrame:(void *)video_buf;
-- (int)setCaptureFormat:(int)width :(int)height :(int)pix_format;
-- (int)getCaptureFormat:(int)width :(int)height :(int)pix_format;
-- (BOOL)deviceStatus;
+- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format;
+- (void)deviceWillClose;
@end
@implementation MaruCameraDriver
-- (MaruCameraDriver *)init
+- (id)init:(MaruCamState *)setBase
{
BOOL success = NO;
- NSError *error;
- mDeviceIsOpened = NO;
- mCaptureIsStarted = NO;
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
-
- /* Create the capture session */
- mCaptureSession = [[QTCaptureSession alloc] init];
+ NSError *error = nil;
- /* Find a video device */
- QTCaptureDevice *videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
- success = [videoDevice open:&error];
-
- /* If a video input device can't be found or opened, try to find and open a muxed input device */
- if (!success) {
- videoDevice = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeMuxed];
- success = [videoDevice open:&error];
- [pool release];
+ self = [super init];
+ if (self) {
+ /* Create the capture session */
+ mSession = [[AVCaptureSession alloc] init];
+ } else {
+ ERR("Failed to initialize NSObject\n");
return nil;
}
- if (!success) {
- videoDevice = nil;
- [pool release];
- return nil;
+ /* Find a video device */
+ mDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
+ if (!mDevice) {
+ /* If a video input device can't be found or opened,
+ try to find and open a muxed input device. */
+ mDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeMuxed];
}
- if (videoDevice) {
+ if (mDevice) {
/* Add the video device to the session as a device input */
- mCaptureVideoDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:videoDevice];
- success = [mCaptureSession addInput:mCaptureVideoDeviceInput error:&error];
-
- if (!success) {
- [pool release];
+ mInput = [AVCaptureDeviceInput deviceInputWithDevice:mDevice error:&error];
+ if (mInput == nil) {
+ ERR("Failed to get AVCaptureDeviceInput instance\n");
+ return nil;
+ }
+ success = [mSession canAddInput:mInput];
+ if (success) {
+ [mSession addInput:mInput];
+ } else {
+ ERR("Failed to add AVCaptureInput to the session\n");
return nil;
}
- mCaptureVideoPreviewOutput = [[QTCaptureVideoPreviewOutput alloc] init];
- success = [mCaptureSession addOutput:mCaptureVideoPreviewOutput error:&error];
- if (!success) {
- [pool release];
+ mOutput = [[AVCaptureVideoDataOutput alloc] init];
+ success = [mSession canAddOutput:mOutput];
+ if (success) {
+ [mSession addOutput:mOutput];
+ mOutputQueue = dispatch_queue_create("VideoDataOutputQueue",
+ DISPATCH_QUEUE_SERIAL);
+
+ [mOutput setSampleBufferDelegate:self queue:mOutputQueue];
+ } else {
+ ERR("Failed to add AVCaptureOutput to the session\n");
return nil;
}
- mDeviceIsOpened = YES;
- [mCaptureVideoPreviewOutput setDelegate:self];
+ mBase = setBase;
INFO("Camera session bundling successfully!\n");
- [pool release];
return self;
} else {
- [pool release];
+ ERR("There is no available devices\n");
return nil;
}
}
-- (int)startCapture:(int)width :(int)height
+- (int)startCapture
{
- int ret = -1;
-
- if (![mCaptureSession isRunning]) {
- /* Set width & height, using default pixel format to capture */
- NSDictionary *attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- nil];
- [mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
- [mCaptureSession startRunning];
+ if (![mSession isRunning]) {
+ [mSession startRunning];
} else {
ERR("Capture session is already running, exit\n");
- return ret;
+ return -1;
}
- if ([mCaptureSession isRunning]) {
- while(!mCaptureIsStarted) {
- /* Wait Until Capture is started */
- [[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.5]];
- }
- ret = 0;
- }
- return ret;
+ return 0;
}
- (void)stopCapture
{
- if ([mCaptureSession isRunning]) {
- [mCaptureSession stopRunning];
- while([mCaptureSession isRunning]) {
+ if ([mSession isRunning]) {
+ [mSession stopRunning];
+ while ([mSession isRunning]) {
/* Wait Until Capture is stopped */
- [[NSRunLoop currentRunLoop] runUntilDate: [NSDate dateWithTimeIntervalSinceNow: 0.1]];
+ [[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:0.1]];
}
-
}
- mCaptureIsStarted = NO;
}
-- (int)readFrame:(void *)video_buf
-{
- NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];
-
- @synchronized (self) {
- if (mCaptureIsStarted == NO) {
- [pool release];
- return 0;
- }
- if (mCurrentImageBuffer != nil) {
- CVPixelBufferLockBaseAddress(mCurrentImageBuffer, 0);
- const uint32_t pixel_format = corevideo_to_fourcc(CVPixelBufferGetPixelFormatType(mCurrentImageBuffer));
- const int frame_width = CVPixelBufferGetWidth(mCurrentImageBuffer);
- const int frame_height = CVPixelBufferGetHeight(mCurrentImageBuffer);
- const size_t frame_size = CVPixelBufferGetBytesPerRow(mCurrentImageBuffer) * frame_height;
- const void *frame_pixels = CVPixelBufferGetBaseAddress(mCurrentImageBuffer);
-
- TRACE("buffer(%p), pixel_format(%d,%.4s), frame_width(%d), "
- "frame_height(%d), frame_size(%d)\n",
- mCurrentImageBuffer, (int)pixel_format,
- (const char *)&pixel_format, frame_width,
- frame_height, (int)frame_size);
-
- /* convert frame to v4l2 format */
- convert_frame(pixel_format, frame_width, frame_height,
- frame_size, (void *)frame_pixels, video_buf);
- CVPixelBufferUnlockBaseAddress(mCurrentImageBuffer, 0);
- [pool release];
- return 1;
- }
- }
-
- [pool release];
- return -1;
-}
-
-- (int)setCaptureFormat:(int)width :(int)height :(int)pix_format
+- (int)setCaptureFormat:(int)width setHeight:(int)height setFormat:(uint32_t)format
{
int ret = -1;
- NSDictionary *attributes;
- if (mCaptureSession == nil || mCaptureVideoPreviewOutput == nil) {
+ if (mSession == nil) {
ERR("Capture session is not initiated.\n");
return ret;
}
- /* Set the pixel buffer attributes before running the capture session */
- if (![mCaptureSession isRunning]) {
- if (pix_format) {
- attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- [NSNumber numberWithInt: pix_format], (id)kCVPixelBufferPixelFormatTypeKey,
- nil];
- } else {
- attributes = [NSDictionary dictionaryWithObjectsAndKeys:
- [NSNumber numberWithInt: width], (id)kCVPixelBufferWidthKey,
- [NSNumber numberWithInt: height], (id)kCVPixelBufferHeightKey,
- nil];
- }
- [mCaptureVideoPreviewOutput setPixelBufferAttributes:attributes];
+ if (![mSession isRunning]) {
+ /* Use default pixel format, UYVY */
+ NSDictionary *settings = [NSDictionary dictionaryWithObjectsAndKeys:
+ [NSNumber numberWithInt:kCVPixelFormatType_422YpCbCr8],
+ kCVPixelBufferPixelFormatTypeKey,
+ [NSNumber numberWithInt:width],
+ kCVPixelBufferWidthKey,
+ [NSNumber numberWithInt:height],
+ kCVPixelBufferHeightKey,
+ nil];
+ [mOutput setVideoSettings:settings];
+ mFormat = format;
ret = 0;
} else {
ERR("Cannot set pixel buffer attributes when it's running.\n");
return ret;
}
-- (int)getCaptureFormat:(int)width :(int)height :(int)pix_format
-{
- return 0;
-}
-
-/* Get the device bundling status */
-- (BOOL)deviceStatus
+- (void)deviceWillClose
{
- return mDeviceIsOpened;
+ [mOutput setSampleBufferDelegate:nil queue:nil];
+ dispatch_release(mOutputQueue);
}
/* Handle deallocation of memory for your capture objects */
-
- (void)dealloc
{
- [mCaptureSession release];
- [mCaptureVideoDeviceInput release];
- [mCaptureVideoPreviewOutput release];
+ [mSession release];
+ [mInput release];
+ [mOutput release];
+ [mDevice release];
+
[super dealloc];
}
-/* Receive this method whenever the output decompresses and outputs a new video frame */
-- (void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame
- withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
+- (void)captureOutput:(AVCaptureOutput *)captureOutput
+ didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
+ fromConnection:(AVCaptureConnection *)connection
{
- CVImageBufferRef imageBufferToRelease;
- CVBufferRetain(videoFrame);
-
- @synchronized (self)
- {
- imageBufferToRelease = mCurrentImageBuffer;
- mCurrentImageBuffer = videoFrame;
- mCaptureIsStarted = YES;
+ /* Get a CMSampleBuffer's Core Video image buffer for the media data */
+ CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
+
+ @synchronized (self) {
+ // Lock the base address of the pixel buffer
+ CVPixelBufferLockBaseAddress(imageBuffer, 0);
+
+ const uint32_t src_fmt =
+ cv_to_fourcc(CVPixelBufferGetPixelFormatType(imageBuffer));
+ const uint32_t width = CVPixelBufferGetWidth(imageBuffer);
+ const uint32_t height = CVPixelBufferGetHeight(imageBuffer);
+ const size_t imgsize = CVPixelBufferGetBytesPerRow(imageBuffer) * height;
+ const void *pixels = CVPixelBufferGetBaseAddress(imageBuffer);
+
+ qemu_mutex_lock(&mBase->thread_mutex);
+ if (mBase->streamon) {
+ if (mBase->req_frame) {
+ void *tmp_buf = mBase->vaddr +
+ mBase->buf_size *
+ (mBase->req_frame - 1);
+ /* convert frame to v4l2 format */
+ convert_frame(src_fmt, mFormat, width, height, imgsize,
+ (void *)pixels, tmp_buf, false);
+ mBase->req_frame = 0; /* clear request */
+                mBase->isr |= 0x01; /* set a flag for raising an interrupt */
+ qemu_bh_schedule(mBase->tx_bh);
+ }
+ }
+ qemu_mutex_unlock(&mBase->thread_mutex);
+
+ CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
}
- CVBufferRelease(imageBufferToRelease);
}
@end
** Maru Camera APIs
*****************************************************************/
-typedef struct MaruCameraDevice MaruCameraDevice;
-struct MaruCameraDevice {
+typedef struct MaruCameraDevice {
/* Maru camera device object. */
MaruCameraDriver *driver;
-};
+
+ uint32_t dst_width;
+ uint32_t dst_height;
+ uint32_t dst_fmt;
+} MaruCameraDevice;
/* Golbal representation of the Maru camera */
MaruCameraDevice *mcd = NULL;
-static int is_streamon()
+static uint32_t is_streamon(MaruCamState *state)
{
- int st;
- qemu_mutex_lock(&g_state->thread_mutex);
- st = g_state->streamon;
- qemu_mutex_unlock(&g_state->thread_mutex);
+ uint32_t st;
+ qemu_mutex_lock(&state->thread_mutex);
+ st = state->streamon;
+ qemu_mutex_unlock(&state->thread_mutex);
return (st == _MC_THREAD_STREAMON);
}
-static void __raise_err_intr()
-{
- qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon == _MC_THREAD_STREAMON) {
- g_state->req_frame = 0; /* clear request */
- g_state->isr = 0x08; /* set a error flag of rasing a interrupt */
- qemu_bh_schedule(g_state->tx_bh);
- }
- qemu_mutex_unlock(&g_state->thread_mutex);
-}
-
-static int marucam_device_read_frame()
+static void set_stream_state(MaruCamState *state, uint32_t set_state)
{
- int ret;
- void *tmp_buf;
-
- qemu_mutex_lock(&g_state->thread_mutex);
- if (g_state->streamon == _MC_THREAD_STREAMON) {
-#if 0
- if (ready_count < MARUCAM_SKIPFRAMES) {
- /* skip a frame cause first some frame are distorted */
- ++ready_count;
- TRACE("Skip %d frame\n", ready_count);
- qemu_mutex_unlock(&g_state->thread_mutex);
- return 0;
- }
-#endif
- if (g_state->req_frame == 0) {
- TRACE("There is no request\n");
- qemu_mutex_unlock(&g_state->thread_mutex);
- return 0;
- }
-
- /* Grab the camera frame into temp buffer */
- tmp_buf = g_state->vaddr + g_state->buf_size * (g_state->req_frame - 1);
- ret = [mcd->driver readFrame: tmp_buf];
- if (ret < 0) {
- ERR("%s, Capture error\n", __func__);
- qemu_mutex_unlock(&g_state->thread_mutex);
- __raise_err_intr();
- return -1;
- } else if (!ret) {
- qemu_mutex_unlock(&g_state->thread_mutex);
- return 0;
- }
-
- g_state->req_frame = 0; /* clear request */
- g_state->isr |= 0x01; /* set a flag of rasing a interrupt */
- qemu_bh_schedule(g_state->tx_bh);
- } else {
- qemu_mutex_unlock(&g_state->thread_mutex);
- return -1;
- }
- qemu_mutex_unlock(&g_state->thread_mutex);
- return 0;
-}
-
-/* Worker thread to grab frames to the preview window */
-static void *marucam_worker_thread(void *thread_param)
-{
- while (1) {
- qemu_mutex_lock(&g_state->thread_mutex);
- g_state->streamon = _MC_THREAD_PAUSED;
- qemu_cond_wait(&g_state->thread_cond, &g_state->thread_mutex);
- qemu_mutex_unlock(&g_state->thread_mutex);
-
- if (g_state->destroying) {
- break;
- }
-
- ready_count = 0;
- qemu_mutex_lock(&g_state->thread_mutex);
- g_state->streamon = _MC_THREAD_STREAMON;
- qemu_mutex_unlock(&g_state->thread_mutex);
- INFO("Streaming on ......\n");
-
- /* Loop: capture frame -> convert format -> render to screen */
- while (1) {
- if (is_streamon()) {
- if (marucam_device_read_frame() < 0) {
- INFO("Streaming is off ...\n");
- break;
- } else {
- /* wait until next frame is avalilable */
- usleep(22000);
- }
- } else {
- INFO("Streaming is off ...\n");
- break;
- }
- }
- }
-
- return NULL;
+ qemu_mutex_lock(&state->thread_mutex);
+ state->streamon = set_state;
+ qemu_mutex_unlock(&state->thread_mutex);
}
int marucam_device_check(int log_flag)
{
- /* FIXME: check the device parameters */
- INFO("Checking camera device\n");
return 1;
}
**********************************************/
void marucam_device_init(MaruCamState *state)
{
- g_state = state;
- g_state->destroying = false;
- qemu_thread_create(&state->thread_id,
- MARUCAM_THREAD_NAME,
- marucam_worker_thread,
- NULL,
- QEMU_THREAD_JOINABLE);
}
void marucam_device_exit(MaruCamState *state)
{
- state->destroying = true;
- qemu_mutex_lock(&state->thread_mutex);
- qemu_cond_signal(&state->thread_cond);
- qemu_mutex_unlock(&state->thread_mutex);
- qemu_thread_join(&state->thread_id);
}
/* MARUCAM_CMD_OPEN */
return;
}
memset(mcd, 0, sizeof(MaruCameraDevice));
- mcd->driver = [[MaruCameraDriver alloc] init];
+ mcd->driver = [[MaruCameraDriver alloc] init:state];
if (mcd->driver == nil) {
ERR("Camera device open failed\n");
[mcd->driver dealloc];
param->errCode = EINVAL;
return;
}
+ /* Set default values. TODO: pick defaults more intelligently? */
+ mcd->dst_height = support_frames[0].height;
+ mcd->dst_width = support_frames[0].width;
+ mcd->dst_fmt = support_fmts[0];
+ if ([mcd->driver setCaptureFormat:mcd->dst_width
+ setHeight:mcd->dst_height
+ setFormat:mcd->dst_fmt] < 0) {
+ ERR("Set pixel format failed\n");
+ param->errCode = EINVAL;
+ return;
+ }
+
INFO("Camera opened!\n");
}
param->top = 0;
if (mcd != NULL) {
- if (is_streamon()) {
+ if (is_streamon(state)) {
marucam_device_stop_preview(state);
}
+ [mcd->driver deviceWillClose];
[mcd->driver dealloc];
free(mcd);
mcd = NULL;
}
- /* marucam_reset_controls(); */
INFO("Camera closed\n");
}
/* MARUCAM_CMD_START_PREVIEW */
void marucam_device_start_preview(MaruCamState *state)
{
- uint32_t width, height, pixfmt;
MaruCamParam *param = state->param;
param->top = 0;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- state->buf_size = get_sizeimage(pixfmt, width, height);
+ state->buf_size = get_sizeimage(mcd->dst_fmt, mcd->dst_width, mcd->dst_height);
- INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u), frame idx(%d), fmt idx(%d)\n",
- (char)(pixfmt), (char)(pixfmt >> 8),
- (char)(pixfmt >> 16), (char)(pixfmt >> 24),
- width, height, state->buf_size,
- cur_frame_idx, cur_fmt_idx);
+ INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u)\n",
+ (char)(mcd->dst_fmt), (char)(mcd->dst_fmt >> 8),
+ (char)(mcd->dst_fmt >> 16), (char)(mcd->dst_fmt >> 24),
+ mcd->dst_width, mcd->dst_height, state->buf_size);
+ INFO("Starting preview\n");
if (mcd->driver == nil) {
- ERR("%s: Start capture failed: vaild device", __func__);
+ ERR("%s: Start capture failed: invalid device", __func__);
param->errCode = EINVAL;
return;
}
- INFO("Starting preview ...\n");
- [mcd->driver startCapture: width: height];
-
- /* Enable the condition to capture frames now */
- qemu_mutex_lock(&state->thread_mutex);
- qemu_cond_signal(&state->thread_cond);
- qemu_mutex_unlock(&state->thread_mutex);
+ [mcd->driver startCapture];
- while (!is_streamon()) {
- usleep(10000);
- }
+ set_stream_state(state, _MC_THREAD_STREAMON);
+ INFO("Streaming on ......\n");
}
/* MARUCAM_CMD_STOP_PREVIEW */
MaruCamParam *param = state->param;
param->top = 0;
- if (is_streamon()) {
- qemu_mutex_lock(&state->thread_mutex);
- state->streamon = _MC_THREAD_STREAMOFF;
- qemu_mutex_unlock(&state->thread_mutex);
-
- while (is_streamon()) {
- usleep(10000);
- }
- }
-
+ INFO("...... Streaming off\n");
if (mcd->driver != nil) {
[mcd->driver stopCapture];
}
-
state->buf_size = 0;
- INFO("Stopping preview ...\n");
+
+ set_stream_state(state, _MC_THREAD_STREAMOFF);
+ INFO("Stopping preview\n");
}
/* MARUCAM_CMD_S_PARAM */
TRACE("Set format: width(%d), height(%d), pixfmt(%d, %.4s)\n",
width, height, pixfmt, (const char*)&pixfmt);
- for (fidx = 0; fidx < ARRAY_SIZE(supported_dst_frames); fidx++) {
- if ((supported_dst_frames[fidx].width == width) &&
- (supported_dst_frames[fidx].height == height)) {
+ for (fidx = 0; fidx < ARRAY_SIZE(support_frames); fidx++) {
+ if ((support_frames[fidx].width == width) &&
+ (support_frames[fidx].height == height)) {
break;
}
}
- if (fidx == ARRAY_SIZE(supported_dst_frames)) {
+ if (fidx == ARRAY_SIZE(support_frames)) {
param->errCode = EINVAL;
return;
}
- for (pidx = 0; pidx < ARRAY_SIZE(supported_dst_pixfmts); pidx++) {
- if (supported_dst_pixfmts[pidx].fmt == pixfmt) {
+ for (pidx = 0; pidx < ARRAY_SIZE(support_fmts); pidx++) {
+ if (support_fmts[pidx] == pixfmt) {
TRACE("pixfmt index is match: %d\n", pidx);
break;
}
}
- if (pidx == ARRAY_SIZE(supported_dst_pixfmts)) {
+ if (pidx == ARRAY_SIZE(support_fmts)) {
param->errCode = EINVAL;
return;
}
- if ((supported_dst_frames[cur_frame_idx].width != width) &&
- (supported_dst_frames[cur_frame_idx].height != height)) {
- if (mcd->driver == nil || [mcd->driver setCaptureFormat: width: height: 0] < 0) {
+ if ((mcd->dst_width != width) ||
+ (mcd->dst_height != height) ||
+ (mcd->dst_fmt != pixfmt)) {
+ if (mcd->driver == nil ||
+ [mcd->driver setCaptureFormat:width
+ setHeight:height
+ setFormat:pixfmt] < 0) {
ERR("Set pixel format failed\n");
param->errCode = EINVAL;
return;
}
- TRACE("cur_frame_idx:%d, supported_dst_frames[cur_frame_idx].width:%d\n",
- cur_frame_idx, supported_dst_frames[cur_frame_idx].width);
}
-
- cur_frame_idx = fidx;
- cur_fmt_idx = pidx;
-
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
-
- param->stack[0] = width;
- param->stack[1] = height;
+ param->stack[0] = mcd->dst_width = support_frames[fidx].width;
+ param->stack[1] = mcd->dst_height = support_frames[fidx].height;
param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
+ param->stack[3] = mcd->dst_fmt = support_fmts[pidx];
+
+ param->stack[4] = get_bytesperline(mcd->dst_fmt,
+ mcd->dst_width);
+ param->stack[5] = get_sizeimage(mcd->dst_fmt,
+ mcd->dst_width,
+ mcd->dst_height);
param->stack[6] = 0;
param->stack[7] = 0;
- TRACE("Set device pixel format ...\n");
+ TRACE("Set the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ param->stack[0], param->stack[1], param->stack[3], param->stack[4],
+ param->stack[5], param->stack[6]);
}
/* MARUCAM_CMD_G_FMT */
void marucam_device_g_fmt(MaruCamState *state)
{
- uint32_t width, height, pixfmt;
MaruCamParam *param = state->param;
param->top = 0;
- pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;
- width = supported_dst_frames[cur_frame_idx].width;
- height = supported_dst_frames[cur_frame_idx].height;
-
- param->stack[0] = width;
- param->stack[1] = height;
+ param->stack[0] = mcd->dst_width;
+ param->stack[1] = mcd->dst_height;
param->stack[2] = 1; /* V4L2_FIELD_NONE */
- param->stack[3] = pixfmt;
- param->stack[4] = get_bytesperline(pixfmt, width);
- param->stack[5] = get_sizeimage(pixfmt, width, height);
+ param->stack[3] = mcd->dst_fmt;
+ param->stack[4] = get_bytesperline(mcd->dst_fmt,
+ mcd->dst_width);
+ param->stack[5] = get_sizeimage(mcd->dst_fmt,
+ mcd->dst_width,
+ mcd->dst_height);
param->stack[6] = 0;
param->stack[7] = 0;
- TRACE("Get device frame format ...\n");
+ TRACE("Get the format: w:h(%dx%d), fmt(0x%x), "
+ "bytesperline(%d), sizeimage(%d), colorspace(%d)\n",
+ param->stack[0], param->stack[1], param->stack[3], param->stack[4],
+ param->stack[5], param->stack[6]);
+
}
void marucam_device_try_fmt(MaruCamState *state)
param->top = 0;
index = param->stack[0];
- if (index >= ARRAY_SIZE(supported_dst_pixfmts)) {
+ if (index >= ARRAY_SIZE(support_fmts)) {
param->errCode = EINVAL;
return;
}
param->stack[1] = 0; /* flags = NONE */
- param->stack[2] = supported_dst_pixfmts[index].fmt; /* pixelformat */
- switch (supported_dst_pixfmts[index].fmt) {
+ param->stack[2] = support_fmts[index]; /* pixelformat */
+ switch (support_fmts[index]) {
case V4L2_PIX_FMT_YUYV:
memcpy(¶m->stack[3], "YUYV", 32);
break;
}
}
-/*
- * QTKit don't support setting brightness, contrast, saturation & sharpness
- */
void marucam_device_qctrl(MaruCamState *state)
{
uint32_t id, i;
void marucam_device_s_ctrl(MaruCamState *state)
{
- INFO("Set control\n");
+ INFO("[%s][Not Implemented] AVFoundation doesn't support setting "
+ "brightness, contrast, saturation & sharpness\n", __func__);
}
void marucam_device_g_ctrl(MaruCamState *state)
{
- INFO("Get control\n");
+ INFO("[%s][Not Implemented] AVFoundation doesn't support getting "
+ "brightness, contrast, saturation & sharpness\n", __func__);
}
/* Get frame width & height */
index = param->stack[0];
pixfmt = param->stack[1];
- if (index >= ARRAY_SIZE(supported_dst_frames)) {
+ if (index >= ARRAY_SIZE(support_frames)) {
param->errCode = EINVAL;
return;
}
- for (i = 0; i < ARRAY_SIZE(supported_dst_pixfmts); i++) {
- if (supported_dst_pixfmts[i].fmt == pixfmt) {
+ for (i = 0; i < ARRAY_SIZE(support_fmts); i++) {
+ if (support_fmts[i] == pixfmt) {
break;
}
}
- if (i == ARRAY_SIZE(supported_dst_pixfmts)) {
+ if (i == ARRAY_SIZE(support_fmts)) {
param->errCode = EINVAL;
return;
}
- param->stack[0] = supported_dst_frames[index].width;
- param->stack[1] = supported_dst_frames[index].height;
+ param->stack[0] = support_frames[index].width;
+ param->stack[1] = support_frames[index].height;
}
void marucam_device_enum_fintv(MaruCamState *state)