static int v4l2_dequeue_and_convert(int index, struct v4l2_buffer *buf,
unsigned char *dest, int dest_size)
{
- const int max_tries = 10;
+ const int max_tries = V4L2_IGNORE_FIRST_FRAME_ERRORS + 1;
int result, tries = max_tries;
/* Make sure we have the real v4l2 buffers mapped */
if (result < 0) {
int saved_err = errno;
- if (errno == EAGAIN)
+ if (errno == EAGAIN || errno == EPIPE)
V4L2_LOG("warning error while converting frame data: %s",
v4lconvert_get_error_message(devices[index].convert));
else
errno = saved_err;
}
tries--;
- } while (result < 0 && errno == EAGAIN && tries);
+ } while (result < 0 && (errno == EAGAIN || errno == EPIPE) && tries);
if (result < 0 && errno == EAGAIN) {
V4L2_LOG_ERR("got %d consecutive frame decode errors, last error: %s",
max_tries, v4lconvert_get_error_message(devices[index].convert));
errno = EAGAIN;
}
+ if (result < 0 && errno == EPIPE) {
+ V4L2_LOG("got %d consecutive short frame errors, "
+ "returning short frame", max_tries);
+ result = devices[index].dest_fmt.fmt.pix.sizeimage;
+ errno = 0;
+ }
+
return result;
}
static int v4l2_read_and_convert(int index, unsigned char *dest, int dest_size)
{
- const int max_tries = 10;
+ const int max_tries = V4L2_IGNORE_FIRST_FRAME_ERRORS + 1;
int result, buf_size, tries = max_tries;
buf_size = devices[index].dest_fmt.fmt.pix.sizeimage;
if (result < 0) {
int saved_err = errno;
- if (errno == EAGAIN)
+ if (errno == EAGAIN || errno == EPIPE)
V4L2_LOG("warning error while converting frame data: %s",
v4lconvert_get_error_message(devices[index].convert));
else
errno = saved_err;
}
tries--;
- } while (result < 0 && errno == EAGAIN && tries);
+ } while (result < 0 && (errno == EAGAIN || errno == EPIPE) && tries);
if (result < 0 && errno == EAGAIN) {
V4L2_LOG_ERR("got %d consecutive frame decode errors, last error: %s",
max_tries, v4lconvert_get_error_message(devices[index].convert));
+ errno = EAGAIN;
+ }
+
+ if (result < 0 && errno == EPIPE) {
+ V4L2_LOG("got %d consecutive short frame errors, "
+ "returning short frame", max_tries);
+ result = devices[index].dest_fmt.fmt.pix.sizeimage;
+ errno = 0;
}
return result;
if (SYS_IOCTL(data->fd, VIDIOC_QUERYCAP, &cap) == 0) {
if (!strcmp((char *)cap.driver, "uvcvideo"))
data->flags |= V4LCONVERT_IS_UVC;
- else if (!strcmp((char *)cap.driver, "sn9c20x"))
- data->flags |= V4LCONVERT_IS_SN9C20X;
if ((cap.capabilities & 0xff) & ~V4L2_CAP_VIDEO_CAPTURE)
always_needs_conversion = 0;
}
if (result) {
- /* Pixart webcam's seem to regulary generate corrupt frames, which
- are best thrown away to avoid flashes in the video stream. Tell
- the upper layer this is an intermediate fault and it should try
- again with a new buffer by setting errno to EAGAIN */
- if (src_pix_fmt == V4L2_PIX_FMT_PJPG ||
- data->flags & V4LCONVERT_IS_SN9C20X) {
- V4LCONVERT_ERR("decompressing JPEG: %s",
- tinyjpeg_get_errorstring(data->jdec));
- errno = EAGAIN;
- return -1;
- } else {
- /* If the JPEG header checked out ok and we get an error during actual
- decompression, log the error, but don't return an errorcode to the
- application, so that the user gets what we managed to decompress */
- fprintf(stderr, "libv4lconvert: Error decompressing JPEG: %s",
- tinyjpeg_get_errorstring(data->jdec));
- }
+ /* The JPEG header checked out ok but we got an error
+ during decompression. Some webcams, esp pixart and
+ sn9c20x based ones regularly generate corrupt frames,
+ which are best thrown away to avoid flashes in the
+ video stream. We use EPIPE to signal the upper layer
+ we have some video data, but it is incomplete.
+
+ The upper layer (usually libv4l2) should respond to
+ this by trying a number of times to get a new frame
+ and if that fails just passing up whatever we did
+ manage to decompress. */
+ V4LCONVERT_ERR("decompressing JPEG: %s",
+ tinyjpeg_get_errorstring(data->jdec));
+ errno = EPIPE;
+ result = -1;
}
break;
/* Deliberate fall through to raw bayer fmt code! */
src_pix_fmt = tmpfmt.fmt.pix.pixelformat;
src = tmpbuf;
+ src_size = width * height;
/* fall through */
}
v4lconvert_bayer_to_yuv420(src, dest, width, height, src_pix_fmt, 1);
break;
}
+ if (src_size < (width * height)) {
+ V4LCONVERT_ERR("short raw bayer data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_RGB565:
v4lconvert_rgb565_to_yuv420(src, dest, fmt, 1);
break;
}
+ if (src_size < (width * height * 2)) {
+ V4LCONVERT_ERR("short rgb565 data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_RGB24:
v4lconvert_rgb24_to_yuv420(src, dest, fmt, 0, 1);
break;
}
+ if (src_size < (width * height * 3)) {
+ V4LCONVERT_ERR("short rgb24 data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_BGR24:
v4lconvert_rgb24_to_yuv420(src, dest, fmt, 1, 1);
break;
}
+ if (src_size < (width * height * 3)) {
+ V4LCONVERT_ERR("short bgr24 data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_YUV420:
v4lconvert_swap_uv(src, dest, fmt);
break;
}
+ if (src_size < (width * height * 3 / 2)) {
+ V4LCONVERT_ERR("short yuv420 data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_YVU420:
v4lconvert_swap_uv(src, dest, fmt);
break;
}
+ if (src_size < (width * height * 3 / 2)) {
+ V4LCONVERT_ERR("short yvu420 data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_YUYV:
v4lconvert_yuyv_to_yuv420(src, dest, width, height, 1);
break;
}
+ if (src_size < (width * height * 2)) {
+ V4LCONVERT_ERR("short yuyv data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_YVYU:
v4lconvert_yuyv_to_yuv420(src, dest, width, height, 0);
break;
}
+ if (src_size < (width * height * 2)) {
+ V4LCONVERT_ERR("short yvyu data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
case V4L2_PIX_FMT_UYVY:
v4lconvert_uyvy_to_yuv420(src, dest, width, height, 1);
break;
}
+ if (src_size < (width * height * 2)) {
+ V4LCONVERT_ERR("short uyvy data frame\n");
+ errno = EPIPE;
+ result = -1;
+ }
break;
default:
fmt->fmt.pix.pixelformat = dest_pix_fmt;
v4lconvert_fixup_fmt(fmt);
- return 0;
+ return result;
}
int v4lconvert_convert(struct v4lconvert_data *data,