#include <config.h>
#endif
-#define _GNU_SOURCE /* Enable mmap64() */
-
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
-#include <unistd.h>
#include <string.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <gudev/gudev.h>
#endif
+#include "ext/videodev2.h"
#include "gstv4l2object.h"
#include "gstv4l2tuner.h"
#include "gstv4l2colorbalance.h"
#include "gst/gst-i18n-plugin.h"
#include <gst/video/video.h>
+#include <gst/allocators/gstdmabuf.h>
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
{V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
- {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
/* compressed formats */
{V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
{V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
+ {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
+/* Support for 32bit off_t: this wrapper casts off_t up to gint64 before
+ * forwarding to libv4l2's v4l2_mmap(), so the offset is passed as a 64-bit
+ * value even where off_t is only 32 bits wide. */
+#ifdef HAVE_LIBV4L2
+#if SIZEOF_OFF_T < 8
+
+/* Thin forwarding wrapper; only the 'offset' argument is widened. */
+static gpointer
+v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd,
+    off_t offset)
+{
+  return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset);
+}
+
+/* Route all subsequent v4l2_mmap() calls in this file through the wrapper. */
+#define v4l2_mmap v4l2_mmap_wrapper
+
+#endif /* SIZEOF_OFF_T < 8 */
+#endif /* HAVE_LIBV4L2 */
+
GstV4l2Object *
gst_v4l2_object_new (GstElement * element,
+ GstObject * debug_object,
enum v4l2_buf_type type,
const char *default_device,
GstV4l2GetInOutFunction get_in_out_func,
v4l2object->formats = NULL;
v4l2object->element = element;
+ v4l2object->dbg_obj = debug_object;
v4l2object->get_in_out_func = get_in_out_func;
v4l2object->set_in_out_func = set_in_out_func;
v4l2object->update_fps_func = update_fps_func;
v4l2object->dup = dup;
v4l2object->ioctl = ioctl;
v4l2object->read = read;
- v4l2object->mmap = mmap64;
+ v4l2object->mmap = mmap;
v4l2object->munmap = munmap;
}
gst_value_set_fraction (v4l2object->par, 1, 1);
}
- GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d",
gst_value_get_fraction_numerator (v4l2object->par),
gst_value_get_fraction_denominator (v4l2object->par));
break;
break;
case PROP_DEVICE_NAME:
{
- const guchar *new = NULL;
+ const guchar *name = NULL;
- if (GST_V4L2_IS_OPEN (v4l2object)) {
- new = v4l2object->vcap.card;
- } else if (gst_v4l2_open (v4l2object)) {
- new = v4l2object->vcap.card;
- gst_v4l2_close (v4l2object);
- }
- g_value_set_string (value, (gchar *) new);
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ name = v4l2object->vcap.card;
+
+ g_value_set_string (value, (gchar *) name);
break;
}
case PROP_DEVICE_FD:
control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE;
if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"driver requires a minimum of %d buffers", control.value);
v4l2object->min_buffers = control.value;
} else {
if (v4l2object->tv_norm)
norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm);
- GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, "
"norm=%p", (guint64) v4l2object->tv_norm, norm);
if (norm) {
gst_tuner_set_norm (tuner, norm);
v4l2object->par = NULL;
}
+ if (v4l2object->channel) {
+ g_free (v4l2object->channel);
+ v4l2object->channel = NULL;
+ }
+
return TRUE;
}
gint n;
struct v4l2_fmtdesc *format;
- GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations");
/* format enumeration */
for (n = 0;; n++) {
}
}
- GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index);
- GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type);
- GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags);
- GST_LOG_OBJECT (v4l2object->element, "description: '%s'",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags);
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'",
format->description);
- GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT,
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format->pixelformat));
/* sort formats according to our preference; we do this, because caps
{
GSList *l;
- GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n);
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n);
for (l = v4l2object->formats; l != NULL; l = l->next) {
format = l->data;
- GST_INFO_OBJECT (v4l2object->element,
+ GST_INFO_OBJECT (v4l2object->dbg_obj,
" %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat),
((format->flags & V4L2_FMT_FLAG_EMULATED)) ? " (emulated)" : "");
}
{
g_free (format);
- if (!GST_IS_ELEMENT (v4l2object->element))
+ if (v4l2object->element)
return FALSE;
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_ABGR32:
case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
ret = TRUE;
break;
default:
break;
case V4L2_PIX_FMT_MPEG1:
structure = gst_structure_new ("video/mpeg",
- "mpegversion", G_TYPE_INT, 2, NULL);
+ "mpegversion", G_TYPE_INT, 1, NULL);
break;
case V4L2_PIX_FMT_MPEG2:
structure = gst_structure_new ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, "systemstream",
G_TYPE_BOOLEAN, FALSE, NULL);
break;
+ case V4L2_PIX_FMT_FWHT:
+ structure = gst_structure_new_empty ("video/x-fwht");
+ break;
case V4L2_PIX_FMT_H263:
structure = gst_structure_new ("video/x-h263",
"variant", G_TYPE_STRING, "itu", NULL);
"stream-format", G_TYPE_STRING, "avc", "alignment",
G_TYPE_STRING, "au", NULL);
break;
+ case V4L2_PIX_FMT_HEVC: /* H.265 */
+ structure = gst_structure_new ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
case V4L2_PIX_FMT_VC1_ANNEX_G:
case V4L2_PIX_FMT_VC1_ANNEX_L:
structure = gst_structure_new ("video/x-wmv",
break;
}
}
+ } else if (g_str_equal (mimetype, "video/x-fwht")) {
+ fourcc = V4L2_PIX_FMT_FWHT;
} else if (g_str_equal (mimetype, "video/x-h263")) {
fourcc = V4L2_PIX_FMT_H263;
} else if (g_str_equal (mimetype, "video/x-h264")) {
fourcc = V4L2_PIX_FMT_H264_NO_SC;
else
fourcc = V4L2_PIX_FMT_H264;
+ } else if (g_str_equal (mimetype, "video/x-h265")) {
+ fourcc = V4L2_PIX_FMT_HEVC;
} else if (g_str_equal (mimetype, "video/x-vp8")) {
fourcc = V4L2_PIX_FMT_VP8;
} else if (g_str_equal (mimetype, "video/x-vp9")) {
gst_v4l2_object_get_interlace_mode (enum v4l2_field field,
GstVideoInterlaceMode * interlace_mode)
{
- /* NB: If you add new return values, please fix mode_strings in
- * gst_v4l2_object_add_interlace_mode */
switch (field) {
case V4L2_FIELD_ANY:
GST_ERROR
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
break;
- case V4L2_COLORSPACE_ADOBERGB:
+ case V4L2_COLORSPACE_OPRGB:
cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
switch (transfer) {
case V4L2_XFER_FUNC_709:
- if (fmt->fmt.pix.height >= 2160)
+ if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
else
cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
case V4L2_XFER_FUNC_SRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
break;
- case V4L2_XFER_FUNC_ADOBERGB:
+ case V4L2_XFER_FUNC_OPRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
break;
case V4L2_XFER_FUNC_SMPTE240M:
error:
memcpy (try_fmt, &fmt, sizeof (fmt));
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unable to try format: %s", g_strerror (errno));
return r;
}
{
struct v4l2_format fmt;
GValue interlace_formats = { 0, };
+ enum v4l2_field formats[] = { V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED };
+ gsize i;
GstVideoInterlaceMode interlace_mode, prev = -1;
- const gchar *mode_strings[] = { "progressive",
- "interleaved",
- "mixed"
- };
-
if (!g_str_equal (gst_structure_get_name (s), "video/x-raw"))
return;
g_value_init (&interlace_formats, GST_TYPE_LIST);
/* Try twice - once for NONE, once for INTERLACED. */
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_NONE;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
- prev = interlace_mode;
- }
-
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) &&
- prev != interlace_mode) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
+ for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = width;
+ fmt.fmt.pix.height = height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = formats[i];
+
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
+ gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)
+ && prev != interlace_mode) {
+ GValue interlace_enum = { 0, };
+ const gchar *mode_string;
+ g_value_init (&interlace_enum, G_TYPE_STRING);
+ mode_string = gst_video_interlace_mode_to_string (interlace_mode);
+ g_value_set_string (&interlace_enum, mode_string);
+ gst_value_list_append_and_take_value (&interlace_formats,
+ &interlace_enum);
+ prev = interlace_mode;
+ }
}
if (gst_v4l2src_value_simplify (&interlace_formats)
ival.width = width;
ival.height = height;
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height,
GST_FOURCC_ARGS (pixelformat));
denom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d",
denom, num);
/* swap to get the framerate */
minnum >>= 1;
mindenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d",
minnum, mindenom);
gst_value_set_fraction (&min, minnum, mindenom);
maxdenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d",
maxnum, maxdenom);
gst_value_set_fraction (&max, maxnum, maxdenom);
/* since we only have gst_value_fraction_subtract and not add, negate the
* numerator */
- GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d",
num, denom);
gst_value_set_fraction (&step, -num, denom);
num = gst_value_get_fraction_numerator (&min);
denom = gst_value_get_fraction_denominator (&min);
- GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d",
denom, num);
/* invert to get the framerate */
/* we're actually adding because step was negated above. This is because
* there is no _add function... */
if (!gst_value_fraction_subtract (&min, &min, &step)) {
- GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!");
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!");
break;
}
}
if (!added) {
/* no range was added, leave the default range from the template */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"no range added, leaving default");
g_value_unset (&rates);
}
maxdenom >>= 1;
}
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom,
num);
/* ERRORS */
enum_frameintervals_failed:
{
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u",
GST_FOURCC_ARGS (pixelformat), width, height);
goto return_data;
unknown_type:
{
/* I don't see how this is actually an error, we ignore the format then */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u",
GST_FOURCC_ARGS (pixelformat), width, height, ival.type);
return NULL;
size.index = 0;
size.pixel_format = pixelformat;
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Enumerating frame sizes for %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (pixelformat));
if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {
do {
- GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d",
size.discrete.width, size.discrete.height);
w = MIN (size.discrete.width, G_MAXINT);
size.index++;
} while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"done iterating discrete frame sizes");
} else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {
guint32 maxw, maxh, step_w, step_h;
- GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:");
- GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.max_height);
- GST_DEBUG_OBJECT (v4l2object->element, "step width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d",
size.stepwise.step_width);
- GST_DEBUG_OBJECT (v4l2object->element, "step height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d",
size.stepwise.step_height);
w = MAX (size.stepwise.min_width, 1);
} else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {
guint32 maxw, maxh;
- GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:");
- GST_DEBUG_OBJECT (v4l2object->element, "min width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d",
size.stepwise.min_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.min_height);
- GST_DEBUG_OBJECT (v4l2object->element, "max width: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2object->element, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
size.stepwise.max_height);
w = MAX (size.stepwise.min_width, 1);
enum_framesizes_failed:
{
/* I don't see how this is actually an error */
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT
" (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno));
goto default_frame_sizes;
{
/* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in
* question doesn't actually support it yet */
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"No results for pixelformat %" GST_FOURCC_FORMAT
" enumerating frame sizes, trying fallback",
GST_FOURCC_ARGS (pixelformat));
}
unknown_type:
{
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT
": %u", GST_FOURCC_ARGS (pixelformat), size.type);
goto default_frame_sizes;
max_w = max_h = GST_V4L2_MAX_SIZE;
if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w,
&min_h)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Could not probe minimum capture size for pixelformat %"
GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
}
if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w,
&max_h)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Could not probe maximum capture size for pixelformat %"
GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat));
}
g_return_val_if_fail (width != NULL, FALSE);
g_return_val_if_fail (height != NULL, FALSE);
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT,
*width, *height, GST_FOURCC_ARGS (pixelformat));
if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0)
goto error;
- GST_LOG_OBJECT (v4l2object->element,
+ GST_LOG_OBJECT (v4l2object->dbg_obj,
"got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
*width = fmt.fmt.pix.width;
*height = fmt.fmt.pix.height;
if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u",
GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field);
goto error;
error:
if (!ret) {
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Unable to try format: %s", g_strerror (errno));
}
}
static gboolean
+gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object)
+{
+  gboolean ret = TRUE;
+  /* Deliberately invalid index/plane (-1): the VIDIOC_EXPBUF call below is
+   * only a probe for whether the ioctl is implemented, not a real export. */
+  struct v4l2_exportbuffer expbuf = {
+    .type = v4l2object->type,
+    .index = -1,
+    .plane = -1,
+    .flags = O_CLOEXEC | O_RDWR,
+  };
+
+  /* Formats emulated by the libv4l2 converter are produced in userspace,
+   * so there is no kernel buffer that could be exported as DMABuf. */
+  if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
+    GST_WARNING_OBJECT (v4l2object->dbg_obj,
+        "libv4l2 converter detected, disabling DMABuf");
+    ret = FALSE;
+  }
+
+  /* Expected to fail, but ENOTTY tells us that it is not implemented.
+   * NOTE(review): this relies on the failing ioctl setting errno; a stale
+   * ENOTTY from an earlier call could in theory cause a false negative if
+   * the call did not fail -- confirm the invalid index guarantees failure. */
+  v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
+  if (errno == ENOTTY)
+    ret = FALSE;
+
+  return ret;
+}
+
+static gboolean
gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps)
{
GstV4l2IOMode mode;
- GST_DEBUG_OBJECT (v4l2object->element, "initializing the %s system",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system",
V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? "output" : "capture");
GST_V4L2_CHECK_OPEN (v4l2object);
goto method_not_supported;
if (v4l2object->device_caps & V4L2_CAP_STREAMING) {
- if (v4l2object->req_mode == GST_V4L2_IO_AUTO)
- mode = GST_V4L2_IO_MMAP;
- } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
+ if (v4l2object->req_mode == GST_V4L2_IO_AUTO) {
+ if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) &&
+ gst_v4l2_object_is_dmabuf_supported (v4l2object)) {
+ mode = GST_V4L2_IO_DMABUF;
+ } else {
+ mode = GST_V4L2_IO_MMAP;
+ }
+ }
+ } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
+ v4l2object->req_mode == GST_V4L2_IO_DMABUF)
goto method_not_supported;
/* if still no transport selected, error out */
if (mode == GST_V4L2_IO_AUTO)
goto no_supported_capture_method;
- GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode);
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode);
v4l2object->mode = mode;
/* If min_buffers is not set, the driver either does not support the control or
gst_v4l2_get_driver_min_buffers (v4l2object);
/* Map the buffers */
- GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool");
if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
goto buffer_pool_new_failed;
offs += estride *
GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height);
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Extrapolated for plane %d with base stride %d: "
"stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i],
info->offset[i]);
{
const GstVideoFormatInfo *finfo = info->finfo;
gboolean standard_stride = TRUE;
- gint stride, padded_width, padded_height, i;
+ gint stride, pstride, padded_width, padded_height, i;
if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) {
v4l2object->n_v4l2_planes = 1;
else
stride = format->fmt.pix.bytesperline;
- padded_width = stride / GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
+ pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0);
+ if (pstride) {
+ padded_width = stride / pstride;
+ } else {
+ /* pstride can be 0 for complex formats */
+ GST_WARNING_OBJECT (v4l2object->element,
+ "format %s has a pstride of 0, cannot compute padded with",
+ gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
+ padded_width = stride;
+ }
if (padded_width < format->fmt.pix.width)
- GST_WARNING_OBJECT (v4l2object->element,
- "Driver bug detected, stride is too small for the width");
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Driver bug detected, stride (%d) is too small for the width (%d)",
+ padded_width, format->fmt.pix.width);
align->padding_right = padded_width - info->width - align->padding_left;
/* adjust the offset to take into account left and top */
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
if ((align->padding_left + align->padding_top) > 0)
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Left and top padding is not permitted for tiled formats");
} else {
for (i = 0; i < finfo->n_planes; i++) {
}
store_info:
- GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %" G_GSIZE_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT,
info->size);
/* to avoid copies we need video meta if there is padding */
}
static gboolean
+gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
+    const gchar * color)
+{
+  GstVideoColorimetry ci;
+  /* "1:4:0:0": full-range BT.601 with unknown transfer and primaries --
+   * the colorimetry jpegdec typically produces (see fallback below). */
+  static const GstVideoColorimetry ci_likely_jpeg = {
+    GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+    GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN
+  };
+  /* "1:4:7:1": the fully-specified JPEG colorimetry (sRGB transfer,
+   * BT.709 primaries) that the device may report instead. */
+  static const GstVideoColorimetry ci_jpeg = {
+    GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+    GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709
+  };
+
+  /* A colorimetry string we cannot parse never matches. */
+  if (!gst_video_colorimetry_from_string (&ci, color))
+    return FALSE;
+
+  /* Exact match against the device colorimetry. */
+  if (gst_video_colorimetry_is_equal (&ci, cinfo))
+    return TRUE;
+
+  /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
+  if (gst_video_colorimetry_is_equal (&ci, &ci_likely_jpeg)
+      && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
+    return TRUE;
+
+  return FALSE;
+}
+
+static gboolean
gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
gboolean try_only, GstV4l2Error * error)
{
n_v4l_planes = 1;
if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
- GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "interlaced video");
/* ideally we would differentiate between types of interlaced video
* but there is not sufficient information in the caps..
*/
field = V4L2_FIELD_INTERLACED;
} else {
- GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "progressive video");
field = V4L2_FIELD_NONE;
}
- if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
- /* We first pick th main colorspace from the primaries */
- switch (info.colorimetry.primaries) {
- case GST_VIDEO_COLOR_PRIMARIES_BT709:
- /* There is two colorspaces using these primaries, use the range to
- * differentiate */
- if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
- colorspace = V4L2_COLORSPACE_REC709;
- else
- colorspace = V4L2_COLORSPACE_SRGB;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_BT2020:
- colorspace = V4L2_COLORSPACE_BT2020;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_BT470M:
- colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
- colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
- colorspace = V4L2_COLORSPACE_SMPTE170M;
- break;
- case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
- colorspace = V4L2_COLORSPACE_SMPTE240M;
- break;
+ /* We first pick the main colorspace from the primaries */
+ switch (info.colorimetry.primaries) {
+ case GST_VIDEO_COLOR_PRIMARIES_BT709:
+ /* There is two colorspaces using these primaries, use the range to
+ * differentiate */
+ if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235)
+ colorspace = V4L2_COLORSPACE_REC709;
+ else
+ colorspace = V4L2_COLORSPACE_SRGB;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT2020:
+ colorspace = V4L2_COLORSPACE_BT2020;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470M:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_BT470BG:
+ colorspace = V4L2_COLORSPACE_470_SYSTEM_BG;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M:
+ colorspace = V4L2_COLORSPACE_SMPTE170M;
+ break;
+ case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M:
+ colorspace = V4L2_COLORSPACE_SMPTE240M;
+ break;
- case GST_VIDEO_COLOR_PRIMARIES_FILM:
- case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
- /* We don't know, we will guess */
- break;
+ case GST_VIDEO_COLOR_PRIMARIES_FILM:
+ case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN:
+ /* We don't know, we will guess */
+ break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry primaries %d", info.colorimetry.primaries);
- break;
- }
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry primaries %d", info.colorimetry.primaries);
+ break;
+ }
- switch (info.colorimetry.range) {
- case GST_VIDEO_COLOR_RANGE_0_255:
- range = V4L2_QUANTIZATION_FULL_RANGE;
- break;
- case GST_VIDEO_COLOR_RANGE_16_235:
- range = V4L2_QUANTIZATION_LIM_RANGE;
- break;
- case GST_VIDEO_COLOR_RANGE_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry range %d", info.colorimetry.range);
- break;
- }
+ switch (info.colorimetry.range) {
+ case GST_VIDEO_COLOR_RANGE_0_255:
+ range = V4L2_QUANTIZATION_FULL_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_16_235:
+ range = V4L2_QUANTIZATION_LIM_RANGE;
+ break;
+ case GST_VIDEO_COLOR_RANGE_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry range %d", info.colorimetry.range);
+ break;
+ }
- switch (info.colorimetry.matrix) {
- case GST_VIDEO_COLOR_MATRIX_RGB:
- /* Unspecified, leave to default */
- break;
- /* FCC is about the same as BT601 with less digit */
- case GST_VIDEO_COLOR_MATRIX_FCC:
- case GST_VIDEO_COLOR_MATRIX_BT601:
- matrix = V4L2_YCBCR_ENC_601;
- break;
- case GST_VIDEO_COLOR_MATRIX_BT709:
- matrix = V4L2_YCBCR_ENC_709;
- break;
- case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
- matrix = V4L2_YCBCR_ENC_SMPTE240M;
- break;
- case GST_VIDEO_COLOR_MATRIX_BT2020:
- matrix = V4L2_YCBCR_ENC_BT2020;
- break;
- case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry matrix %d", info.colorimetry.matrix);
- break;
- }
+ switch (info.colorimetry.matrix) {
+ case GST_VIDEO_COLOR_MATRIX_RGB:
+ /* Unspecified, leave to default */
+ break;
+ /* FCC is about the same as BT601 with less digit */
+ case GST_VIDEO_COLOR_MATRIX_FCC:
+ case GST_VIDEO_COLOR_MATRIX_BT601:
+ matrix = V4L2_YCBCR_ENC_601;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT709:
+ matrix = V4L2_YCBCR_ENC_709;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_SMPTE240M:
+ matrix = V4L2_YCBCR_ENC_SMPTE240M;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_BT2020:
+ matrix = V4L2_YCBCR_ENC_BT2020;
+ break;
+ case GST_VIDEO_COLOR_MATRIX_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry matrix %d", info.colorimetry.matrix);
+ break;
+ }
- switch (info.colorimetry.transfer) {
- case GST_VIDEO_TRANSFER_GAMMA18:
- case GST_VIDEO_TRANSFER_GAMMA20:
- case GST_VIDEO_TRANSFER_GAMMA22:
- case GST_VIDEO_TRANSFER_GAMMA28:
- GST_WARNING_OBJECT (v4l2object->element,
- "GAMMA 18, 20, 22, 28 transfer functions not supported");
- /* fallthrough */
- case GST_VIDEO_TRANSFER_GAMMA10:
- transfer = V4L2_XFER_FUNC_NONE;
- break;
- case GST_VIDEO_TRANSFER_BT2020_12:
- case GST_VIDEO_TRANSFER_BT709:
- transfer = V4L2_XFER_FUNC_709;
- break;
- case GST_VIDEO_TRANSFER_SMPTE240M:
- transfer = V4L2_XFER_FUNC_SMPTE240M;
- break;
- case GST_VIDEO_TRANSFER_SRGB:
- transfer = V4L2_XFER_FUNC_SRGB;
- break;
- case GST_VIDEO_TRANSFER_LOG100:
- case GST_VIDEO_TRANSFER_LOG316:
- GST_WARNING_OBJECT (v4l2object->element,
- "LOG 100, 316 transfer functions not supported");
- /* FIXME No known sensible default, maybe AdobeRGB ? */
- break;
- case GST_VIDEO_TRANSFER_UNKNOWN:
- /* We let the driver pick a default one */
- break;
- default:
- GST_WARNING_OBJECT (v4l2object->element,
- "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
- break;
- }
+ switch (info.colorimetry.transfer) {
+ case GST_VIDEO_TRANSFER_GAMMA18:
+ case GST_VIDEO_TRANSFER_GAMMA20:
+ case GST_VIDEO_TRANSFER_GAMMA22:
+ case GST_VIDEO_TRANSFER_GAMMA28:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "GAMMA 18, 20, 22, 28 transfer functions not supported");
+ /* fallthrough */
+ case GST_VIDEO_TRANSFER_GAMMA10:
+ transfer = V4L2_XFER_FUNC_NONE;
+ break;
+ case GST_VIDEO_TRANSFER_BT2020_12:
+ case GST_VIDEO_TRANSFER_BT709:
+ transfer = V4L2_XFER_FUNC_709;
+ break;
+ case GST_VIDEO_TRANSFER_SMPTE240M:
+ transfer = V4L2_XFER_FUNC_SMPTE240M;
+ break;
+ case GST_VIDEO_TRANSFER_SRGB:
+ transfer = V4L2_XFER_FUNC_SRGB;
+ break;
+ case GST_VIDEO_TRANSFER_LOG100:
+ case GST_VIDEO_TRANSFER_LOG316:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "LOG 100, 316 transfer functions not supported");
+ /* FIXME No known sensible default, maybe AdobeRGB ? */
+ break;
+ case GST_VIDEO_TRANSFER_UNKNOWN:
+ /* We let the driver pick a default one */
+ break;
+ default:
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Unknown colorimetry tranfer %d", info.colorimetry.transfer);
+ break;
+ }
- if (colorspace == 0) {
- /* Try to guess colorspace according to pixelformat and size */
- if (GST_VIDEO_INFO_IS_YUV (&info)) {
+ if (colorspace == 0) {
+ /* Try to guess colorspace according to pixelformat and size */
+ if (GST_VIDEO_INFO_IS_YUV (&info)) {
+ if (range == V4L2_QUANTIZATION_FULL_RANGE
+ && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) {
+ /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
+ * function most likely is JPEG */
+ colorspace = V4L2_COLORSPACE_JPEG;
+ transfer = V4L2_XFER_FUNC_SRGB;
+ } else {
/* SD streams likely use SMPTE170M and HD streams REC709 */
if (width <= 720 && height <= 576)
colorspace = V4L2_COLORSPACE_SMPTE170M;
else
colorspace = V4L2_COLORSPACE_REC709;
- } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
- colorspace = V4L2_COLORSPACE_SRGB;
- transfer = V4L2_XFER_FUNC_NONE;
}
+ } else if (GST_VIDEO_INFO_IS_RGB (&info)) {
+ colorspace = V4L2_COLORSPACE_SRGB;
+ transfer = V4L2_XFER_FUNC_NONE;
}
}
- GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format "
"%" GST_FOURCC_FORMAT " stride: %d", width, height,
GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0));
gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
format.type = v4l2object->type;
+
format.fmt.pix.width = width;
format.fmt.pix.height = height;
format.fmt.pix.pixelformat = pixelformat;
format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE;
}
- GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format "
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format "
"%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
#ifndef GST_DISABLE_GST_DEBUG
if (is_mplane) {
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
format.fmt.pix_mp.plane_fmt[i].bytesperline);
} else {
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d",
format.fmt.pix.bytesperline);
}
#endif
- if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
- if (is_mplane) {
- format.fmt.pix_mp.colorspace = colorspace;
- format.fmt.pix_mp.quantization = range;
- format.fmt.pix_mp.ycbcr_enc = matrix;
- format.fmt.pix_mp.xfer_func = transfer;
- } else {
- format.fmt.pix.colorspace = colorspace;
- format.fmt.pix.quantization = range;
- format.fmt.pix.ycbcr_enc = matrix;
- format.fmt.pix.xfer_func = transfer;
- }
-
- GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d:%d:%d:%d",
- colorspace, range, matrix, transfer);
+ if (is_mplane) {
+ format.fmt.pix_mp.colorspace = colorspace;
+ format.fmt.pix_mp.quantization = range;
+ format.fmt.pix_mp.ycbcr_enc = matrix;
+ format.fmt.pix_mp.xfer_func = transfer;
+ } else {
+ format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
+ format.fmt.pix.colorspace = colorspace;
+ format.fmt.pix.quantization = range;
+ format.fmt.pix.ycbcr_enc = matrix;
+ format.fmt.pix.xfer_func = transfer;
}
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
+ colorspace, range, matrix, transfer);
+
if (try_only) {
if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
goto try_fmt_failed;
goto set_fmt_failed;
}
- GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format "
- "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d",
+ if (is_mplane) {
+ colorspace = format.fmt.pix_mp.colorspace;
+ range = format.fmt.pix_mp.quantization;
+ matrix = format.fmt.pix_mp.ycbcr_enc;
+ transfer = format.fmt.pix_mp.xfer_func;
+ } else {
+ colorspace = format.fmt.pix.colorspace;
+ range = format.fmt.pix.quantization;
+ matrix = format.fmt.pix.ycbcr_enc;
+ transfer = format.fmt.pix.xfer_func;
+ }
+
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format "
+ "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
format.fmt.pix.width, format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
is_mplane ? format.fmt.pix_mp.num_planes : 1,
- is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace);
+ colorspace, range, matrix, transfer);
#ifndef GST_DISABLE_GST_DEBUG
if (is_mplane) {
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
format.fmt.pix_mp.plane_fmt[i].bytesperline,
format.fmt.pix_mp.plane_fmt[i].sizeimage);
} else {
- GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d",
format.fmt.pix.bytesperline, format.fmt.pix.sizeimage);
}
#endif
if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
goto invalid_planes;
- if ((is_mplane && format.fmt.pix_mp.field != field)
- || format.fmt.pix.field != field)
+ /* used to check for the presence of the colorimetry and interlace-mode fields */
+ s = gst_caps_get_structure (caps, 0);
+
+ if (!gst_v4l2_object_get_interlace_mode (format.fmt.pix.field,
+ &info.interlace_mode))
goto invalid_field;
+ if (gst_structure_has_field (s, "interlace-mode")) {
+ if (format.fmt.pix.field != field)
+ goto invalid_field;
+ }
gst_v4l2_object_get_colorspace (&format, &info.colorimetry);
-
- s = gst_caps_get_structure (caps, 0);
if (gst_structure_has_field (s, "colorimetry")) {
- GstVideoColorimetry ci;
- if (!gst_video_colorimetry_from_string (&ci,
- gst_structure_get_string (s, "colorimetry"))
- || !gst_video_colorimetry_is_equal (&ci, &info.colorimetry))
+ if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry,
+ gst_structure_get_string (s, "colorimetry")))
goto invalid_colorimetry;
}
ctl.value = 0xff;
if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0)
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Failed to set alpha component value");
}
/* Is there a reason we require the caller to always specify a framerate? */
- GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n,
fps_d);
memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm));
GST_VIDEO_INFO_FPS_D (&info) =
streamparm.parm.capture.timeperframe.numerator;
- GST_DEBUG_OBJECT (v4l2object->element, "Got capture framerate: %u/%u",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u",
streamparm.parm.capture.timeperframe.denominator,
streamparm.parm.capture.timeperframe.numerator);
* causing them to not output data (several models of Thinkpad cameras
* have this problem at least).
* So, don't skip. */
- GST_LOG_OBJECT (v4l2object->element, "Setting capture framerate to %u/%u",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u",
fps_n, fps_d);
/* We want to change the frame rate, so check whether we can. Some cheap USB
* cameras don't have the capability */
if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Not setting capture framerate (not supported)");
goto done;
}
fps_d = streamparm.parm.capture.timeperframe.numerator;
fps_n = streamparm.parm.capture.timeperframe.denominator;
- GST_INFO_OBJECT (v4l2object->element, "Set capture framerate to %u/%u",
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u",
fps_n, fps_d);
} else {
/* fix v4l2 capture driver to provide framerate values */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d);
}
GST_VIDEO_INFO_FPS_D (&info) =
streamparm.parm.output.timeperframe.numerator;
- GST_DEBUG_OBJECT (v4l2object->element, "Got output framerate: %u/%u",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u",
streamparm.parm.output.timeperframe.denominator,
streamparm.parm.output.timeperframe.numerator);
- GST_LOG_OBJECT (v4l2object->element, "Setting output framerate to %u/%u",
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u",
fps_n, fps_d);
if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"Not setting output framerate (not supported)");
goto done;
}
fps_d = streamparm.parm.output.timeperframe.numerator;
fps_n = streamparm.parm.output.timeperframe.denominator;
- GST_INFO_OBJECT (v4l2object->element, "Set output framerate to %u/%u",
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u",
fps_n, fps_d);
} else {
/* fix v4l2 output driver to provide framerate values */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d);
}
/* ERRORS */
invalid_caps:
{
- GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT,
caps);
return FALSE;
}
gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps,
GstV4l2Error * error)
{
- GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT,
caps);
return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error);
}
gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps,
GstV4l2Error * error)
{
- GST_DEBUG_OBJECT (v4l2object->element, "Trying format %" GST_PTR_FORMAT,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT,
caps);
return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error);
}
gboolean
gst_v4l2_object_set_crop (GstV4l2Object * obj)
{
+ struct v4l2_selection sel = { 0 };
struct v4l2_crop crop = { 0 };
+ sel.type = obj->type;
+ sel.target = V4L2_SEL_TGT_CROP;
+ sel.flags = 0;
+ sel.r.left = obj->align.padding_left;
+ sel.r.top = obj->align.padding_top;
+ sel.r.width = obj->info.width;
+ sel.r.height = obj->info.height;
+
crop.type = obj->type;
- crop.c.left = obj->align.padding_left;
- crop.c.top = obj->align.padding_top;
- crop.c.width = obj->info.width;
- crop.c.height = obj->info.height;
+ crop.c = sel.r;
if (obj->align.padding_left + obj->align.padding_top +
obj->align.padding_right + obj->align.padding_bottom == 0) {
- GST_DEBUG_OBJECT (obj->element, "no cropping needed");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed");
return TRUE;
}
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
crop.c.width, crop.c.height);
- if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->element, "VIDIOC_S_CROP failed");
- return FALSE;
- }
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) {
+ if (errno != ENOTTY) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
+ g_strerror (errno));
+ return FALSE;
+ } else {
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
+ return FALSE;
+ }
- if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->element, "VIDIOC_G_CROP failed");
- return FALSE;
+ if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
+ return FALSE;
+ }
+
+ sel.r = crop.c;
+ }
}
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
crop.c.width, crop.c.height);
}
gboolean
+/* Checks whether the caps currently configured on the object's buffer pool
+ * are a subset of @caps, i.e. whether the existing pool is still compatible
+ * with @caps.  Returns FALSE when there is no pool or no caps were set. */
+gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
+{
+ GstStructure *config;
+ GstCaps *oldcaps;
+ gboolean ret;
+
+ if (!v4l2object->pool)
+ return FALSE;
+
+ /* the pool config holds the caps the pool was last configured with */
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
+
+ /* config is a copy owned by us and must be freed */
+ gst_structure_free (config);
+
+ return ret;
+}
+
+/* Returns the caps currently configured on the object's buffer pool, or
+ * NULL when there is no pool or the pool has no caps configured.  The
+ * caller owns the returned reference. */
+GstCaps *
+gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
+{
+ GstStructure *config;
+ GstCaps *oldcaps;
+
+ if (!v4l2object->pool)
+ return NULL;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ /* the config only borrows the caps; take a ref before freeing the config */
+ if (oldcaps)
+ gst_caps_ref (oldcaps);
+
+ gst_structure_free (config);
+
+ return oldcaps;
+}
+
+gboolean
gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
{
gboolean ret = TRUE;
- GST_LOG_OBJECT (v4l2object->element, "start flushing");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
{
gboolean ret = TRUE;
- GST_LOG_OBJECT (v4l2object->element, "stop flushing");
+ GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
gboolean
gst_v4l2_object_stop (GstV4l2Object * v4l2object)
{
- GST_DEBUG_OBJECT (v4l2object->element, "stopping");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
if (!GST_V4L2_IS_OPEN (v4l2object))
goto done;
goto done;
if (v4l2object->pool) {
- GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
- gst_buffer_pool_set_active (v4l2object->pool, FALSE);
- gst_object_unref (v4l2object->pool);
+ if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+ gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+ gst_object_unref (v4l2object->pool);
+ }
v4l2object->pool = NULL;
}
cropcap.type = v4l2object->type;
if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
if (errno != ENOTTY)
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
g_strerror (errno));
- } else {
+ } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
v4l2object->par = g_new0 (GValue, 1);
g_value_init (v4l2object->par, GST_TYPE_FRACTION);
gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
if (!template) {
- GST_DEBUG_OBJECT (v4l2object->element,
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"unknown format %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (format->pixelformat));
continue;
gst_caps_unref (tmp);
}
- GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
+ GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
return ret;
}
GstAllocator *allocator = NULL;
GstAllocationParams params = { 0 };
- GST_DEBUG_OBJECT (obj->element, "decide allocation");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
update = FALSE;
}
- GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%"
GST_PTR_FORMAT, size, min, max, pool);
has_video_meta =
if (pool) {
/* in READ/WRITE mode, prefer a downstream pool because our own pool
* doesn't help much, we have to write to it as well */
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"read/write mode: using downstream pool");
/* use the bigest size, when we use our own pool we can't really do any
* other size than what the hardware gives us but for downstream pools
size = MAX (size, obj->info.size);
} else if (can_share_own_pool) {
/* no downstream pool, use our own then */
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"read/write mode: no downstream pool, using our own");
pool = gst_object_ref (obj->pool);
size = obj->info.size;
gst_object_unref (pool);
pool = gst_object_ref (obj->pool);
size = obj->info.size;
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
pushing_from_our_pool = TRUE;
} else if (pool) {
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: copying to downstream pool %" GST_PTR_FORMAT,
pool);
} else {
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: no usable pool, copying to generic pool");
size = MAX (size, obj->info.size);
}
break;
case GST_V4L2_IO_AUTO:
default:
- GST_WARNING_OBJECT (obj->element, "unhandled mode");
+ GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode");
break;
}
config = gst_buffer_pool_get_config (obj->pool);
if (obj->need_video_meta || has_video_meta) {
- GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
}
gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
- GST_DEBUG_OBJECT (obj->element, "setting own pool config to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %"
GST_PTR_FORMAT, config);
/* Our pool often need to adjust the value */
if (!gst_buffer_pool_set_config (obj->pool, config)) {
config = gst_buffer_pool_get_config (obj->pool);
- GST_DEBUG_OBJECT (obj->element, "own pool config changed to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
GST_PTR_FORMAT, config);
/* our pool will adjust the maximum buffer, which we are fine with */
gst_buffer_pool_config_set_allocator (config, allocator, ¶ms);
gst_buffer_pool_config_set_params (config, caps, size, min, max);
- GST_DEBUG_OBJECT (obj->element, "setting other pool config to %"
+ GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
GST_PTR_FORMAT, config);
/* if downstream supports video metadata, add this to the pool config */
if (has_video_meta) {
- GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
gst_buffer_pool_config_add_option (config,
GST_BUFFER_POOL_OPTION_VIDEO_META);
}
if (caps == NULL)
goto no_caps;
- if ((pool = obj->pool))
- gst_object_ref (pool);
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ if ((pool = obj->pool))
+ gst_object_ref (pool);
+ break;
+ default:
+ pool = NULL;
+ break;
+ }
if (pool != NULL) {
GstCaps *pcaps;
config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
- GST_DEBUG_OBJECT (obj->element,
+ GST_DEBUG_OBJECT (obj->dbg_obj,
"we had a pool with caps %" GST_PTR_FORMAT, pcaps);
if (!gst_caps_is_equal (caps, pcaps)) {
gst_structure_free (config);
/* ERRORS */
no_caps:
{
- GST_DEBUG_OBJECT (obj->element, "no caps specified");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
return FALSE;
}
different_caps:
{
/* different caps, we can't use this pool */
- GST_DEBUG_OBJECT (obj->element, "pool has different caps");
+ GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
+ return FALSE;
+ }
+}
+
+/* Checks whether @buffer can be imported directly by the device in the
+ * current io-mode (only USERPTR and DMABUF_IMPORT allow importing).  The
+ * buffer's video meta (planes, strides, offsets) is validated against the
+ * negotiated format; strides/offsets that are larger than ours trigger a
+ * VIDIOC_S_FMT update which the driver may refuse.  For DMABUF import every
+ * GstMemory must be a DMABuf.  Returns TRUE when importing looks possible;
+ * the kernel driver still has the final word when the buffer is queued. */
+gboolean
+gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
+{
+ GstVideoMeta *vmeta;
+ guint n_mem = gst_buffer_n_memory (buffer);
+
+ /* only import if requested */
+ switch (obj->mode) {
+ case GST_V4L2_IO_USERPTR:
+ case GST_V4L2_IO_DMABUF_IMPORT:
+ break;
+ default:
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "The io-mode does not enable importation");
+ return FALSE;
+ }
+
+ vmeta = gst_buffer_get_video_meta (buffer);
+ if (!vmeta && obj->need_video_meta) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
+ "stride/offset while the driver does not.");
return FALSE;
}
+
+ /* we need matching strides/offsets and size */
+ if (vmeta) {
+ guint p;
+ gboolean need_fmt_update = FALSE;
+
+ if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Cannot import buffers with different number planes");
+ return FALSE;
+ }
+
+ /* remote strides/offsets may be larger than ours (extra padding) but
+ * never smaller; larger values require updating the driver format */
+ for (p = 0; p < vmeta->n_planes; p++) {
+ if (vmeta->stride[p] < obj->info.stride[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not importing as remote stride %i is smaller then %i on plane %u",
+ vmeta->stride[p], obj->info.stride[p], p);
+ return FALSE;
+ } else if (vmeta->stride[p] > obj->info.stride[p]) {
+ need_fmt_update = TRUE;
+ }
+
+ if (vmeta->offset[p] < obj->info.offset[p]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Not importing as offset %" G_GSIZE_FORMAT
+ " is smaller then %" G_GSIZE_FORMAT " on plane %u",
+ vmeta->offset[p], obj->info.offset[p], p);
+ return FALSE;
+ } else if (vmeta->offset[p] > obj->info.offset[p]) {
+ need_fmt_update = TRUE;
+ }
+ }
+
+ if (need_fmt_update) {
+ struct v4l2_format format;
+ gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
+
+ format = obj->format;
+
+ /* update the current format with the stride we want to import from */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ gint stride = vmeta->stride[i];
+
+ /* tiled formats encode the stride in tiles; convert for the driver */
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+ wanted_stride[i] = stride;
+ GST_DEBUG_OBJECT (obj->dbg_obj, " [%u] %i", i, wanted_stride[i]);
+ }
+ } else {
+ gint stride = vmeta->stride[0];
+
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", stride);
+
+ if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+ stride = GST_VIDEO_TILE_X_TILES (stride) <<
+ GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+ format.fmt.pix.bytesperline = stride;
+ wanted_stride[0] = stride;
+ }
+
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Something went wrong trying to update current format: %s",
+ g_strerror (errno));
+ return FALSE;
+ }
+
+ gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
+ &obj->align);
+
+ /* verify the driver accepted the strides exactly as requested */
+ if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+ guint i;
+
+ for (i = 0; i < obj->n_v4l2_planes; i++) {
+ if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "[%i] Driver did not accept the new stride (wants %i, got %i)",
+ i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
+ wanted_stride[i]);
+ return FALSE;
+ }
+ }
+ } else {
+ if (format.fmt.pix.bytesperline != wanted_stride[0]) {
+ GST_DEBUG_OBJECT (obj->dbg_obj,
+ "Driver did not accept the new stride (wants %i, got %i)",
+ format.fmt.pix.bytesperline, wanted_stride[0]);
+ return FALSE;
+ }
+ }
+ }
+ }
+
+ /* we can always import a single memory buffer, but otherwise we need the
+ * same number of memory objects as V4L2 planes. */
+ if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
+ "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
+ return FALSE;
+ }
+
+ /* For DMABuf importation we need DMABuf of course */
+ if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
+ guint i;
+
+ for (i = 0; i < n_mem; i++) {
+ GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+
+ if (!gst_is_dmabuf_memory (mem)) {
+ GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
+ return FALSE;
+ }
+ }
+ }
+
+ /* for the remaining checks, only the kernel driver can tell */
+ return TRUE;
}