X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=sys%2Fv4l2%2Fgstv4l2object.c;h=a8f68da928de4a79012d7222d763ccb5121575cc;hb=1e4c9a1fb85d617292c41b1b12c2a8eca0fc26fe;hp=f2547a827cb9e92ae24fdfc4dbd92f677da8ddcd;hpb=0a186c28b106508aa10b326a82581e1efc65c2ca;p=platform%2Fupstream%2Fgst-plugins-good.git diff --git a/sys/v4l2/gstv4l2object.c b/sys/v4l2/gstv4l2object.c index f2547a8..a8f68da 100644 --- a/sys/v4l2/gstv4l2object.c +++ b/sys/v4l2/gstv4l2object.c @@ -25,20 +25,24 @@ #include #include #include -#include #include +#include +#include + #ifdef HAVE_GUDEV #include #endif -#include "v4l2_calls.h" +#include "ext/videodev2.h" +#include "gstv4l2object.h" #include "gstv4l2tuner.h" #include "gstv4l2colorbalance.h" #include "gst/gst-i18n-plugin.h" #include +#include GST_DEBUG_CATEGORY_EXTERN (v4l2_debug); #define GST_CAT_DEFAULT v4l2_debug @@ -47,11 +51,9 @@ GST_DEBUG_CATEGORY_EXTERN (v4l2_debug); #define DEFAULT_PROP_DEVICE_FD -1 #define DEFAULT_PROP_FLAGS 0 #define DEFAULT_PROP_TV_NORM 0 -#define DEFAULT_PROP_CHANNEL NULL -#define DEFAULT_PROP_FREQUENCY 0 #define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO -#define ENCODED_BUFFER_SIZE (1 * 1024 * 1024) +#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024) enum { @@ -157,10 +159,10 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW}, /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */ - {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_CODEC}, + {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW}, /* compressed formats */ {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC}, @@ -168,9 +170,11 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT}, {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT}, + {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC}, + {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC}, @@ -179,6 +183,7 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_VC1_ANNEX_G, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_VC1_ANNEX_L, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_VP8, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE}, + {V4L2_PIX_FMT_VP9, FALSE, GST_V4L2_CODEC | GST_V4L2_NO_PARSE}, /* Vendor-specific formats */ {V4L2_PIX_FMT_WNVA, TRUE, GST_V4L2_CODEC}, @@ -454,8 +459,25 @@ gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class) GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); } +/* Support for 32bit off_t, this wrapper is casting off_t to gint64 */ +#ifdef HAVE_LIBV4L2 +#if SIZEOF_OFF_T < 8 + +static gpointer +v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd, + off_t offset) +{ + return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset); +} + +#define v4l2_mmap v4l2_mmap_wrapper + +#endif /* SIZEOF_OFF_T < 8 */ +#endif /* HAVE_LIBV4L2 */ + GstV4l2Object * gst_v4l2_object_new (GstElement * element, + GstObject * debug_object, enum v4l2_buf_type type, const char *default_device, GstV4l2GetInOutFunction 
get_in_out_func, @@ -473,6 +495,7 @@ gst_v4l2_object_new (GstElement * element, v4l2object->formats = NULL; v4l2object->element = element; + v4l2object->dbg_obj = debug_object; v4l2object->get_in_out_func = get_in_out_func; v4l2object->set_in_out_func = set_in_out_func; v4l2object->update_fps_func = update_fps_func; @@ -485,14 +508,38 @@ gst_v4l2_object_new (GstElement * element, v4l2object->channels = NULL; v4l2object->colors = NULL; - v4l2object->xwindow_id = 0; - v4l2object->keep_aspect = TRUE; v4l2object->n_v4l2_planes = 0; v4l2object->no_initial_format = FALSE; + /* We now disable libv4l2 by default, but have an env to enable it. */ +#ifdef HAVE_LIBV4L2 +#ifdef TIZEN_FEATURE_USE_LIBV4L2 + if (1) { +#else /* TIZEN_FEATURE_USE_LIBV4L2 */ + if (g_getenv ("GST_V4L2_USE_LIBV4L2")) { +#endif /* TIZEN_FEATURE_USE_LIBV4L2 */ + v4l2object->fd_open = v4l2_fd_open; + v4l2object->close = v4l2_close; + v4l2object->dup = v4l2_dup; + v4l2object->ioctl = v4l2_ioctl; + v4l2object->read = v4l2_read; + v4l2object->mmap = v4l2_mmap; + v4l2object->munmap = v4l2_munmap; + } else +#endif + { + v4l2object->fd_open = NULL; + v4l2object->close = close; + v4l2object->dup = dup; + v4l2object->ioctl = ioctl; + v4l2object->read = read; + v4l2object->mmap = mmap; + v4l2object->munmap = munmap; + } + return v4l2object; } @@ -645,14 +692,18 @@ gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object, break; } case PROP_PIXEL_ASPECT_RATIO: - g_free (v4l2object->par); + if (v4l2object->par) { + g_value_unset (v4l2object->par); + g_free (v4l2object->par); + } v4l2object->par = g_new0 (GValue, 1); g_value_init (v4l2object->par, GST_TYPE_FRACTION); if (!g_value_transform (value, v4l2object->par)) { g_warning ("Could not transform string to aspect ratio"); gst_value_set_fraction (v4l2object->par, 1, 1); } - GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d", + + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d", gst_value_get_fraction_numerator (v4l2object->par), gst_value_get_fraction_denominator (v4l2object->par)); break; @@ -677,15 +728,12 @@ gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object, break; case PROP_DEVICE_NAME: { - const guchar *new = NULL; + const guchar *name = NULL; - if (GST_V4L2_IS_OPEN (v4l2object)) { - new = v4l2object->vcap.card; - } else if (gst_v4l2_open (v4l2object)) { - new = v4l2object->vcap.card; - gst_v4l2_close (v4l2object); - } - g_value_set_string (value, (gchar *) new); + if (GST_V4L2_IS_OPEN (v4l2object)) + name = v4l2object->vcap.card; + + g_value_set_string (value, (gchar *) name); break; } case PROP_DEVICE_FD: @@ -778,10 +826,22 @@ gst_v4l2_get_driver_min_buffers (GstV4l2Object * v4l2object) else control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) { + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "driver requires a minimum of %d buffers", control.value); +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT +#define DEFAULT_DECODER_OUTPUT_BUFFER_COUNT 5 + if (v4l2object->tbm_output && + !V4L2_TYPE_IS_OUTPUT (v4l2object->type) && control.value == 1) { + v4l2object->min_buffers = DEFAULT_DECODER_OUTPUT_BUFFER_COUNT; + GST_WARNING_OBJECT (v4l2object->dbg_obj, "but SET MIN BUFFER COUNT[%d] and it will be [%d] later", + v4l2object->min_buffers, v4l2object->min_buffers + 1); + } else { + v4l2object->min_buffers = control.value; + } +#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ 
v4l2object->min_buffers = control.value; +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ } else { v4l2object->min_buffers = 0; } @@ -801,7 +861,7 @@ gst_v4l2_set_defaults (GstV4l2Object * v4l2object) if (v4l2object->tv_norm) norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm); - GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, " "norm=%p", (guint64) v4l2object->tv_norm, norm); if (norm) { gst_tuner_set_norm (tuner, norm); @@ -881,6 +941,17 @@ gst_v4l2_object_close (GstV4l2Object * v4l2object) gst_v4l2_object_clear_format_list (v4l2object); } + if (v4l2object->par) { + g_value_unset (v4l2object->par); + g_free (v4l2object->par); + v4l2object->par = NULL; + } + + if (v4l2object->channel) { + g_free (v4l2object->channel); + v4l2object->channel = NULL; + } + return TRUE; } @@ -1108,7 +1179,7 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, gint n; struct v4l2_fmtdesc *format; - GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations"); /* format enumeration */ for (n = 0;; n++) { @@ -1117,7 +1188,7 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, format->index = n; format->type = type; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) { + if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) { if (errno == EINVAL) { g_free (format); break; /* end of enumeration */ @@ -1126,12 +1197,12 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, } } - GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index); - GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type); - GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags); - GST_LOG_OBJECT (v4l2object->element, "description: '%s'", + GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index); + GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type); + GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags); + GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'", format->description); - GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT, + GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (format->pixelformat)); /* sort formats according to our preference; we do this, because caps @@ -1145,11 +1216,11 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, { GSList *l; - GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n); + GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n); for (l = v4l2object->formats; l != NULL; l = l->next) { format = l->data; - GST_INFO_OBJECT (v4l2object->element, + GST_INFO_OBJECT (v4l2object->dbg_obj, " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat), ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? 
" (emulated)" : ""); } @@ -1163,7 +1234,7 @@ failed: { g_free (format); - if (!GST_IS_ELEMENT (v4l2object->element)) + if (v4l2object->element) return FALSE; GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, @@ -1338,6 +1409,10 @@ gst_v4l2_object_v4l2fourcc_is_rgb (guint32 fourcc) case V4L2_PIX_FMT_BGR32: case V4L2_PIX_FMT_ABGR32: case V4L2_PIX_FMT_ARGB32: + case V4L2_PIX_FMT_SBGGR8: + case V4L2_PIX_FMT_SGBRG8: + case V4L2_PIX_FMT_SGRBG8: + case V4L2_PIX_FMT_SRGGB8: ret = TRUE; break; default: @@ -1360,7 +1435,7 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) break; case V4L2_PIX_FMT_MPEG1: structure = gst_structure_new ("video/mpeg", - "mpegversion", G_TYPE_INT, 2, NULL); + "mpegversion", G_TYPE_INT, 1, NULL); break; case V4L2_PIX_FMT_MPEG2: structure = gst_structure_new ("video/mpeg", @@ -1372,6 +1447,9 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) "mpegversion", G_TYPE_INT, 4, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); break; + case V4L2_PIX_FMT_FWHT: + structure = gst_structure_new_empty ("video/x-fwht"); + break; case V4L2_PIX_FMT_H263: structure = gst_structure_new ("video/x-h263", "variant", G_TYPE_STRING, "itu", NULL); @@ -1386,6 +1464,11 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); break; + case V4L2_PIX_FMT_HEVC: /* H.265 */ + structure = gst_structure_new ("video/x-h265", + "stream-format", G_TYPE_STRING, "byte-stream", "alignment", + G_TYPE_STRING, "au", NULL); + break; case V4L2_PIX_FMT_VC1_ANNEX_G: case V4L2_PIX_FMT_VC1_ANNEX_L: structure = gst_structure_new ("video/x-wmv", @@ -1394,6 +1477,9 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) case V4L2_PIX_FMT_VP8: structure = gst_structure_new_empty ("video/x-vp8"); break; + case V4L2_PIX_FMT_VP9: + structure = gst_structure_new_empty ("video/x-vp9"); + break; case V4L2_PIX_FMT_GREY: /* 8 Greyscale */ case V4L2_PIX_FMT_Y16: case V4L2_PIX_FMT_Y16_BE: @@ -1553,6 +1639,16 @@ gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags) } switch (gst_v4l2_formats[i].format) { +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + case V4L2_PIX_FMT_YUV420: + alt_s = gst_structure_copy (structure); + gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL); + break; + case V4L2_PIX_FMT_NV12: + alt_s = gst_structure_copy (structure); + gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL); + break; +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ case V4L2_PIX_FMT_RGB32: alt_s = gst_structure_copy (structure); gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL); @@ -1642,6 +1738,9 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, if (g_str_equal (mimetype, "video/x-raw")) { switch (GST_VIDEO_INFO_FORMAT (info)) { case GST_VIDEO_FORMAT_I420: +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + case GST_VIDEO_FORMAT_S420: +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ fourcc = V4L2_PIX_FMT_YUV420; fourcc_nc = V4L2_PIX_FMT_YUV420M; break; @@ -1661,6 +1760,9 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, fourcc = V4L2_PIX_FMT_YUV422P; break; case GST_VIDEO_FORMAT_NV12: +#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT + case GST_VIDEO_FORMAT_SN12: +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ fourcc = V4L2_PIX_FMT_NV12; fourcc_nc = V4L2_PIX_FMT_NV12M; break; @@ -1754,6 +1856,8 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, break; } } + } else if (g_str_equal (mimetype, "video/x-fwht")) { + fourcc = 
V4L2_PIX_FMT_FWHT; } else if (g_str_equal (mimetype, "video/x-h263")) { fourcc = V4L2_PIX_FMT_H263; } else if (g_str_equal (mimetype, "video/x-h264")) { @@ -1763,8 +1867,12 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, fourcc = V4L2_PIX_FMT_H264_NO_SC; else fourcc = V4L2_PIX_FMT_H264; + } else if (g_str_equal (mimetype, "video/x-h265")) { + fourcc = V4L2_PIX_FMT_HEVC; } else if (g_str_equal (mimetype, "video/x-vp8")) { fourcc = V4L2_PIX_FMT_VP8; + } else if (g_str_equal (mimetype, "video/x-vp9")) { + fourcc = V4L2_PIX_FMT_VP9; } else if (g_str_equal (mimetype, "video/x-bayer")) { const gchar *format = gst_structure_get_string (structure, "format"); if (format) { @@ -1832,42 +1940,8 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, static void gst_v4l2_object_add_aspect_ratio (GstV4l2Object * v4l2object, GstStructure * s) { - struct v4l2_cropcap cropcap; - int num = 1, den = 1; - - if (!v4l2object->keep_aspect) - return; - - if (v4l2object->par) { - num = gst_value_get_fraction_numerator (v4l2object->par); - den = gst_value_get_fraction_denominator (v4l2object->par); - goto done; - } - - memset (&cropcap, 0, sizeof (cropcap)); - - cropcap.type = v4l2object->type; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) - goto cropcap_failed; - - num = cropcap.pixelaspect.numerator; - den = cropcap.pixelaspect.denominator; - - /* Ignore PAR that are 0/0 */ - if (den == 0) - return; - -done: - gst_structure_set (s, "pixel-aspect-ratio", GST_TYPE_FRACTION, num, den, - NULL); - return; - -cropcap_failed: - if (errno != ENOTTY) - GST_WARNING_OBJECT (v4l2object->element, - "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s", - g_strerror (errno)); - goto done; + if (v4l2object->keep_aspect && v4l2object->par) + gst_structure_set_value (s, "pixel-aspect-ratio", v4l2object->par); } /* returns TRUE if the value was changed in place, otherwise FALSE */ @@ -1894,8 +1968,6 @@ static gboolean gst_v4l2_object_get_interlace_mode (enum v4l2_field field, GstVideoInterlaceMode * interlace_mode) { - /* NB: If you add new return values, please fix mode_strings in - * gst_v4l2_object_add_interlace_mode */ switch (field) { case V4L2_FIELD_ANY: GST_ERROR @@ -1960,7 +2032,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, cinfo->transfer = GST_VIDEO_TRANSFER_SRGB; cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709; break; - case V4L2_COLORSPACE_ADOBERGB: + case V4L2_COLORSPACE_OPRGB: cinfo->range = GST_VIDEO_COLOR_RANGE_16_235; cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601; cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB; @@ -1969,7 +2041,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, case V4L2_COLORSPACE_BT2020: cinfo->range = GST_VIDEO_COLOR_RANGE_16_235; cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020; - cinfo->transfer = GST_VIDEO_TRANSFER_BT709; + cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12; cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT2020; break; case V4L2_COLORSPACE_SMPTE240M: @@ -2046,7 +2118,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709; break; case V4L2_YCBCR_ENC_BT2020_CONST_LUM: - GST_FIXME ("BT2020 with constant lumma is not defined, assuming BT2020"); + GST_FIXME ("BT2020 with constant luma is not defined, assuming BT2020"); /* fallthrough */ case V4L2_YCBCR_ENC_BT2020: cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020; @@ -2071,12 +2143,15 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, switch (transfer) { case V4L2_XFER_FUNC_709: - 
cinfo->transfer = GST_VIDEO_TRANSFER_BT709; + if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160) + cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12; + else + cinfo->transfer = GST_VIDEO_TRANSFER_BT709; break; case V4L2_XFER_FUNC_SRGB: cinfo->transfer = GST_VIDEO_TRANSFER_SRGB; break; - case V4L2_XFER_FUNC_ADOBERGB: + case V4L2_XFER_FUNC_OPRGB: cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB; break; case V4L2_XFER_FUNC_SMPTE240M: @@ -2107,7 +2182,7 @@ gst_v4l2_object_try_fmt (GstV4l2Object * v4l2object, int r; memcpy (&fmt, try_fmt, sizeof (fmt)); - r = v4l2_ioctl (fd, VIDIOC_TRY_FMT, &fmt); + r = v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &fmt); if (r < 0 && errno == ENOTTY) { /* The driver might not implement TRY_FMT, in which case we will try @@ -2116,14 +2191,15 @@ gst_v4l2_object_try_fmt (GstV4l2Object * v4l2object, goto error; memcpy (&fmt, try_fmt, sizeof (fmt)); - r = v4l2_ioctl (fd, VIDIOC_S_FMT, &fmt); + r = v4l2object->ioctl (fd, VIDIOC_S_FMT, &fmt); } memcpy (try_fmt, &fmt, sizeof (fmt)); + return r; error: memcpy (try_fmt, &fmt, sizeof (fmt)); - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unable to try format: %s", g_strerror (errno)); return r; } @@ -2135,13 +2211,10 @@ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object, { struct v4l2_format fmt; GValue interlace_formats = { 0, }; + enum v4l2_field formats[] = { V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED }; + gsize i; GstVideoInterlaceMode interlace_mode, prev = -1; - const gchar *mode_strings[] = { "progressive", - "interleaved", - "mixed" - }; - if (!g_str_equal (gst_structure_get_name (s), "video/x-raw")) return; @@ -2153,36 +2226,26 @@ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object, g_value_init (&interlace_formats, GST_TYPE_LIST); /* Try twice - once for NONE, once for INTERLACED. 
*/ - memset (&fmt, 0, sizeof (fmt)); - fmt.type = v4l2object->type; - fmt.fmt.pix.width = width; - fmt.fmt.pix.height = height; - fmt.fmt.pix.pixelformat = pixelformat; - fmt.fmt.pix.field = V4L2_FIELD_NONE; - - if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && - gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) { - GValue interlace_enum = { 0, }; - g_value_init (&interlace_enum, G_TYPE_STRING); - g_value_set_string (&interlace_enum, mode_strings[interlace_mode]); - gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum); - prev = interlace_mode; - } - - memset (&fmt, 0, sizeof (fmt)); - fmt.type = v4l2object->type; - fmt.fmt.pix.width = width; - fmt.fmt.pix.height = height; - fmt.fmt.pix.pixelformat = pixelformat; - fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; - - if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && - gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) && - prev != interlace_mode) { - GValue interlace_enum = { 0, }; - g_value_init (&interlace_enum, G_TYPE_STRING); - g_value_set_string (&interlace_enum, mode_strings[interlace_mode]); - gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum); + for (i = 0; i < G_N_ELEMENTS (formats); i++) { + memset (&fmt, 0, sizeof (fmt)); + fmt.type = v4l2object->type; + fmt.fmt.pix.width = width; + fmt.fmt.pix.height = height; + fmt.fmt.pix.pixelformat = pixelformat; + fmt.fmt.pix.field = formats[i]; + + if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && + gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) + && prev != interlace_mode) { + GValue interlace_enum = { 0, }; + const gchar *mode_string; + g_value_init (&interlace_enum, G_TYPE_STRING); + mode_string = gst_video_interlace_mode_to_string (interlace_mode); + g_value_set_string (&interlace_enum, mode_string); + gst_value_list_append_and_take_value (&interlace_formats, + &interlace_enum); + prev = interlace_mode; + } } if (gst_v4l2src_value_simplify (&interlace_formats) @@ -2304,13 +2367,13 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, ival.width = width; ival.height = height; - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat)); /* keep in mind that v4l2 gives us frame intervals (durations); we invert the * fraction to get framerate */ - if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0) + if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0) goto enum_frameintervals_failed; if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) { @@ -2329,7 +2392,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, denom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d", denom, num); /* swap to get the framerate */ @@ -2337,7 +2400,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, gst_value_list_append_value (&rates, &rate); ival.index++; - } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0); + } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0); } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) { GValue min = { 0, }; GValue step = { 0, }; @@ -2359,7 +2422,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, minnum >>= 1; mindenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "stepwise min 
frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d", minnum, mindenom); gst_value_set_fraction (&min, minnum, mindenom); @@ -2371,7 +2434,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, maxdenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d", maxnum, maxdenom); gst_value_set_fraction (&max, maxnum, maxdenom); @@ -2392,7 +2455,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, /* since we only have gst_value_fraction_subtract and not add, negate the * numerator */ - GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d", num, denom); gst_value_set_fraction (&step, -num, denom); @@ -2401,7 +2464,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, num = gst_value_get_fraction_numerator (&min); denom = gst_value_get_fraction_denominator (&min); - GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d", denom, num); /* invert to get the framerate */ @@ -2413,13 +2476,13 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, /* we're actually adding because step was negated above. This is because * there is no _add function... */ if (!gst_value_fraction_subtract (&min, &min, &step)) { - GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!"); + GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!"); break; } } if (!added) { /* no range was added, leave the default range from the template */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "no range added, leaving default"); g_value_unset (&rates); } @@ -2442,7 +2505,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, maxdenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom, num); @@ -2455,10 +2518,14 @@ return_data: s = gst_structure_copy (template); gst_structure_set (s, "width", G_TYPE_INT, (gint) width, "height", G_TYPE_INT, (gint) height, NULL); + gst_v4l2_object_add_aspect_ratio (v4l2object, s); - gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height, - pixelformat); - gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat); + + if (!v4l2object->skip_try_fmt_probes) { + gst_v4l2_object_add_interlace_mode (v4l2object, s, width, height, + pixelformat); + gst_v4l2_object_add_colorspace (v4l2object, s, width, height, pixelformat); + } if (G_IS_VALUE (&rates)) { gst_v4l2src_value_simplify (&rates); @@ -2475,7 +2542,7 @@ return_data: /* ERRORS */ enum_frameintervals_failed: { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u", GST_FOURCC_ARGS (pixelformat), width, height); goto return_data; @@ -2483,7 +2550,7 @@ enum_frameintervals_failed: unknown_type: { /* I don't see how this is actually an error, we ignore the format then */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u", GST_FOURCC_ARGS (pixelformat), width, height, ival.type); return NULL; @@ -2568,16 
+2635,16 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, size.index = 0; size.pixel_format = pixelformat; - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Enumerating frame sizes for %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); - if (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) + if (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) goto enum_framesizes_failed; if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) { do { - GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d", size.discrete.width, size.discrete.height); w = MIN (size.discrete.width, G_MAXINT); @@ -2593,24 +2660,24 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, } size.index++; - } while (v4l2_ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0); - GST_DEBUG_OBJECT (v4l2object->element, + } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "done iterating discrete frame sizes"); } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) { guint32 maxw, maxh, step_w, step_h; - GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:"); - GST_DEBUG_OBJECT (v4l2object->element, "min width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d", size.stepwise.min_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.min_height); - GST_DEBUG_OBJECT (v4l2object->element, "max width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d", size.stepwise.max_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.max_height); - GST_DEBUG_OBJECT (v4l2object->element, "step width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d", size.stepwise.step_width); - GST_DEBUG_OBJECT (v4l2object->element, "step height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d", size.stepwise.step_height); w = MAX (size.stepwise.min_width, 1); @@ -2644,14 +2711,14 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) { guint32 maxw, maxh; - GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:"); - GST_DEBUG_OBJECT (v4l2object->element, "min width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d", size.stepwise.min_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.min_height); - GST_DEBUG_OBJECT (v4l2object->element, "max width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d", size.stepwise.max_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.max_height); w = MAX (size.stepwise.min_width, 1); @@ -2695,7 +2762,7 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, enum_framesizes_failed: { /* I don't see how this is actually an error */ - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno)); goto 
default_frame_sizes; @@ -2704,7 +2771,7 @@ enum_framesizes_no_results: { /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in * question doesn't actually support it yet */ - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "No results for pixelformat %" GST_FOURCC_FORMAT " enumerating frame sizes, trying fallback", GST_FOURCC_ARGS (pixelformat)); @@ -2712,7 +2779,7 @@ enum_framesizes_no_results: } unknown_type: { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT ": %u", GST_FOURCC_ARGS (pixelformat), size.type); goto default_frame_sizes; @@ -2727,13 +2794,13 @@ default_frame_sizes: max_w = max_h = GST_V4L2_MAX_SIZE; if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w, &min_h)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); } if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w, &max_h)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); } @@ -2778,13 +2845,17 @@ default_frame_sizes: else gst_structure_set (tmp, "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL); - /* We could consider setting interlace mode from min and max. */ - gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h, - pixelformat); gst_v4l2_object_add_aspect_ratio (v4l2object, tmp); - /* We could consider to check colorspace for min too, in case it depends on - * the size. But in this case, min and max could not be enough */ - gst_v4l2_object_add_colorspace (v4l2object, tmp, max_w, max_h, pixelformat); + + if (!v4l2object->skip_try_fmt_probes) { + /* We could consider setting interlace mode from min and max. */ + gst_v4l2_object_add_interlace_mode (v4l2object, tmp, max_w, max_h, + pixelformat); + /* We could consider to check colorspace for min too, in case it depends on + * the size. 
But in this case, min and max could not be enough */ + gst_v4l2_object_add_colorspace (v4l2object, tmp, max_w, max_h, + pixelformat); + } gst_v4l2_object_update_and_append (v4l2object, pixelformat, ret, tmp); return ret; @@ -2802,7 +2873,7 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, g_return_val_if_fail (width != NULL, FALSE); g_return_val_if_fail (height != NULL, FALSE); - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT, *width, *height, GST_FOURCC_ARGS (pixelformat)); @@ -2819,14 +2890,14 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0) goto error; - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height); *width = fmt.fmt.pix.width; *height = fmt.fmt.pix.height; if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u", GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field); goto error; @@ -2836,7 +2907,7 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, error: if (!ret) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unable to try format: %s", g_strerror (errno)); } @@ -2844,11 +2915,36 @@ error: } static gboolean +gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object) +{ + gboolean ret = TRUE; + struct v4l2_exportbuffer expbuf = { + .type = v4l2object->type, + .index = -1, + .plane = -1, + .flags = O_CLOEXEC | O_RDWR, + }; + + if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) { + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "libv4l2 converter detected, disabling DMABuf"); + ret = FALSE; + } + + /* Expected to fail, but ENOTTY tells us that it is not implemented. */ + v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf); + if (errno == ENOTTY) + ret = FALSE; + + return ret; +} + +static gboolean gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) { GstV4l2IOMode mode; - GST_DEBUG_OBJECT (v4l2object->element, "initializing the %s system", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system", V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? 
"output" : "capture"); GST_V4L2_CHECK_OPEN (v4l2object); @@ -2864,16 +2960,23 @@ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) goto method_not_supported; if (v4l2object->device_caps & V4L2_CAP_STREAMING) { - if (v4l2object->req_mode == GST_V4L2_IO_AUTO) - mode = GST_V4L2_IO_MMAP; - } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP) + if (v4l2object->req_mode == GST_V4L2_IO_AUTO) { + if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) && + gst_v4l2_object_is_dmabuf_supported (v4l2object)) { + mode = GST_V4L2_IO_DMABUF; + } else { + mode = GST_V4L2_IO_MMAP; + } + } + } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP || + v4l2object->req_mode == GST_V4L2_IO_DMABUF) goto method_not_supported; /* if still no transport selected, error out */ if (mode == GST_V4L2_IO_AUTO) goto no_supported_capture_method; - GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode); + GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode); v4l2object->mode = mode; /* If min_buffers is not set, the driver either does not support the control or @@ -2882,7 +2985,7 @@ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) gst_v4l2_get_driver_min_buffers (v4l2object); /* Map the buffers */ - GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool"); + GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool"); if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps))) goto buffer_pool_new_failed; @@ -2963,7 +3066,7 @@ gst_v4l2_object_extrapolate_info (GstV4l2Object * v4l2object, offs += estride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height); - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Extrapolated for plane %d with base stride %d: " "stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i], info->offset[i]); @@ -2984,7 +3087,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, { const GstVideoFormatInfo *finfo = info->finfo; gboolean standard_stride = TRUE; - gint stride, padded_width, padded_height, i; + gint stride, pstride, padded_width, padded_height, i; if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) { v4l2object->n_v4l2_planes = 1; @@ -2998,11 +3101,21 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, else stride = format->fmt.pix.bytesperline; - padded_width = stride / GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0); + pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0); + if (pstride) { + padded_width = stride / pstride; + } else { + /* pstride can be 0 for complex formats */ + GST_WARNING_OBJECT (v4l2object->element, + "format %s has a pstride of 0, cannot compute padded with", + gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info))); + padded_width = stride; + } if (padded_width < format->fmt.pix.width) - GST_WARNING_OBJECT (v4l2object->element, - "Driver bug detected, stride is too small for the width"); + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Driver bug detected, stride (%d) is too small for the width (%d)", + padded_width, format->fmt.pix.width); align->padding_right = padded_width - info->width - align->padding_left; @@ -3058,7 +3171,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, /* adjust the offset to take into account left and top */ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) { if ((align->padding_left + align->padding_top) > 0) - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Left and top padding is not permitted for tiled formats"); } else { for (i = 0; 
i < finfo->n_planes; i++) { @@ -3076,7 +3189,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, } store_info: - GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %" G_GSIZE_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT, info->size); /* to avoid copies we need video meta if there is padding */ @@ -3131,6 +3244,34 @@ gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo, } static gboolean +gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo, + const gchar * color) +{ + GstVideoColorimetry ci; + static const GstVideoColorimetry ci_likely_jpeg = { + GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601, + GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN + }; + static const GstVideoColorimetry ci_jpeg = { + GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601, + GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709 + }; + + if (!gst_video_colorimetry_from_string (&ci, color)) + return FALSE; + + if (gst_video_colorimetry_is_equal (&ci, cinfo)) + return TRUE; + + /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */ + if (gst_video_colorimetry_is_equal (&ci, &ci_likely_jpeg) + && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg)) + return TRUE; + + return FALSE; +} + +static gboolean gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, gboolean try_only, GstV4l2Error * error) { @@ -3150,6 +3291,11 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, enum v4l2_quantization range = 0; enum v4l2_ycbcr_encoding matrix = 0; enum v4l2_xfer_func transfer = 0; + GstStructure *s; + gboolean disable_colorimetry = FALSE; + + g_return_val_if_fail (!v4l2object->skip_try_fmt_probes || + gst_caps_is_writable (caps), FALSE); GST_V4L2_CHECK_OPEN (v4l2object); if (!try_only) @@ -3176,145 +3322,155 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, n_v4l_planes = 1; if (GST_VIDEO_INFO_IS_INTERLACED (&info)) { - GST_DEBUG_OBJECT (v4l2object->element, "interlaced video"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "interlaced video"); /* ideally we would differentiate between types of interlaced video * but there is not sufficient information in the caps.. 
*/ field = V4L2_FIELD_INTERLACED; } else { - GST_DEBUG_OBJECT (v4l2object->element, "progressive video"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "progressive video"); field = V4L2_FIELD_NONE; } - if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) { - /* We first pick th main colorspace from the primaries */ - switch (info.colorimetry.primaries) { - case GST_VIDEO_COLOR_PRIMARIES_BT709: - /* There is two colorspaces using these primaries, use the range to - * differentiate */ - if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235) - colorspace = V4L2_COLORSPACE_REC709; - else - colorspace = V4L2_COLORSPACE_SRGB; - break; - case GST_VIDEO_COLOR_PRIMARIES_BT470M: - colorspace = V4L2_COLORSPACE_470_SYSTEM_M; - break; - case GST_VIDEO_COLOR_PRIMARIES_BT470BG: - colorspace = V4L2_COLORSPACE_470_SYSTEM_BG; - break; - case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: - colorspace = V4L2_COLORSPACE_SMPTE170M; - break; - case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: - colorspace = V4L2_COLORSPACE_SMPTE240M; - break; + /* We first pick the main colorspace from the primaries */ + switch (info.colorimetry.primaries) { + case GST_VIDEO_COLOR_PRIMARIES_BT709: + /* There is two colorspaces using these primaries, use the range to + * differentiate */ + if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235) + colorspace = V4L2_COLORSPACE_REC709; + else + colorspace = V4L2_COLORSPACE_SRGB; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT2020: + colorspace = V4L2_COLORSPACE_BT2020; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT470M: + colorspace = V4L2_COLORSPACE_470_SYSTEM_M; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT470BG: + colorspace = V4L2_COLORSPACE_470_SYSTEM_BG; + break; + case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: + colorspace = V4L2_COLORSPACE_SMPTE170M; + break; + case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: + colorspace = V4L2_COLORSPACE_SMPTE240M; + break; - case GST_VIDEO_COLOR_PRIMARIES_FILM: - case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: - /* We don't know, we will guess */ - break; + case GST_VIDEO_COLOR_PRIMARIES_FILM: + case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: + /* We don't know, we will guess */ + break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry primaries %d", info.colorimetry.primaries); - break; - } + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry primaries %d", info.colorimetry.primaries); + break; + } - switch (info.colorimetry.range) { - case GST_VIDEO_COLOR_RANGE_0_255: - range = V4L2_QUANTIZATION_FULL_RANGE; - break; - case GST_VIDEO_COLOR_RANGE_16_235: - range = V4L2_QUANTIZATION_LIM_RANGE; - break; - case GST_VIDEO_COLOR_RANGE_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry range %d", info.colorimetry.range); - break; - } + switch (info.colorimetry.range) { + case GST_VIDEO_COLOR_RANGE_0_255: + range = V4L2_QUANTIZATION_FULL_RANGE; + break; + case GST_VIDEO_COLOR_RANGE_16_235: + range = V4L2_QUANTIZATION_LIM_RANGE; + break; + case GST_VIDEO_COLOR_RANGE_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry range %d", info.colorimetry.range); + break; + } - switch (info.colorimetry.matrix) { - case GST_VIDEO_COLOR_MATRIX_RGB: - /* Unspecified, leave to default */ - break; - /* FCC is about the same as BT601 with less digit */ - case GST_VIDEO_COLOR_MATRIX_FCC: - case GST_VIDEO_COLOR_MATRIX_BT601: - matrix = V4L2_YCBCR_ENC_601; - break; - case 
GST_VIDEO_COLOR_MATRIX_BT709: - matrix = V4L2_YCBCR_ENC_709; - break; - case GST_VIDEO_COLOR_MATRIX_SMPTE240M: - matrix = V4L2_YCBCR_ENC_SMPTE240M; - break; - case GST_VIDEO_COLOR_MATRIX_BT2020: - matrix = V4L2_YCBCR_ENC_BT2020; - break; - case GST_VIDEO_COLOR_MATRIX_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry matrix %d", info.colorimetry.matrix); - break; - } + switch (info.colorimetry.matrix) { + case GST_VIDEO_COLOR_MATRIX_RGB: + /* Unspecified, leave to default */ + break; + /* FCC is about the same as BT601 with less digit */ + case GST_VIDEO_COLOR_MATRIX_FCC: + case GST_VIDEO_COLOR_MATRIX_BT601: + matrix = V4L2_YCBCR_ENC_601; + break; + case GST_VIDEO_COLOR_MATRIX_BT709: + matrix = V4L2_YCBCR_ENC_709; + break; + case GST_VIDEO_COLOR_MATRIX_SMPTE240M: + matrix = V4L2_YCBCR_ENC_SMPTE240M; + break; + case GST_VIDEO_COLOR_MATRIX_BT2020: + matrix = V4L2_YCBCR_ENC_BT2020; + break; + case GST_VIDEO_COLOR_MATRIX_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry matrix %d", info.colorimetry.matrix); + break; + } - switch (info.colorimetry.transfer) { - case GST_VIDEO_TRANSFER_GAMMA18: - case GST_VIDEO_TRANSFER_GAMMA20: - case GST_VIDEO_TRANSFER_GAMMA22: - case GST_VIDEO_TRANSFER_GAMMA28: - GST_WARNING_OBJECT (v4l2object->element, - "GAMMA 18, 20, 22, 28 transfer functions not supported"); - /* fallthrough */ - case GST_VIDEO_TRANSFER_GAMMA10: - transfer = V4L2_XFER_FUNC_NONE; - break; - case GST_VIDEO_TRANSFER_BT709: - transfer = V4L2_XFER_FUNC_709; - break; - case GST_VIDEO_TRANSFER_SMPTE240M: - transfer = V4L2_XFER_FUNC_SMPTE240M; - break; - case GST_VIDEO_TRANSFER_SRGB: - transfer = V4L2_XFER_FUNC_SRGB; - break; - case GST_VIDEO_TRANSFER_LOG100: - case GST_VIDEO_TRANSFER_LOG316: - GST_WARNING_OBJECT (v4l2object->element, - "LOG 100, 316 transfer functions not supported"); - /* FIXME No known sensible default, maybe AdobeRGB ? */ - break; - case GST_VIDEO_TRANSFER_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry tranfer %d", info.colorimetry.transfer); - break; - } + switch (info.colorimetry.transfer) { + case GST_VIDEO_TRANSFER_GAMMA18: + case GST_VIDEO_TRANSFER_GAMMA20: + case GST_VIDEO_TRANSFER_GAMMA22: + case GST_VIDEO_TRANSFER_GAMMA28: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "GAMMA 18, 20, 22, 28 transfer functions not supported"); + /* fallthrough */ + case GST_VIDEO_TRANSFER_GAMMA10: + transfer = V4L2_XFER_FUNC_NONE; + break; + case GST_VIDEO_TRANSFER_BT2020_12: + case GST_VIDEO_TRANSFER_BT709: + transfer = V4L2_XFER_FUNC_709; + break; + case GST_VIDEO_TRANSFER_SMPTE240M: + transfer = V4L2_XFER_FUNC_SMPTE240M; + break; + case GST_VIDEO_TRANSFER_SRGB: + transfer = V4L2_XFER_FUNC_SRGB; + break; + case GST_VIDEO_TRANSFER_LOG100: + case GST_VIDEO_TRANSFER_LOG316: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "LOG 100, 316 transfer functions not supported"); + /* FIXME No known sensible default, maybe AdobeRGB ? 
*/ + break; + case GST_VIDEO_TRANSFER_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry tranfer %d", info.colorimetry.transfer); + break; + } - if (colorspace == 0) { - /* Try to guess colorspace according to pixelformat and size */ - if (GST_VIDEO_INFO_IS_YUV (&info)) { + if (colorspace == 0) { + /* Try to guess colorspace according to pixelformat and size */ + if (GST_VIDEO_INFO_IS_YUV (&info)) { + if (range == V4L2_QUANTIZATION_FULL_RANGE + && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) { + /* Full range BT.601 YCbCr encoding with unknown primaries and transfer + * function most likely is JPEG */ + colorspace = V4L2_COLORSPACE_JPEG; + transfer = V4L2_XFER_FUNC_SRGB; + } else { /* SD streams likely use SMPTE170M and HD streams REC709 */ if (width <= 720 && height <= 576) colorspace = V4L2_COLORSPACE_SMPTE170M; else colorspace = V4L2_COLORSPACE_REC709; - } else if (GST_VIDEO_INFO_IS_RGB (&info)) { - colorspace = V4L2_COLORSPACE_SRGB; - transfer = V4L2_XFER_FUNC_NONE; } + } else if (GST_VIDEO_INFO_IS_RGB (&info)) { + colorspace = V4L2_COLORSPACE_SRGB; + transfer = V4L2_XFER_FUNC_NONE; } } - GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format " "%" GST_FOURCC_FORMAT " stride: %d", width, height, GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0)); @@ -3347,6 +3503,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0); format.type = v4l2object->type; + format.fmt.pix.width = width; format.fmt.pix.height = height; format.fmt.pix.pixelformat = pixelformat; @@ -3363,7 +3520,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE; } - GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format " "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width, format.fmt.pix_mp.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat), @@ -3372,54 +3529,65 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, #ifndef GST_DISABLE_GST_DEBUG if (is_mplane) { for (i = 0; i < format.fmt.pix_mp.num_planes; i++) - GST_DEBUG_OBJECT (v4l2object->element, " stride %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d", format.fmt.pix_mp.plane_fmt[i].bytesperline); } else { - GST_DEBUG_OBJECT (v4l2object->element, " stride %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d", format.fmt.pix.bytesperline); } #endif - if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) { - if (is_mplane) { - format.fmt.pix_mp.colorspace = colorspace; - format.fmt.pix_mp.quantization = range; - format.fmt.pix_mp.ycbcr_enc = matrix; - format.fmt.pix_mp.xfer_func = transfer; - } else { - format.fmt.pix.colorspace = colorspace; - format.fmt.pix.quantization = range; - format.fmt.pix.ycbcr_enc = matrix; - format.fmt.pix.xfer_func = transfer; - } - - GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d:%d:%d:%d", - colorspace, range, matrix, transfer); + if (is_mplane) { + format.fmt.pix_mp.colorspace = colorspace; + format.fmt.pix_mp.quantization = range; + format.fmt.pix_mp.ycbcr_enc = matrix; + format.fmt.pix_mp.xfer_func = transfer; + } else { + format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC; + format.fmt.pix.colorspace = colorspace; + 
format.fmt.pix.quantization = range; + format.fmt.pix.ycbcr_enc = matrix; + format.fmt.pix.xfer_func = transfer; } + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d", + colorspace, range, matrix, transfer); + if (try_only) { - if (v4l2_ioctl (fd, VIDIOC_TRY_FMT, &format) < 0) + if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0) goto try_fmt_failed; } else { - if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0) + if (v4l2object->ioctl (fd, VIDIOC_S_FMT, &format) < 0) goto set_fmt_failed; } - GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format " - "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d", + if (is_mplane) { + colorspace = format.fmt.pix_mp.colorspace; + range = format.fmt.pix_mp.quantization; + matrix = format.fmt.pix_mp.ycbcr_enc; + transfer = format.fmt.pix_mp.xfer_func; + } else { + colorspace = format.fmt.pix.colorspace; + range = format.fmt.pix.quantization; + matrix = format.fmt.pix.ycbcr_enc; + transfer = format.fmt.pix.xfer_func; + } + + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format " + "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d", format.fmt.pix.width, format.fmt.pix_mp.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat), is_mplane ? format.fmt.pix_mp.num_planes : 1, - is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace); + colorspace, range, matrix, transfer); #ifndef GST_DISABLE_GST_DEBUG if (is_mplane) { for (i = 0; i < format.fmt.pix_mp.num_planes; i++) - GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d", format.fmt.pix_mp.plane_fmt[i].bytesperline, format.fmt.pix_mp.plane_fmt[i].sizeimage); } else { - GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d", format.fmt.pix.bytesperline, format.fmt.pix.sizeimage); } #endif @@ -3446,6 +3614,45 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes) goto invalid_planes; + /* used to check colorimetry and interlace mode fields presence */ + s = gst_caps_get_structure (caps, 0); + + if (!gst_v4l2_object_get_interlace_mode (format.fmt.pix.field, + &info.interlace_mode)) + goto invalid_field; + if (gst_structure_has_field (s, "interlace-mode")) { + if (format.fmt.pix.field != field) + goto invalid_field; + } + + if (gst_v4l2_object_get_colorspace (&format, &info.colorimetry)) { + if (gst_structure_has_field (s, "colorimetry")) { + if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry, + gst_structure_get_string (s, "colorimetry"))) + goto invalid_colorimetry; + } + } else { + /* The driver (or libv4l2) is miss-behaving, just ignore colorimetry from + * the TRY_FMT */ + disable_colorimetry = TRUE; + if (gst_structure_has_field (s, "colorimetry")) + gst_structure_remove_field (s, "colorimetry"); + } + + /* In case we have skipped the try_fmt probes, we'll need to set the + * colorimetry and interlace-mode back into the caps. 
*/ + if (v4l2object->skip_try_fmt_probes) { + if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) { + gchar *str = gst_video_colorimetry_to_string (&info.colorimetry); + gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL); + g_free (str); + } + + if (!gst_structure_has_field (s, "interlace-mode")) + gst_structure_set (s, "interlace-mode", G_TYPE_STRING, + gst_video_interlace_mode_to_string (info.interlace_mode), NULL); + } + if (try_only) /* good enough for trying only */ return TRUE; @@ -3454,19 +3661,19 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, ctl.id = V4L2_CID_ALPHA_COMPONENT; ctl.value = 0xff; - if (v4l2_ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0) - GST_WARNING_OBJECT (v4l2object->element, + if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0) + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Failed to set alpha component value"); } /* Is there a reason we require the caller to always specify a framerate? */ - GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n, fps_d); memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm)); streamparm.type = v4l2object->type; - if (v4l2_ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0) + if (v4l2object->ioctl (fd, VIDIOC_G_PARM, &streamparm) < 0) goto get_parm_failed; if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE @@ -3476,7 +3683,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator; - GST_DEBUG_OBJECT (v4l2object->element, "Got capture framerate: %u/%u", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u", streamparm.parm.capture.timeperframe.denominator, streamparm.parm.capture.timeperframe.numerator); @@ -3485,12 +3692,12 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, * causing them to not output data (several models of Thinkpad cameras * have this problem at least). * So, don't skip. */ - GST_LOG_OBJECT (v4l2object->element, "Setting capture framerate to %u/%u", + GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u", fps_n, fps_d); /* We want to change the frame rate, so check whether we can. 
Some cheap USB * cameras don't have the capability */ if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Not setting capture framerate (not supported)"); goto done; } @@ -3500,7 +3707,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, streamparm.parm.capture.timeperframe.denominator = fps_n; /* some cheap USB cam's won't accept any change */ - if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0) + if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0) goto set_parm_failed; if (streamparm.parm.capture.timeperframe.numerator > 0 && @@ -3509,11 +3716,11 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, fps_d = streamparm.parm.capture.timeperframe.numerator; fps_n = streamparm.parm.capture.timeperframe.denominator; - GST_INFO_OBJECT (v4l2object->element, "Set capture framerate to %u/%u", + GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u", fps_n, fps_d); } else { /* fix v4l2 capture driver to provide framerate values */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d); } @@ -3526,14 +3733,14 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.output.timeperframe.numerator; - GST_DEBUG_OBJECT (v4l2object->element, "Got output framerate: %u/%u", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u", streamparm.parm.output.timeperframe.denominator, streamparm.parm.output.timeperframe.numerator); - GST_LOG_OBJECT (v4l2object->element, "Setting output framerate to %u/%u", + GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u", fps_n, fps_d); if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Not setting output framerate (not supported)"); goto done; } @@ -3542,7 +3749,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, streamparm.parm.output.timeperframe.numerator = fps_d; streamparm.parm.output.timeperframe.denominator = fps_n; - if (v4l2_ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0) + if (v4l2object->ioctl (fd, VIDIOC_S_PARM, &streamparm) < 0) goto set_parm_failed; if (streamparm.parm.output.timeperframe.numerator > 0 && @@ -3551,11 +3758,11 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, fps_d = streamparm.parm.output.timeperframe.numerator; fps_n = streamparm.parm.output.timeperframe.denominator; - GST_INFO_OBJECT (v4l2object->element, "Set output framerate to %u/%u", + GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u", fps_n, fps_d); } else { /* fix v4l2 output driver to provide framerate values */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d); } @@ -3576,15 +3783,22 @@ done: /* ERRORS */ invalid_caps: { - GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT, caps); return FALSE; } try_fmt_failed: { - if (errno == EBUSY) { - GST_V4L2_ERROR (error, RESOURCE, BUSY, - (_("Device '%s' is busy"), v4l2object->videodev), + if (errno == EINVAL) { + GST_V4L2_ERROR (error, RESOURCE, SETTINGS, + (_("Device '%s' has 
+          ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+              GST_FOURCC_ARGS (pixelformat), width, height,
+              g_strerror (errno)));
+    } else {
+      GST_V4L2_ERROR (error, RESOURCE, FAILED,
+          (_("Device '%s' failed during initialization"),
+              v4l2object->videodev),
           ("Call to TRY_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
               GST_FOURCC_ARGS (pixelformat), width, height,
               g_strerror (errno)));
@@ -3599,10 +3813,16 @@ set_fmt_failed:
         ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
             GST_FOURCC_ARGS (pixelformat), width, height,
             g_strerror (errno)));
-    } else {
+    } else if (errno == EINVAL) {
       GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
-          (_("Device '%s' cannot capture at %dx%d"),
-              v4l2object->videodev, width, height),
+          (_("Device '%s' has no supported format"), v4l2object->videodev),
+          ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
+              GST_FOURCC_ARGS (pixelformat), width, height,
+              g_strerror (errno)));
+    } else {
+      GST_V4L2_ERROR (error, RESOURCE, FAILED,
+          (_("Device '%s' failed during initialization"),
+              v4l2object->videodev),
           ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s",
               GST_FOURCC_ARGS (pixelformat), width, height,
               g_strerror (errno)));
@@ -3611,36 +3831,61 @@ set_fmt_failed:
   }
 invalid_dimensions:
   {
-    if (!try_only) {
-      GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
-          (_("Device '%s' cannot capture at %dx%d"),
-              v4l2object->videodev, width, height),
-          ("Tried to capture at %dx%d, but device returned size %dx%d",
-              width, height, format.fmt.pix.width, format.fmt.pix.height));
-    }
+    GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+        (_("Device '%s' cannot capture at %dx%d"),
+            v4l2object->videodev, width, height),
+        ("Tried to capture at %dx%d, but device returned size %dx%d",
+            width, height, format.fmt.pix.width, format.fmt.pix.height));
     return FALSE;
   }
 invalid_pixelformat:
   {
-    if (!try_only) {
-      GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
-          (_("Device '%s' cannot capture in the specified format"),
-              v4l2object->videodev),
-          ("Tried to capture in %" GST_FOURCC_FORMAT
-              ", but device returned format" " %" GST_FOURCC_FORMAT,
-              GST_FOURCC_ARGS (pixelformat),
-              GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
-    }
+    GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+        (_("Device '%s' cannot capture in the specified format"),
+            v4l2object->videodev),
+        ("Tried to capture in %" GST_FOURCC_FORMAT
+            ", but device returned format" " %" GST_FOURCC_FORMAT,
+            GST_FOURCC_ARGS (pixelformat),
+            GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
     return FALSE;
   }
 invalid_planes:
   {
-    if (!try_only) {
-      GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
-          (_("Device '%s' does support non-contiguous planes"),
-              v4l2object->videodev),
-          ("Device wants %d planes", format.fmt.pix_mp.num_planes));
-    }
+    GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+        (_("Device '%s' does support non-contiguous planes"),
+            v4l2object->videodev),
+        ("Device wants %d planes", format.fmt.pix_mp.num_planes));
+    return FALSE;
+  }
+invalid_field:
+  {
+    enum v4l2_field wanted_field;
+
+    if (is_mplane)
+      wanted_field = format.fmt.pix_mp.field;
+    else
+      wanted_field = format.fmt.pix.field;
+
+    GST_V4L2_ERROR (error, RESOURCE, SETTINGS,
+        (_("Device '%s' does not support %s interlacing"),
+            v4l2object->videodev,
+            field == V4L2_FIELD_NONE ? "progressive" : "interleaved"),
+        ("Device wants %s interlacing",
+            wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
"progressive" : "interleaved")); + return FALSE; + } +invalid_colorimetry: + { + gchar *wanted_colorimetry; + + wanted_colorimetry = gst_video_colorimetry_to_string (&info.colorimetry); + + GST_V4L2_ERROR (error, RESOURCE, SETTINGS, + (_("Device '%s' does not support %s colorimetry"), + v4l2object->videodev, gst_structure_get_string (s, "colorimetry")), + ("Device wants %s colorimetry", wanted_colorimetry)); + + g_free (wanted_colorimetry); return FALSE; } get_parm_failed: @@ -3671,6 +3916,8 @@ gboolean gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error) { + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT, + caps); return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error); } @@ -3678,6 +3925,8 @@ gboolean gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error) { + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT, + caps); return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error); } @@ -3708,7 +3957,7 @@ gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info) memset (&fmt, 0x00, sizeof (struct v4l2_format)); fmt.type = v4l2object->type; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0) + if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_FMT, &fmt) < 0) goto get_fmt_failed; fmtdesc = gst_v4l2_object_get_format_from_fourcc (v4l2object, @@ -3733,13 +3982,13 @@ gst_v4l2_object_acquire_format (GstV4l2Object * v4l2object, GstVideoInfo * info) memset (&sel, 0, sizeof (struct v4l2_selection)); sel.type = v4l2object->type; sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) { + if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_SELECTION, &sel) >= 0) { r = &sel.r; } else { /* For ancient kernels, fall back to G_CROP */ memset (&crop, 0, sizeof (struct v4l2_crop)); crop.type = v4l2object->type; - if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0) + if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CROP, &crop) >= 0) r = &crop.c; } if (r) { @@ -3809,35 +4058,52 @@ unsupported_format: gboolean gst_v4l2_object_set_crop (GstV4l2Object * obj) { + struct v4l2_selection sel = { 0 }; struct v4l2_crop crop = { 0 }; + sel.type = obj->type; + sel.target = V4L2_SEL_TGT_CROP; + sel.flags = 0; + sel.r.left = obj->align.padding_left; + sel.r.top = obj->align.padding_top; + sel.r.width = obj->info.width; + sel.r.height = obj->info.height; + crop.type = obj->type; - crop.c.left = obj->align.padding_left; - crop.c.top = obj->align.padding_top; - crop.c.width = obj->info.width; - crop.c.height = obj->info.height; + crop.c = sel.r; if (obj->align.padding_left + obj->align.padding_top + obj->align.padding_right + obj->align.padding_bottom == 0) { - GST_DEBUG_OBJECT (obj->element, "no cropping needed"); + GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed"); return TRUE; } - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top, crop.c.width, crop.c.height); - if (v4l2_ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) { - GST_WARNING_OBJECT (obj->element, "VIDIOC_S_CROP failed"); - return FALSE; - } + if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) { + if (errno != ENOTTY) { + GST_WARNING_OBJECT (obj->dbg_obj, + "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s", + g_strerror (errno)); + return FALSE; + } else { + if (obj->ioctl 
+        GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
+        return FALSE;
+      }
 
-  if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
-    GST_WARNING_OBJECT (obj->element, "VIDIOC_G_CROP failed");
-    return FALSE;
+      if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+        GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
+        return FALSE;
+      }
+
+      sel.r = crop.c;
+    }
   }
 
-  GST_DEBUG_OBJECT (obj->element,
+  GST_DEBUG_OBJECT (obj->dbg_obj,
      "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
      crop.c.width, crop.c.height);
 
@@ -3865,11 +4131,51 @@ gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps)
 }
 
 gboolean
+gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
+{
+  GstStructure *config;
+  GstCaps *oldcaps;
+  gboolean ret;
+
+  if (!v4l2object->pool)
+    return FALSE;
+
+  config = gst_buffer_pool_get_config (v4l2object->pool);
+  gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+  ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
+
+  gst_structure_free (config);
+
+  return ret;
+}
+
+GstCaps *
+gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
+{
+  GstStructure *config;
+  GstCaps *oldcaps;
+
+  if (!v4l2object->pool)
+    return NULL;
+
+  config = gst_buffer_pool_get_config (v4l2object->pool);
+  gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+  if (oldcaps)
+    gst_caps_ref (oldcaps);
+
+  gst_structure_free (config);
+
+  return oldcaps;
+}
+
+gboolean
 gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
 {
   gboolean ret = TRUE;
 
-  GST_LOG_OBJECT (v4l2object->element, "start flushing");
+  GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
 
   if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
     gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
@@ -3882,7 +4188,7 @@ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
 {
   gboolean ret = TRUE;
 
-  GST_LOG_OBJECT (v4l2object->element, "stop flushing");
+  GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
 
   if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
     gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
@@ -3893,7 +4199,7 @@ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
 gboolean
 gst_v4l2_object_stop (GstV4l2Object * v4l2object)
 {
-  GST_DEBUG_OBJECT (v4l2object->element, "stopping");
+  GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
 
   if (!GST_V4L2_IS_OPEN (v4l2object))
     goto done;
@@ -3901,9 +4207,11 @@ gst_v4l2_object_stop (GstV4l2Object * v4l2object)
     goto done;
 
   if (v4l2object->pool) {
-    GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
-    gst_buffer_pool_set_active (v4l2object->pool, FALSE);
-    gst_object_unref (v4l2object->pool);
+    if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+      GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+      gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+      gst_object_unref (v4l2object->pool);
+    }
     v4l2object->pool = NULL;
   }
 
@@ -3924,27 +4232,81 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
 
   ret = gst_caps_new_empty ();
 
+  if (v4l2object->keep_aspect && !v4l2object->par) {
+    struct v4l2_cropcap cropcap;
+
+    memset (&cropcap, 0, sizeof (cropcap));
+
+    cropcap.type = v4l2object->type;
+    if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
+      if (errno != ENOTTY)
+        GST_WARNING_OBJECT (v4l2object->dbg_obj,
+            "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
+            g_strerror (errno));
+    } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
+      v4l2object->par = g_new0 (GValue, 1);
+      g_value_init (v4l2object->par, GST_TYPE_FRACTION);
+      gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
+          cropcap.pixelaspect.denominator);
+    }
+  }
+
   for (walk = formats; walk; walk = walk->next) {
     struct v4l2_fmtdesc *format;
     GstStructure *template;
+    GstCaps *tmp;
 
     format = (struct v4l2_fmtdesc *) walk->data;
 
     template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
 
-    if (template) {
-      GstCaps *tmp;
+    if (!template) {
+      GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+          "unknown format %" GST_FOURCC_FORMAT,
+          GST_FOURCC_ARGS (format->pixelformat));
+      continue;
+    }
+
+    /* If we have a filter, check if we need to probe this format or not */
+    if (filter) {
+      GstCaps *format_caps = gst_caps_new_empty ();
+
+      gst_caps_append_structure (format_caps, gst_structure_copy (template));
+
+      if (!gst_caps_can_intersect (format_caps, filter)) {
+        gst_caps_unref (format_caps);
+        gst_structure_free (template);
+        continue;
+      }
+
+      gst_caps_unref (format_caps);
+    }
+
+    tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+        format->pixelformat, template);
+    if (tmp)
+      gst_caps_append (ret, tmp);
+
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+    if (format->pixelformat == V4L2_PIX_FMT_NV12 ||
+        format->pixelformat == V4L2_PIX_FMT_YUV420) {
+      GstStructure *alt_s = gst_structure_copy (template);
+
+      if (format->pixelformat == V4L2_PIX_FMT_NV12)
+        gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+      else
+        gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
 
       tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
-          format->pixelformat, template);
+          format->pixelformat, alt_s);
+
       if (tmp)
         gst_caps_append (ret, tmp);
 
-      gst_structure_free (template);
-    } else {
-      GST_DEBUG_OBJECT (v4l2object->element, "unknown format %u",
-          format->pixelformat);
+      gst_structure_free (alt_s);
     }
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
+    gst_structure_free (template);
   }
 
   if (filter) {
@@ -3955,6 +4317,8 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
     gst_caps_unref (tmp);
   }
 
+  GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
+
   return ret;
 }
 
@@ -3973,8 +4337,6 @@ gst_v4l2_object_get_caps (GstV4l2Object * v4l2object, GstCaps * filter)
     ret = gst_caps_ref (v4l2object->probed_caps);
   }
 
-  GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
-
   return ret;
 }
 
@@ -3991,7 +4353,12 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
   GstAllocator *allocator = NULL;
   GstAllocationParams params = { 0 };
 
-  GST_DEBUG_OBJECT (obj->element, "decide allocation");
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+  GST_INFO_OBJECT (obj->dbg_obj, "decide allocation - %s",
+      V4L2_TYPE_IS_OUTPUT (obj->type) ? "output" : "capture");
"output" : "capture"); +#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ + GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation"); +#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */ g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE); @@ -4016,7 +4383,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) update = FALSE; } - GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%" + GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool); has_video_meta = @@ -4035,7 +4402,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) if (pool) { /* in READ/WRITE mode, prefer a downstream pool because our own pool * doesn't help much, we have to write to it as well */ - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "read/write mode: using downstream pool"); /* use the bigest size, when we use our own pool we can't really do any * other size than what the hardware gives us but for downstream pools @@ -4043,7 +4410,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) size = MAX (size, obj->info.size); } else if (can_share_own_pool) { /* no downstream pool, use our own then */ - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "read/write mode: no downstream pool, using our own"); pool = gst_object_ref (obj->pool); size = obj->info.size; @@ -4074,22 +4441,22 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) gst_object_unref (pool); pool = gst_object_ref (obj->pool); size = obj->info.size; - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: using our own pool %" GST_PTR_FORMAT, pool); pushing_from_our_pool = TRUE; } else if (pool) { - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: copying to downstream pool %" GST_PTR_FORMAT, pool); } else { - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: no usable pool, copying to generic pool"); size = MAX (size, obj->info.size); } break; case GST_V4L2_IO_AUTO: default: - GST_WARNING_OBJECT (obj->element, "unhandled mode"); + GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode"); break; } @@ -4101,14 +4468,16 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) if (pushing_from_our_pool) { /* When pushing from our own pool, we need what downstream one, to be able * to fill the pipeline, the minimum required to decoder according to the - * driver and 1 more, so we don't endup up with everything downstream or - * held by the decoder. */ - own_min = min + obj->min_buffers + 1; + * driver and 2 more, so we don't endup up with everything downstream or + * held by the decoder. We account 2 buffers for v4l2 so when one is being + * pushed downstream the other one can already be queued for the next + * frame. 
+    own_min = min + obj->min_buffers + 2;
 
     /* If no allocation parameters where provided, allow for a little more
      * buffers and enable copy threshold */
     if (!update) {
-      own_min += 3;
+      own_min += 2;
       gst_v4l2_buffer_pool_copy_at_threshold (GST_V4L2_BUFFER_POOL (pool),
           TRUE);
     } else {
@@ -4140,7 +4509,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
     config = gst_buffer_pool_get_config (obj->pool);
 
     if (obj->need_video_meta || has_video_meta) {
-      GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+      GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
       gst_buffer_pool_config_add_option (config,
           GST_BUFFER_POOL_OPTION_VIDEO_META);
     }
@@ -4148,14 +4517,14 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
     gst_buffer_pool_config_set_allocator (config, allocator, &params);
     gst_buffer_pool_config_set_params (config, caps, size, own_min, 0);
 
-    GST_DEBUG_OBJECT (obj->element, "setting own pool config to %"
+    GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %"
        GST_PTR_FORMAT, config);
 
     /* Our pool often need to adjust the value */
     if (!gst_buffer_pool_set_config (obj->pool, config)) {
       config = gst_buffer_pool_get_config (obj->pool);
 
-      GST_DEBUG_OBJECT (obj->element, "own pool config changed to %"
+      GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
          GST_PTR_FORMAT, config);
 
       /* our pool will adjust the maximum buffer, which we are fine with */
@@ -4172,12 +4541,12 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
     gst_buffer_pool_config_set_allocator (config, allocator, &params);
     gst_buffer_pool_config_set_params (config, caps, size, min, max);
 
-    GST_DEBUG_OBJECT (obj->element, "setting other pool config to %"
+    GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
        GST_PTR_FORMAT, config);
 
     /* if downstream supports video metadata, add this to the pool config */
     if (has_video_meta) {
-      GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+      GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
       gst_buffer_pool_config_add_option (config,
           GST_BUFFER_POOL_OPTION_VIDEO_META);
     }
@@ -4271,8 +4640,16 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
   if (caps == NULL)
     goto no_caps;
 
-  if ((pool = obj->pool))
-    gst_object_ref (pool);
+  switch (obj->mode) {
+    case GST_V4L2_IO_MMAP:
+    case GST_V4L2_IO_DMABUF:
+      if ((pool = obj->pool))
+        gst_object_ref (pool);
+      break;
+    default:
+      pool = NULL;
+      break;
+  }
 
   if (pool != NULL) {
     GstCaps *pcaps;
@@ -4282,7 +4659,7 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
     config = gst_buffer_pool_get_config (pool);
     gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
 
-    GST_DEBUG_OBJECT (obj->element,
+    GST_DEBUG_OBJECT (obj->dbg_obj,
        "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
     if (!gst_caps_is_equal (caps, pcaps)) {
       gst_structure_free (config);
@@ -4308,13 +4685,164 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
 
   /* ERRORS */
 no_caps:
   {
-    GST_DEBUG_OBJECT (obj->element, "no caps specified");
+    GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
     return FALSE;
   }
 different_caps:
   {
     /* different caps, we can't use this pool */
-    GST_DEBUG_OBJECT (obj->element, "pool has different caps");
+    GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
     return FALSE;
   }
 }
+
+gboolean
+gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
+{
+  GstVideoMeta *vmeta;
+  guint n_mem = gst_buffer_n_memory (buffer);
+
+  /* only import if requested */
+  switch (obj->mode) {
+    case GST_V4L2_IO_USERPTR:
+    case GST_V4L2_IO_DMABUF_IMPORT:
+      break;
+    default:
+      GST_DEBUG_OBJECT (obj->dbg_obj,
+          "The io-mode does not enable importation");
+      return FALSE;
+  }
+
+  vmeta = gst_buffer_get_video_meta (buffer);
+  if (!vmeta && obj->need_video_meta) {
+    GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
+        "stride/offset while the driver does not.");
+    return FALSE;
+  }
+
+  /* we need matching strides/offsets and size */
+  if (vmeta) {
+    guint p;
+    gboolean need_fmt_update = FALSE;
+
+    if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
+      GST_WARNING_OBJECT (obj->dbg_obj,
+          "Cannot import buffers with a different number of planes");
+      return FALSE;
+    }
+
+    for (p = 0; p < vmeta->n_planes; p++) {
+      if (vmeta->stride[p] < obj->info.stride[p]) {
+        GST_DEBUG_OBJECT (obj->dbg_obj,
+            "Not importing as remote stride %i is smaller than %i on plane %u",
+            vmeta->stride[p], obj->info.stride[p], p);
+        return FALSE;
+      } else if (vmeta->stride[p] > obj->info.stride[p]) {
+        need_fmt_update = TRUE;
+      }
+
+      if (vmeta->offset[p] < obj->info.offset[p]) {
+        GST_DEBUG_OBJECT (obj->dbg_obj,
+            "Not importing as offset %" G_GSIZE_FORMAT
+            " is smaller than %" G_GSIZE_FORMAT " on plane %u",
+            vmeta->offset[p], obj->info.offset[p], p);
+        return FALSE;
+      } else if (vmeta->offset[p] > obj->info.offset[p]) {
+        need_fmt_update = TRUE;
+      }
+    }
+
+    if (need_fmt_update) {
+      struct v4l2_format format;
+      gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
+
+      format = obj->format;
+
+      /* update the current format with the stride we want to import from */
+      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+        guint i;
+
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
+
+        for (i = 0; i < obj->n_v4l2_planes; i++) {
+          gint stride = vmeta->stride[i];
+
+          if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+            stride = GST_VIDEO_TILE_X_TILES (stride) <<
+                GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+          format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+          wanted_stride[i] = stride;
+          GST_DEBUG_OBJECT (obj->dbg_obj, "  [%u] %i", i, wanted_stride[i]);
+        }
+      } else {
+        gint stride = vmeta->stride[0];
+
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", stride);
+
+        if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+          stride = GST_VIDEO_TILE_X_TILES (stride) <<
+              GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+        format.fmt.pix.bytesperline = stride;
+        wanted_stride[0] = stride;
+      }
+
+      if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
+        GST_WARNING_OBJECT (obj->dbg_obj,
+            "Something went wrong trying to update current format: %s",
+            g_strerror (errno));
+        return FALSE;
+      }
+
+      gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
+          &obj->align);
+
+      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+        guint i;
+
+        for (i = 0; i < obj->n_v4l2_planes; i++) {
+          if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
+            GST_DEBUG_OBJECT (obj->dbg_obj,
+                "[%i] Driver did not accept the new stride (wants %i, got %i)",
+                i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
+                wanted_stride[i]);
+            return FALSE;
+          }
+        }
+      } else {
+        if (format.fmt.pix.bytesperline != wanted_stride[0]) {
+          GST_DEBUG_OBJECT (obj->dbg_obj,
+              "Driver did not accept the new stride (wants %i, got %i)",
+              format.fmt.pix.bytesperline, wanted_stride[0]);
+          return FALSE;
+        }
+      }
+    }
+  }
+
+  /* we can always import a single memory buffer, but otherwise we need the
+   * same number of memory objects. */
+  if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
+    GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
        "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
+    return FALSE;
+  }
+
+  /* For DMABuf importation we need DMABuf of course */
+  if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
+    guint i;
+
+    for (i = 0; i < n_mem; i++) {
+      GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+
+      if (!gst_is_dmabuf_memory (mem)) {
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
+        return FALSE;
+      }
+    }
+  }
+
+  /* for the remaining, only the kernel driver can tell */
+  return TRUE;
+}