X-Git-Url: http://review.tizen.org/git/?a=blobdiff_plain;f=sys%2Fv4l2%2Fgstv4l2object.c;h=9db4ae691faff902867f12ba7659f07bfcd6e29b;hb=a9ea40efb522a2f013d853e08dbaf703610c8027;hp=af62d6db6276620f146777e5f5460b65a3a62471;hpb=0e9fe263caef71d265f686e769d5a6d40713fd93;p=platform%2Fupstream%2Fgst-plugins-good.git diff --git a/sys/v4l2/gstv4l2object.c b/sys/v4l2/gstv4l2object.c index af62d6d..9db4ae6 100644 --- a/sys/v4l2/gstv4l2object.c +++ b/sys/v4l2/gstv4l2object.c @@ -22,12 +22,9 @@ #include #endif -#define _GNU_SOURCE /* Enable mmap64() */ - #include #include #include -#include #include #include #include @@ -37,6 +34,7 @@ #include #endif +#include "ext/videodev2.h" #include "gstv4l2object.h" #include "gstv4l2tuner.h" #include "gstv4l2colorbalance.h" @@ -44,6 +42,7 @@ #include "gst/gst-i18n-plugin.h" #include +#include GST_DEBUG_CATEGORY_EXTERN (v4l2_debug); #define GST_CAT_DEFAULT v4l2_debug @@ -114,6 +113,7 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW}, {V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW}, {V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW}, /* Palette formats */ {V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW}, @@ -159,10 +159,10 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW}, /* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */ - {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_CODEC}, - {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_CODEC}, + {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW}, + {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW}, /* compressed formats */ {V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC}, @@ -170,9 +170,11 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = { {V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT}, {V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT}, + {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC}, + {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC}, {V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC}, @@ -457,8 +459,25 @@ gst_v4l2_object_install_m2m_properties_helper (GObjectClass * gobject_class) GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); } +/* Support for 32bit off_t, this wrapper is casting off_t to gint64 */ +#ifdef HAVE_LIBV4L2 +#if SIZEOF_OFF_T < 8 + +static gpointer +v4l2_mmap_wrapper (gpointer start, gsize length, gint prot, gint flags, gint fd, + off_t offset) +{ + return v4l2_mmap (start, length, prot, flags, fd, (gint64) offset); +} + +#define v4l2_mmap v4l2_mmap_wrapper + +#endif /* SIZEOF_OFF_T < 8 */ +#endif /* HAVE_LIBV4L2 */ + GstV4l2Object * gst_v4l2_object_new (GstElement * element, + GstObject * debug_object, enum v4l2_buf_type type, const char *default_device, GstV4l2GetInOutFunction get_in_out_func, @@ -476,6 +495,7 @@ gst_v4l2_object_new (GstElement * element, v4l2object->formats = NULL; v4l2object->element = element; + v4l2object->dbg_obj = debug_object; v4l2object->get_in_out_func = get_in_out_func; v4l2object->set_in_out_func = set_in_out_func; v4l2object->update_fps_func = update_fps_func; @@ -496,7 +516,11 @@ gst_v4l2_object_new (GstElement * element, /* We now 
disable libv4l2 by default, but have an env to enable it. */ #ifdef HAVE_LIBV4L2 +#ifdef TIZEN_FEATURE_USE_LIBV4L2 + if (1) { +#else /* TIZEN_FEATURE_USE_LIBV4L2 */ if (g_getenv ("GST_V4L2_USE_LIBV4L2")) { +#endif /* TIZEN_FEATURE_USE_LIBV4L2 */ v4l2object->fd_open = v4l2_fd_open; v4l2object->close = v4l2_close; v4l2object->dup = v4l2_dup; @@ -512,7 +536,7 @@ gst_v4l2_object_new (GstElement * element, v4l2object->dup = dup; v4l2object->ioctl = ioctl; v4l2object->read = read; - v4l2object->mmap = mmap64; + v4l2object->mmap = mmap; v4l2object->munmap = munmap; } @@ -668,14 +692,18 @@ gst_v4l2_object_set_property_helper (GstV4l2Object * v4l2object, break; } case PROP_PIXEL_ASPECT_RATIO: - g_free (v4l2object->par); + if (v4l2object->par) { + g_value_unset (v4l2object->par); + g_free (v4l2object->par); + } v4l2object->par = g_new0 (GValue, 1); g_value_init (v4l2object->par, GST_TYPE_FRACTION); if (!g_value_transform (value, v4l2object->par)) { g_warning ("Could not transform string to aspect ratio"); gst_value_set_fraction (v4l2object->par, 1, 1); } - GST_DEBUG_OBJECT (v4l2object->element, "set PAR to %d/%d", + + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "set PAR to %d/%d", gst_value_get_fraction_numerator (v4l2object->par), gst_value_get_fraction_denominator (v4l2object->par)); break; @@ -700,15 +728,12 @@ gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object, break; case PROP_DEVICE_NAME: { - const guchar *new = NULL; + const guchar *name = NULL; - if (GST_V4L2_IS_OPEN (v4l2object)) { - new = v4l2object->vcap.card; - } else if (gst_v4l2_open (v4l2object)) { - new = v4l2object->vcap.card; - gst_v4l2_close (v4l2object); - } - g_value_set_string (value, (gchar *) new); + if (GST_V4L2_IS_OPEN (v4l2object)) + name = v4l2object->vcap.card; + + g_value_set_string (value, (gchar *) name); break; } case PROP_DEVICE_FD: @@ -802,9 +827,18 @@ gst_v4l2_get_driver_min_buffers (GstV4l2Object * v4l2object) control.id = V4L2_CID_MIN_BUFFERS_FOR_CAPTURE; if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "driver requires a minimum of %d buffers", control.value); +#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER +#define DEFAULT_DECODER_OUTPUT_BUFFER_COUNT 5 + if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) && control.value == 1) { + v4l2object->min_buffers = DEFAULT_DECODER_OUTPUT_BUFFER_COUNT; + GST_WARNING_OBJECT (v4l2object->dbg_obj, "but SET MIN BUFFER COUNT[%d] and it will be [%d] later", + v4l2object->min_buffers, v4l2object->min_buffers + 1); + } +#else /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ v4l2object->min_buffers = control.value; +#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ } else { v4l2object->min_buffers = 0; } @@ -824,7 +858,7 @@ gst_v4l2_set_defaults (GstV4l2Object * v4l2object) if (v4l2object->tv_norm) norm = gst_v4l2_tuner_get_norm_by_std_id (v4l2object, v4l2object->tv_norm); - GST_DEBUG_OBJECT (v4l2object->element, "tv_norm=0x%" G_GINT64_MODIFIER "x, " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "tv_norm=0x%" G_GINT64_MODIFIER "x, " "norm=%p", (guint64) v4l2object->tv_norm, norm); if (norm) { gst_tuner_set_norm (tuner, norm); @@ -904,6 +938,17 @@ gst_v4l2_object_close (GstV4l2Object * v4l2object) gst_v4l2_object_clear_format_list (v4l2object); } + if (v4l2object->par) { + g_value_unset (v4l2object->par); + g_free (v4l2object->par); + v4l2object->par = NULL; + } + + if (v4l2object->channel) { + g_free (v4l2object->channel); + v4l2object->channel = NULL; + } + return 
TRUE; } @@ -1012,6 +1057,7 @@ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt) break; case V4L2_PIX_FMT_GREY: /* 8 Greyscale */ + case V4L2_PIX_FMT_INVZ: rank = GREY_BASE_RANK; break; @@ -1130,7 +1176,7 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, gint n; struct v4l2_fmtdesc *format; - GST_DEBUG_OBJECT (v4l2object->element, "getting src format enumerations"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "getting src format enumerations"); /* format enumeration */ for (n = 0;; n++) { @@ -1148,12 +1194,12 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, } } - GST_LOG_OBJECT (v4l2object->element, "index: %u", format->index); - GST_LOG_OBJECT (v4l2object->element, "type: %d", format->type); - GST_LOG_OBJECT (v4l2object->element, "flags: %08x", format->flags); - GST_LOG_OBJECT (v4l2object->element, "description: '%s'", + GST_LOG_OBJECT (v4l2object->dbg_obj, "index: %u", format->index); + GST_LOG_OBJECT (v4l2object->dbg_obj, "type: %d", format->type); + GST_LOG_OBJECT (v4l2object->dbg_obj, "flags: %08x", format->flags); + GST_LOG_OBJECT (v4l2object->dbg_obj, "description: '%s'", format->description); - GST_LOG_OBJECT (v4l2object->element, "pixelformat: %" GST_FOURCC_FORMAT, + GST_LOG_OBJECT (v4l2object->dbg_obj, "pixelformat: %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (format->pixelformat)); /* sort formats according to our preference; we do this, because caps @@ -1167,11 +1213,11 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object, { GSList *l; - GST_INFO_OBJECT (v4l2object->element, "got %d format(s):", n); + GST_INFO_OBJECT (v4l2object->dbg_obj, "got %d format(s):", n); for (l = v4l2object->formats; l != NULL; l = l->next) { format = l->data; - GST_INFO_OBJECT (v4l2object->element, + GST_INFO_OBJECT (v4l2object->dbg_obj, " %" GST_FOURCC_FORMAT "%s", GST_FOURCC_ARGS (format->pixelformat), ((format->flags & V4L2_FMT_FLAG_EMULATED)) ? 
" (emulated)" : ""); } @@ -1185,7 +1231,7 @@ failed: { g_free (format); - if (!GST_IS_ELEMENT (v4l2object->element)) + if (v4l2object->element) return FALSE; GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS, @@ -1330,6 +1376,9 @@ gst_v4l2_object_v4l2fourcc_to_video_format (guint32 fourcc) case V4L2_PIX_FMT_NV24: format = GST_VIDEO_FORMAT_NV24; break; + case V4L2_PIX_FMT_INVZ: + format = GST_VIDEO_FORMAT_INVZ; + break; default: format = GST_VIDEO_FORMAT_UNKNOWN; break; @@ -1357,6 +1406,10 @@ gst_v4l2_object_v4l2fourcc_is_rgb (guint32 fourcc) case V4L2_PIX_FMT_BGR32: case V4L2_PIX_FMT_ABGR32: case V4L2_PIX_FMT_ARGB32: + case V4L2_PIX_FMT_SBGGR8: + case V4L2_PIX_FMT_SGBRG8: + case V4L2_PIX_FMT_SGRBG8: + case V4L2_PIX_FMT_SRGGB8: ret = TRUE; break; default: @@ -1379,7 +1432,7 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) break; case V4L2_PIX_FMT_MPEG1: structure = gst_structure_new ("video/mpeg", - "mpegversion", G_TYPE_INT, 2, NULL); + "mpegversion", G_TYPE_INT, 1, NULL); break; case V4L2_PIX_FMT_MPEG2: structure = gst_structure_new ("video/mpeg", @@ -1391,6 +1444,9 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) "mpegversion", G_TYPE_INT, 4, "systemstream", G_TYPE_BOOLEAN, FALSE, NULL); break; + case V4L2_PIX_FMT_FWHT: + structure = gst_structure_new_empty ("video/x-fwht"); + break; case V4L2_PIX_FMT_H263: structure = gst_structure_new ("video/x-h263", "variant", G_TYPE_STRING, "itu", NULL); @@ -1405,6 +1461,11 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) "stream-format", G_TYPE_STRING, "avc", "alignment", G_TYPE_STRING, "au", NULL); break; + case V4L2_PIX_FMT_HEVC: /* H.265 */ + structure = gst_structure_new ("video/x-h265", + "stream-format", G_TYPE_STRING, "byte-stream", "alignment", + G_TYPE_STRING, "au", NULL); + break; case V4L2_PIX_FMT_VC1_ANNEX_G: case V4L2_PIX_FMT_VC1_ANNEX_L: structure = gst_structure_new ("video/x-wmv", @@ -1451,7 +1512,8 @@ gst_v4l2_object_v4l2fourcc_to_bare_struct (guint32 fourcc) case V4L2_PIX_FMT_UYVY: case V4L2_PIX_FMT_YUV422P: case V4L2_PIX_FMT_YVYU: - case V4L2_PIX_FMT_YUV411P:{ + case V4L2_PIX_FMT_YUV411P: + case V4L2_PIX_FMT_INVZ:{ GstVideoFormat format; format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc); if (format != GST_VIDEO_FORMAT_UNKNOWN) @@ -1574,6 +1636,16 @@ gst_v4l2_object_get_caps_helper (GstV4L2FormatFlags flags) } switch (gst_v4l2_formats[i].format) { +#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER + case V4L2_PIX_FMT_YUV420: + alt_s = gst_structure_copy (structure); + gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL); + break; + case V4L2_PIX_FMT_NV12: + alt_s = gst_structure_copy (structure); + gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL); + break; +#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ case V4L2_PIX_FMT_RGB32: alt_s = gst_structure_copy (structure); gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL); @@ -1663,6 +1735,9 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, if (g_str_equal (mimetype, "video/x-raw")) { switch (GST_VIDEO_INFO_FORMAT (info)) { case GST_VIDEO_FORMAT_I420: +#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER + case GST_VIDEO_FORMAT_S420: +#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ fourcc = V4L2_PIX_FMT_YUV420; fourcc_nc = V4L2_PIX_FMT_YUV420M; break; @@ -1682,6 +1757,9 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, fourcc = V4L2_PIX_FMT_YUV422P; break; case GST_VIDEO_FORMAT_NV12: +#ifdef 
TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER + case GST_VIDEO_FORMAT_SN12: +#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ fourcc = V4L2_PIX_FMT_NV12; fourcc_nc = V4L2_PIX_FMT_NV12M; break; @@ -1744,6 +1822,9 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, case GST_VIDEO_FORMAT_GRAY16_BE: fourcc = V4L2_PIX_FMT_Y16_BE; break; + case GST_VIDEO_FORMAT_INVZ: + fourcc = V4L2_PIX_FMT_INVZ; + break; default: break; } @@ -1772,6 +1853,8 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, break; } } + } else if (g_str_equal (mimetype, "video/x-fwht")) { + fourcc = V4L2_PIX_FMT_FWHT; } else if (g_str_equal (mimetype, "video/x-h263")) { fourcc = V4L2_PIX_FMT_H263; } else if (g_str_equal (mimetype, "video/x-h264")) { @@ -1781,6 +1864,8 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps, fourcc = V4L2_PIX_FMT_H264_NO_SC; else fourcc = V4L2_PIX_FMT_H264; + } else if (g_str_equal (mimetype, "video/x-h265")) { + fourcc = V4L2_PIX_FMT_HEVC; } else if (g_str_equal (mimetype, "video/x-vp8")) { fourcc = V4L2_PIX_FMT_VP8; } else if (g_str_equal (mimetype, "video/x-vp9")) { @@ -1880,8 +1965,6 @@ static gboolean gst_v4l2_object_get_interlace_mode (enum v4l2_field field, GstVideoInterlaceMode * interlace_mode) { - /* NB: If you add new return values, please fix mode_strings in - * gst_v4l2_object_add_interlace_mode */ switch (field) { case V4L2_FIELD_ANY: GST_ERROR @@ -1946,7 +2029,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, cinfo->transfer = GST_VIDEO_TRANSFER_SRGB; cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709; break; - case V4L2_COLORSPACE_ADOBERGB: + case V4L2_COLORSPACE_OPRGB: cinfo->range = GST_VIDEO_COLOR_RANGE_16_235; cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601; cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB; @@ -2032,7 +2115,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT709; break; case V4L2_YCBCR_ENC_BT2020_CONST_LUM: - GST_FIXME ("BT2020 with constant lumma is not defined, assuming BT2020"); + GST_FIXME ("BT2020 with constant luma is not defined, assuming BT2020"); /* fallthrough */ case V4L2_YCBCR_ENC_BT2020: cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT2020; @@ -2057,7 +2140,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, switch (transfer) { case V4L2_XFER_FUNC_709: - if (fmt->fmt.pix.height > 2160) + if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160) cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12; else cinfo->transfer = GST_VIDEO_TRANSFER_BT709; @@ -2065,7 +2148,7 @@ gst_v4l2_object_get_colorspace (struct v4l2_format *fmt, case V4L2_XFER_FUNC_SRGB: cinfo->transfer = GST_VIDEO_TRANSFER_SRGB; break; - case V4L2_XFER_FUNC_ADOBERGB: + case V4L2_XFER_FUNC_OPRGB: cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB; break; case V4L2_XFER_FUNC_SMPTE240M: @@ -2113,7 +2196,7 @@ gst_v4l2_object_try_fmt (GstV4l2Object * v4l2object, error: memcpy (try_fmt, &fmt, sizeof (fmt)); - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unable to try format: %s", g_strerror (errno)); return r; } @@ -2125,13 +2208,10 @@ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object, { struct v4l2_format fmt; GValue interlace_formats = { 0, }; + enum v4l2_field formats[] = { V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED }; + gsize i; GstVideoInterlaceMode interlace_mode, prev = -1; - const gchar *mode_strings[] = { "progressive", - "interleaved", - "mixed" - }; - if (!g_str_equal (gst_structure_get_name 
(s), "video/x-raw")) return; @@ -2143,36 +2223,26 @@ gst_v4l2_object_add_interlace_mode (GstV4l2Object * v4l2object, g_value_init (&interlace_formats, GST_TYPE_LIST); /* Try twice - once for NONE, once for INTERLACED. */ - memset (&fmt, 0, sizeof (fmt)); - fmt.type = v4l2object->type; - fmt.fmt.pix.width = width; - fmt.fmt.pix.height = height; - fmt.fmt.pix.pixelformat = pixelformat; - fmt.fmt.pix.field = V4L2_FIELD_NONE; - - if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && - gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) { - GValue interlace_enum = { 0, }; - g_value_init (&interlace_enum, G_TYPE_STRING); - g_value_set_string (&interlace_enum, mode_strings[interlace_mode]); - gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum); - prev = interlace_mode; - } - - memset (&fmt, 0, sizeof (fmt)); - fmt.type = v4l2object->type; - fmt.fmt.pix.width = width; - fmt.fmt.pix.height = height; - fmt.fmt.pix.pixelformat = pixelformat; - fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; - - if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && - gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) && - prev != interlace_mode) { - GValue interlace_enum = { 0, }; - g_value_init (&interlace_enum, G_TYPE_STRING); - g_value_set_string (&interlace_enum, mode_strings[interlace_mode]); - gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum); + for (i = 0; i < G_N_ELEMENTS (formats); i++) { + memset (&fmt, 0, sizeof (fmt)); + fmt.type = v4l2object->type; + fmt.fmt.pix.width = width; + fmt.fmt.pix.height = height; + fmt.fmt.pix.pixelformat = pixelformat; + fmt.fmt.pix.field = formats[i]; + + if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 && + gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) + && prev != interlace_mode) { + GValue interlace_enum = { 0, }; + const gchar *mode_string; + g_value_init (&interlace_enum, G_TYPE_STRING); + mode_string = gst_video_interlace_mode_to_string (interlace_mode); + g_value_set_string (&interlace_enum, mode_string); + gst_value_list_append_and_take_value (&interlace_formats, + &interlace_enum); + prev = interlace_mode; + } } if (gst_v4l2src_value_simplify (&interlace_formats) @@ -2294,7 +2364,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, ival.width = width; ival.height = height; - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "get frame interval for %ux%u, %" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat)); @@ -2319,7 +2389,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, denom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "adding discrete framerate: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "adding discrete framerate: %d/%d", denom, num); /* swap to get the framerate */ @@ -2349,7 +2419,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, minnum >>= 1; mindenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "stepwise min frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise min frame interval: %d/%d", minnum, mindenom); gst_value_set_fraction (&min, minnum, mindenom); @@ -2361,7 +2431,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, maxdenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, "stepwise max frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise max frame interval: %d/%d", maxnum, maxdenom); gst_value_set_fraction (&max, maxnum, maxdenom); 
@@ -2382,7 +2452,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, /* since we only have gst_value_fraction_subtract and not add, negate the * numerator */ - GST_LOG_OBJECT (v4l2object->element, "stepwise step frame interval: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "stepwise step frame interval: %d/%d", num, denom); gst_value_set_fraction (&step, -num, denom); @@ -2391,7 +2461,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, num = gst_value_get_fraction_numerator (&min); denom = gst_value_get_fraction_denominator (&min); - GST_LOG_OBJECT (v4l2object->element, "adding stepwise framerate: %d/%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "adding stepwise framerate: %d/%d", denom, num); /* invert to get the framerate */ @@ -2403,13 +2473,13 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, /* we're actually adding because step was negated above. This is because * there is no _add function... */ if (!gst_value_fraction_subtract (&min, &min, &step)) { - GST_WARNING_OBJECT (v4l2object->element, "could not step fraction!"); + GST_WARNING_OBJECT (v4l2object->dbg_obj, "could not step fraction!"); break; } } if (!added) { /* no range was added, leave the default range from the template */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "no range added, leaving default"); g_value_unset (&rates); } @@ -2432,7 +2502,7 @@ gst_v4l2_object_probe_caps_for_format_and_size (GstV4l2Object * v4l2object, maxdenom >>= 1; } - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "continuous frame interval %d/%d to %d/%d", maxdenom, maxnum, denom, num); @@ -2469,7 +2539,7 @@ return_data: /* ERRORS */ enum_frameintervals_failed: { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u", GST_FOURCC_ARGS (pixelformat), width, height); goto return_data; @@ -2477,7 +2547,7 @@ enum_frameintervals_failed: unknown_type: { /* I don't see how this is actually an error, we ignore the format then */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u", GST_FOURCC_ARGS (pixelformat), width, height, ival.type); return NULL; @@ -2562,7 +2632,7 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, size.index = 0; size.pixel_format = pixelformat; - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Enumerating frame sizes for %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); @@ -2571,7 +2641,7 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) { do { - GST_LOG_OBJECT (v4l2object->element, "got discrete frame size %dx%d", + GST_LOG_OBJECT (v4l2object->dbg_obj, "got discrete frame size %dx%d", size.discrete.width, size.discrete.height); w = MIN (size.discrete.width, G_MAXINT); @@ -2588,23 +2658,23 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, size.index++; } while (v4l2object->ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0); - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "done iterating discrete frame sizes"); } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) { guint32 maxw, maxh, step_w, step_h; - GST_DEBUG_OBJECT (v4l2object->element, "we have stepwise frame sizes:"); - GST_DEBUG_OBJECT (v4l2object->element, "min width: 
%d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have stepwise frame sizes:"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d", size.stepwise.min_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.min_height); - GST_DEBUG_OBJECT (v4l2object->element, "max width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d", size.stepwise.max_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.max_height); - GST_DEBUG_OBJECT (v4l2object->element, "step width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d", size.stepwise.step_width); - GST_DEBUG_OBJECT (v4l2object->element, "step height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d", size.stepwise.step_height); w = MAX (size.stepwise.min_width, 1); @@ -2638,14 +2708,14 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) { guint32 maxw, maxh; - GST_DEBUG_OBJECT (v4l2object->element, "we have continuous frame sizes:"); - GST_DEBUG_OBJECT (v4l2object->element, "min width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "we have continuous frame sizes:"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min width: %d", size.stepwise.min_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.min_height); - GST_DEBUG_OBJECT (v4l2object->element, "max width: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d", size.stepwise.max_width); - GST_DEBUG_OBJECT (v4l2object->element, "min height: %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d", size.stepwise.max_height); w = MAX (size.stepwise.min_width, 1); @@ -2689,7 +2759,7 @@ gst_v4l2_object_probe_caps_for_format (GstV4l2Object * v4l2object, enum_framesizes_failed: { /* I don't see how this is actually an error */ - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno)); goto default_frame_sizes; @@ -2698,7 +2768,7 @@ enum_framesizes_no_results: { /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in * question doesn't actually support it yet */ - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "No results for pixelformat %" GST_FOURCC_FORMAT " enumerating frame sizes, trying fallback", GST_FOURCC_ARGS (pixelformat)); @@ -2706,7 +2776,7 @@ enum_framesizes_no_results: } unknown_type: { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT ": %u", GST_FOURCC_ARGS (pixelformat), size.type); goto default_frame_sizes; @@ -2721,13 +2791,13 @@ default_frame_sizes: max_w = max_h = GST_V4L2_MAX_SIZE; if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &min_w, &min_h)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Could not probe minimum capture size for pixelformat %" GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); } if (!gst_v4l2_object_get_nearest_size (v4l2object, pixelformat, &max_w, &max_h)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Could not probe maximum capture size for pixelformat %" GST_FOURCC_FORMAT, 
GST_FOURCC_ARGS (pixelformat)); } @@ -2800,7 +2870,7 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, g_return_val_if_fail (width != NULL, FALSE); g_return_val_if_fail (height != NULL, FALSE); - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT, *width, *height, GST_FOURCC_ARGS (pixelformat)); @@ -2817,14 +2887,14 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, if (gst_v4l2_object_try_fmt (v4l2object, &fmt) < 0) goto error; - GST_LOG_OBJECT (v4l2object->element, + GST_LOG_OBJECT (v4l2object->dbg_obj, "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height); *width = fmt.fmt.pix.width; *height = fmt.fmt.pix.height; if (!gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unsupported field type for %" GST_FOURCC_FORMAT "@%ux%u: %u", GST_FOURCC_ARGS (pixelformat), *width, *height, fmt.fmt.pix.field); goto error; @@ -2834,7 +2904,7 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object, error: if (!ret) { - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Unable to try format: %s", g_strerror (errno)); } @@ -2842,11 +2912,36 @@ error: } static gboolean +gst_v4l2_object_is_dmabuf_supported (GstV4l2Object * v4l2object) +{ + gboolean ret = TRUE; + struct v4l2_exportbuffer expbuf = { + .type = v4l2object->type, + .index = -1, + .plane = -1, + .flags = O_CLOEXEC | O_RDWR, + }; + + if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) { + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "libv4l2 converter detected, disabling DMABuf"); + ret = FALSE; + } + + /* Expected to fail, but ENOTTY tells us that it is not implemented. */ + v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf); + if (errno == ENOTTY) + ret = FALSE; + + return ret; +} + +static gboolean gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) { GstV4l2IOMode mode; - GST_DEBUG_OBJECT (v4l2object->element, "initializing the %s system", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "initializing the %s system", V4L2_TYPE_IS_OUTPUT (v4l2object->type) ? 
"output" : "capture"); GST_V4L2_CHECK_OPEN (v4l2object); @@ -2862,16 +2957,23 @@ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) goto method_not_supported; if (v4l2object->device_caps & V4L2_CAP_STREAMING) { - if (v4l2object->req_mode == GST_V4L2_IO_AUTO) - mode = GST_V4L2_IO_MMAP; - } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP) + if (v4l2object->req_mode == GST_V4L2_IO_AUTO) { + if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) && + gst_v4l2_object_is_dmabuf_supported (v4l2object)) { + mode = GST_V4L2_IO_DMABUF; + } else { + mode = GST_V4L2_IO_MMAP; + } + } + } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP || + v4l2object->req_mode == GST_V4L2_IO_DMABUF) goto method_not_supported; /* if still no transport selected, error out */ if (mode == GST_V4L2_IO_AUTO) goto no_supported_capture_method; - GST_INFO_OBJECT (v4l2object->element, "accessing buffers via mode %d", mode); + GST_INFO_OBJECT (v4l2object->dbg_obj, "accessing buffers via mode %d", mode); v4l2object->mode = mode; /* If min_buffers is not set, the driver either does not support the control or @@ -2880,7 +2982,7 @@ gst_v4l2_object_setup_pool (GstV4l2Object * v4l2object, GstCaps * caps) gst_v4l2_get_driver_min_buffers (v4l2object); /* Map the buffers */ - GST_LOG_OBJECT (v4l2object->element, "initiating buffer pool"); + GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool"); if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps))) goto buffer_pool_new_failed; @@ -2961,7 +3063,7 @@ gst_v4l2_object_extrapolate_info (GstV4l2Object * v4l2object, offs += estride * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, padded_height); - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Extrapolated for plane %d with base stride %d: " "stride %d, offset %" G_GSIZE_FORMAT, i, stride, info->stride[i], info->offset[i]); @@ -2982,7 +3084,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, { const GstVideoFormatInfo *finfo = info->finfo; gboolean standard_stride = TRUE; - gint stride, padded_width, padded_height, i; + gint stride, pstride, padded_width, padded_height, i; if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED) { v4l2object->n_v4l2_planes = 1; @@ -2996,11 +3098,21 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, else stride = format->fmt.pix.bytesperline; - padded_width = stride / GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0); + pstride = GST_VIDEO_FORMAT_INFO_PSTRIDE (finfo, 0); + if (pstride) { + padded_width = stride / pstride; + } else { + /* pstride can be 0 for complex formats */ + GST_WARNING_OBJECT (v4l2object->element, + "format %s has a pstride of 0, cannot compute padded with", + gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info))); + padded_width = stride; + } if (padded_width < format->fmt.pix.width) - GST_WARNING_OBJECT (v4l2object->element, - "Driver bug detected, stride is too small for the width"); + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Driver bug detected, stride (%d) is too small for the width (%d)", + padded_width, format->fmt.pix.width); align->padding_right = padded_width - info->width - align->padding_left; @@ -3056,7 +3168,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, /* adjust the offset to take into account left and top */ if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) { if ((align->padding_left + align->padding_top) > 0) - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Left and top padding is not permitted for tiled formats"); } else { for (i = 0; 
i < finfo->n_planes; i++) { @@ -3074,7 +3186,7 @@ gst_v4l2_object_save_format (GstV4l2Object * v4l2object, } store_info: - GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %" G_GSIZE_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got sizeimage %" G_GSIZE_FORMAT, info->size); /* to avoid copies we need video meta if there is padding */ @@ -3129,6 +3241,34 @@ gst_v4l2_object_extrapolate_stride (const GstVideoFormatInfo * finfo, } static gboolean +gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo, + const gchar * color) +{ + GstVideoColorimetry ci; + static const GstVideoColorimetry ci_likely_jpeg = { + GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601, + GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN + }; + static const GstVideoColorimetry ci_jpeg = { + GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601, + GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709 + }; + + if (!gst_video_colorimetry_from_string (&ci, color)) + return FALSE; + + if (gst_video_colorimetry_is_equal (&ci, cinfo)) + return TRUE; + + /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */ + if (gst_video_colorimetry_is_equal (&ci, &ci_likely_jpeg) + && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg)) + return TRUE; + + return FALSE; +} + +static gboolean gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, gboolean try_only, GstV4l2Error * error) { @@ -3149,6 +3289,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, enum v4l2_ycbcr_encoding matrix = 0; enum v4l2_xfer_func transfer = 0; GstStructure *s; + gboolean disable_colorimetry = FALSE; g_return_val_if_fail (!v4l2object->skip_try_fmt_probes || gst_caps_is_writable (caps), FALSE); @@ -3178,145 +3319,155 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, n_v4l_planes = 1; if (GST_VIDEO_INFO_IS_INTERLACED (&info)) { - GST_DEBUG_OBJECT (v4l2object->element, "interlaced video"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "interlaced video"); /* ideally we would differentiate between types of interlaced video * but there is not sufficient information in the caps.. 
*/ field = V4L2_FIELD_INTERLACED; } else { - GST_DEBUG_OBJECT (v4l2object->element, "progressive video"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "progressive video"); field = V4L2_FIELD_NONE; } - if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) { - /* We first pick th main colorspace from the primaries */ - switch (info.colorimetry.primaries) { - case GST_VIDEO_COLOR_PRIMARIES_BT709: - /* There is two colorspaces using these primaries, use the range to - * differentiate */ - if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235) - colorspace = V4L2_COLORSPACE_REC709; - else - colorspace = V4L2_COLORSPACE_SRGB; - break; - case GST_VIDEO_COLOR_PRIMARIES_BT470M: - colorspace = V4L2_COLORSPACE_470_SYSTEM_M; - break; - case GST_VIDEO_COLOR_PRIMARIES_BT470BG: - colorspace = V4L2_COLORSPACE_470_SYSTEM_BG; - break; - case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: - colorspace = V4L2_COLORSPACE_SMPTE170M; - break; - case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: - colorspace = V4L2_COLORSPACE_SMPTE240M; - break; + /* We first pick the main colorspace from the primaries */ + switch (info.colorimetry.primaries) { + case GST_VIDEO_COLOR_PRIMARIES_BT709: + /* There is two colorspaces using these primaries, use the range to + * differentiate */ + if (info.colorimetry.range == GST_VIDEO_COLOR_RANGE_16_235) + colorspace = V4L2_COLORSPACE_REC709; + else + colorspace = V4L2_COLORSPACE_SRGB; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT2020: + colorspace = V4L2_COLORSPACE_BT2020; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT470M: + colorspace = V4L2_COLORSPACE_470_SYSTEM_M; + break; + case GST_VIDEO_COLOR_PRIMARIES_BT470BG: + colorspace = V4L2_COLORSPACE_470_SYSTEM_BG; + break; + case GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: + colorspace = V4L2_COLORSPACE_SMPTE170M; + break; + case GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: + colorspace = V4L2_COLORSPACE_SMPTE240M; + break; - case GST_VIDEO_COLOR_PRIMARIES_FILM: - case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: - /* We don't know, we will guess */ - break; + case GST_VIDEO_COLOR_PRIMARIES_FILM: + case GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: + /* We don't know, we will guess */ + break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry primaries %d", info.colorimetry.primaries); - break; - } + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry primaries %d", info.colorimetry.primaries); + break; + } - switch (info.colorimetry.range) { - case GST_VIDEO_COLOR_RANGE_0_255: - range = V4L2_QUANTIZATION_FULL_RANGE; - break; - case GST_VIDEO_COLOR_RANGE_16_235: - range = V4L2_QUANTIZATION_LIM_RANGE; - break; - case GST_VIDEO_COLOR_RANGE_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry range %d", info.colorimetry.range); - break; - } + switch (info.colorimetry.range) { + case GST_VIDEO_COLOR_RANGE_0_255: + range = V4L2_QUANTIZATION_FULL_RANGE; + break; + case GST_VIDEO_COLOR_RANGE_16_235: + range = V4L2_QUANTIZATION_LIM_RANGE; + break; + case GST_VIDEO_COLOR_RANGE_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry range %d", info.colorimetry.range); + break; + } - switch (info.colorimetry.matrix) { - case GST_VIDEO_COLOR_MATRIX_RGB: - /* Unspecified, leave to default */ - break; - /* FCC is about the same as BT601 with less digit */ - case GST_VIDEO_COLOR_MATRIX_FCC: - case GST_VIDEO_COLOR_MATRIX_BT601: - matrix = V4L2_YCBCR_ENC_601; - break; - case 
GST_VIDEO_COLOR_MATRIX_BT709: - matrix = V4L2_YCBCR_ENC_709; - break; - case GST_VIDEO_COLOR_MATRIX_SMPTE240M: - matrix = V4L2_YCBCR_ENC_SMPTE240M; - break; - case GST_VIDEO_COLOR_MATRIX_BT2020: - matrix = V4L2_YCBCR_ENC_BT2020; - break; - case GST_VIDEO_COLOR_MATRIX_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry matrix %d", info.colorimetry.matrix); - break; - } + switch (info.colorimetry.matrix) { + case GST_VIDEO_COLOR_MATRIX_RGB: + /* Unspecified, leave to default */ + break; + /* FCC is about the same as BT601 with less digit */ + case GST_VIDEO_COLOR_MATRIX_FCC: + case GST_VIDEO_COLOR_MATRIX_BT601: + matrix = V4L2_YCBCR_ENC_601; + break; + case GST_VIDEO_COLOR_MATRIX_BT709: + matrix = V4L2_YCBCR_ENC_709; + break; + case GST_VIDEO_COLOR_MATRIX_SMPTE240M: + matrix = V4L2_YCBCR_ENC_SMPTE240M; + break; + case GST_VIDEO_COLOR_MATRIX_BT2020: + matrix = V4L2_YCBCR_ENC_BT2020; + break; + case GST_VIDEO_COLOR_MATRIX_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry matrix %d", info.colorimetry.matrix); + break; + } - switch (info.colorimetry.transfer) { - case GST_VIDEO_TRANSFER_GAMMA18: - case GST_VIDEO_TRANSFER_GAMMA20: - case GST_VIDEO_TRANSFER_GAMMA22: - case GST_VIDEO_TRANSFER_GAMMA28: - GST_WARNING_OBJECT (v4l2object->element, - "GAMMA 18, 20, 22, 28 transfer functions not supported"); - /* fallthrough */ - case GST_VIDEO_TRANSFER_GAMMA10: - transfer = V4L2_XFER_FUNC_NONE; - break; - case GST_VIDEO_TRANSFER_BT709: - transfer = V4L2_XFER_FUNC_709; - break; - case GST_VIDEO_TRANSFER_SMPTE240M: - transfer = V4L2_XFER_FUNC_SMPTE240M; - break; - case GST_VIDEO_TRANSFER_SRGB: - transfer = V4L2_XFER_FUNC_SRGB; - break; - case GST_VIDEO_TRANSFER_LOG100: - case GST_VIDEO_TRANSFER_LOG316: - GST_WARNING_OBJECT (v4l2object->element, - "LOG 100, 316 transfer functions not supported"); - /* FIXME No known sensible default, maybe AdobeRGB ? */ - break; - case GST_VIDEO_TRANSFER_UNKNOWN: - /* We let the driver pick a default one */ - break; - default: - GST_WARNING_OBJECT (v4l2object->element, - "Unknown colorimetry tranfer %d", info.colorimetry.transfer); - break; - } + switch (info.colorimetry.transfer) { + case GST_VIDEO_TRANSFER_GAMMA18: + case GST_VIDEO_TRANSFER_GAMMA20: + case GST_VIDEO_TRANSFER_GAMMA22: + case GST_VIDEO_TRANSFER_GAMMA28: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "GAMMA 18, 20, 22, 28 transfer functions not supported"); + /* fallthrough */ + case GST_VIDEO_TRANSFER_GAMMA10: + transfer = V4L2_XFER_FUNC_NONE; + break; + case GST_VIDEO_TRANSFER_BT2020_12: + case GST_VIDEO_TRANSFER_BT709: + transfer = V4L2_XFER_FUNC_709; + break; + case GST_VIDEO_TRANSFER_SMPTE240M: + transfer = V4L2_XFER_FUNC_SMPTE240M; + break; + case GST_VIDEO_TRANSFER_SRGB: + transfer = V4L2_XFER_FUNC_SRGB; + break; + case GST_VIDEO_TRANSFER_LOG100: + case GST_VIDEO_TRANSFER_LOG316: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "LOG 100, 316 transfer functions not supported"); + /* FIXME No known sensible default, maybe AdobeRGB ? 
*/ + break; + case GST_VIDEO_TRANSFER_UNKNOWN: + /* We let the driver pick a default one */ + break; + default: + GST_WARNING_OBJECT (v4l2object->dbg_obj, + "Unknown colorimetry tranfer %d", info.colorimetry.transfer); + break; + } - if (colorspace == 0) { - /* Try to guess colorspace according to pixelformat and size */ - if (GST_VIDEO_INFO_IS_YUV (&info)) { + if (colorspace == 0) { + /* Try to guess colorspace according to pixelformat and size */ + if (GST_VIDEO_INFO_IS_YUV (&info)) { + if (range == V4L2_QUANTIZATION_FULL_RANGE + && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) { + /* Full range BT.601 YCbCr encoding with unknown primaries and transfer + * function most likely is JPEG */ + colorspace = V4L2_COLORSPACE_JPEG; + transfer = V4L2_XFER_FUNC_SRGB; + } else { /* SD streams likely use SMPTE170M and HD streams REC709 */ if (width <= 720 && height <= 576) colorspace = V4L2_COLORSPACE_SMPTE170M; else colorspace = V4L2_COLORSPACE_REC709; - } else if (GST_VIDEO_INFO_IS_RGB (&info)) { - colorspace = V4L2_COLORSPACE_SRGB; - transfer = V4L2_XFER_FUNC_NONE; } + } else if (GST_VIDEO_INFO_IS_RGB (&info)) { + colorspace = V4L2_COLORSPACE_SRGB; + transfer = V4L2_XFER_FUNC_NONE; } } - GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format %dx%d, format " "%" GST_FOURCC_FORMAT " stride: %d", width, height, GST_FOURCC_ARGS (pixelformat), GST_VIDEO_INFO_PLANE_STRIDE (&info, 0)); @@ -3349,6 +3500,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0); format.type = v4l2object->type; + format.fmt.pix.width = width; format.fmt.pix.height = height; format.fmt.pix.pixelformat = pixelformat; @@ -3365,7 +3517,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, format.fmt.pix.sizeimage = ENCODED_BUFFER_SIZE; } - GST_DEBUG_OBJECT (v4l2object->element, "Desired format is %dx%d, format " + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired format is %dx%d, format " "%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width, format.fmt.pix_mp.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat), @@ -3374,31 +3526,30 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, #ifndef GST_DISABLE_GST_DEBUG if (is_mplane) { for (i = 0; i < format.fmt.pix_mp.num_planes; i++) - GST_DEBUG_OBJECT (v4l2object->element, " stride %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d", format.fmt.pix_mp.plane_fmt[i].bytesperline); } else { - GST_DEBUG_OBJECT (v4l2object->element, " stride %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d", format.fmt.pix.bytesperline); } #endif - if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) { - if (is_mplane) { - format.fmt.pix_mp.colorspace = colorspace; - format.fmt.pix_mp.quantization = range; - format.fmt.pix_mp.ycbcr_enc = matrix; - format.fmt.pix_mp.xfer_func = transfer; - } else { - format.fmt.pix.colorspace = colorspace; - format.fmt.pix.quantization = range; - format.fmt.pix.ycbcr_enc = matrix; - format.fmt.pix.xfer_func = transfer; - } - - GST_DEBUG_OBJECT (v4l2object->element, "Desired colorspace is %d:%d:%d:%d", - colorspace, range, matrix, transfer); + if (is_mplane) { + format.fmt.pix_mp.colorspace = colorspace; + format.fmt.pix_mp.quantization = range; + format.fmt.pix_mp.ycbcr_enc = matrix; + format.fmt.pix_mp.xfer_func = transfer; + } else { + format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC; + format.fmt.pix.colorspace = colorspace; + 
format.fmt.pix.quantization = range; + format.fmt.pix.ycbcr_enc = matrix; + format.fmt.pix.xfer_func = transfer; } + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d", + colorspace, range, matrix, transfer); + if (try_only) { if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0) goto try_fmt_failed; @@ -3407,21 +3558,33 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, goto set_fmt_failed; } - GST_DEBUG_OBJECT (v4l2object->element, "Got format of %dx%d, format " - "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d", + if (is_mplane) { + colorspace = format.fmt.pix_mp.colorspace; + range = format.fmt.pix_mp.quantization; + matrix = format.fmt.pix_mp.ycbcr_enc; + transfer = format.fmt.pix_mp.xfer_func; + } else { + colorspace = format.fmt.pix.colorspace; + range = format.fmt.pix.quantization; + matrix = format.fmt.pix.ycbcr_enc; + transfer = format.fmt.pix.xfer_func; + } + + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format " + "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d", format.fmt.pix.width, format.fmt.pix_mp.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat), is_mplane ? format.fmt.pix_mp.num_planes : 1, - is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace); + colorspace, range, matrix, transfer); #ifndef GST_DISABLE_GST_DEBUG if (is_mplane) { for (i = 0; i < format.fmt.pix_mp.num_planes; i++) - GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d", format.fmt.pix_mp.plane_fmt[i].bytesperline, format.fmt.pix_mp.plane_fmt[i].sizeimage); } else { - GST_DEBUG_OBJECT (v4l2object->element, " stride %d, sizeimage %d", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, " stride %d, sizeimage %d", format.fmt.pix.bytesperline, format.fmt.pix.sizeimage); } #endif @@ -3448,25 +3611,35 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes) goto invalid_planes; - if ((is_mplane && format.fmt.pix_mp.field != field) - || format.fmt.pix.field != field) - goto invalid_field; + /* used to check colorimetry and interlace mode fields presence */ + s = gst_caps_get_structure (caps, 0); - gst_v4l2_object_get_colorspace (&format, &info.colorimetry); + if (!gst_v4l2_object_get_interlace_mode (format.fmt.pix.field, + &info.interlace_mode)) + goto invalid_field; + if (gst_structure_has_field (s, "interlace-mode")) { + if (format.fmt.pix.field != field) + goto invalid_field; + } - s = gst_caps_get_structure (caps, 0); - if (gst_structure_has_field (s, "colorimetry")) { - GstVideoColorimetry ci; - if (!gst_video_colorimetry_from_string (&ci, - gst_structure_get_string (s, "colorimetry")) - || !gst_video_colorimetry_is_equal (&ci, &info.colorimetry)) - goto invalid_colorimetry; + if (gst_v4l2_object_get_colorspace (&format, &info.colorimetry)) { + if (gst_structure_has_field (s, "colorimetry")) { + if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry, + gst_structure_get_string (s, "colorimetry"))) + goto invalid_colorimetry; + } + } else { + /* The driver (or libv4l2) is miss-behaving, just ignore colorimetry from + * the TRY_FMT */ + disable_colorimetry = TRUE; + if (gst_structure_has_field (s, "colorimetry")) + gst_structure_remove_field (s, "colorimetry"); } /* In case we have skipped the try_fmt probes, we'll need to set the * colorimetry and interlace-mode back into the caps. 
*/ if (v4l2object->skip_try_fmt_probes) { - if (!gst_structure_has_field (s, "colorimetry")) { + if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) { gchar *str = gst_video_colorimetry_to_string (&info.colorimetry); gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL); g_free (str); @@ -3486,12 +3659,12 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, ctl.value = 0xff; if (v4l2object->ioctl (fd, VIDIOC_S_CTRL, &ctl) < 0) - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Failed to set alpha component value"); } /* Is there a reason we require the caller to always specify a framerate? */ - GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired framerate: %u/%u", fps_n, fps_d); memset (&streamparm, 0x00, sizeof (struct v4l2_streamparm)); @@ -3507,7 +3680,7 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator; - GST_DEBUG_OBJECT (v4l2object->element, "Got capture framerate: %u/%u", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got capture framerate: %u/%u", streamparm.parm.capture.timeperframe.denominator, streamparm.parm.capture.timeperframe.numerator); @@ -3516,12 +3689,12 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, * causing them to not output data (several models of Thinkpad cameras * have this problem at least). * So, don't skip. */ - GST_LOG_OBJECT (v4l2object->element, "Setting capture framerate to %u/%u", + GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting capture framerate to %u/%u", fps_n, fps_d); /* We want to change the frame rate, so check whether we can. 
Some cheap USB * cameras don't have the capability */ if ((streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Not setting capture framerate (not supported)"); goto done; } @@ -3540,11 +3713,11 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, fps_d = streamparm.parm.capture.timeperframe.numerator; fps_n = streamparm.parm.capture.timeperframe.denominator; - GST_INFO_OBJECT (v4l2object->element, "Set capture framerate to %u/%u", + GST_INFO_OBJECT (v4l2object->dbg_obj, "Set capture framerate to %u/%u", fps_n, fps_d); } else { /* fix v4l2 capture driver to provide framerate values */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Reuse caps framerate %u/%u - fix v4l2 capture driver", fps_n, fps_d); } @@ -3557,14 +3730,14 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.output.timeperframe.numerator; - GST_DEBUG_OBJECT (v4l2object->element, "Got output framerate: %u/%u", + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got output framerate: %u/%u", streamparm.parm.output.timeperframe.denominator, streamparm.parm.output.timeperframe.numerator); - GST_LOG_OBJECT (v4l2object->element, "Setting output framerate to %u/%u", + GST_LOG_OBJECT (v4l2object->dbg_obj, "Setting output framerate to %u/%u", fps_n, fps_d); if ((streamparm.parm.output.capability & V4L2_CAP_TIMEPERFRAME) == 0) { - GST_DEBUG_OBJECT (v4l2object->element, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Not setting output framerate (not supported)"); goto done; } @@ -3582,11 +3755,11 @@ gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps, fps_d = streamparm.parm.output.timeperframe.numerator; fps_n = streamparm.parm.output.timeperframe.denominator; - GST_INFO_OBJECT (v4l2object->element, "Set output framerate to %u/%u", + GST_INFO_OBJECT (v4l2object->dbg_obj, "Set output framerate to %u/%u", fps_n, fps_d); } else { /* fix v4l2 output driver to provide framerate values */ - GST_WARNING_OBJECT (v4l2object->element, + GST_WARNING_OBJECT (v4l2object->dbg_obj, "Reuse caps framerate %u/%u - fix v4l2 output driver", fps_n, fps_d); } @@ -3607,7 +3780,7 @@ done: /* ERRORS */ invalid_caps: { - GST_DEBUG_OBJECT (v4l2object->element, "can't parse caps %" GST_PTR_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "can't parse caps %" GST_PTR_FORMAT, caps); return FALSE; } @@ -3740,7 +3913,7 @@ gboolean gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error) { - GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %" GST_PTR_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Setting format to %" GST_PTR_FORMAT, caps); return gst_v4l2_object_set_format_full (v4l2object, caps, FALSE, error); } @@ -3749,7 +3922,7 @@ gboolean gst_v4l2_object_try_format (GstV4l2Object * v4l2object, GstCaps * caps, GstV4l2Error * error) { - GST_DEBUG_OBJECT (v4l2object->element, "Trying format %" GST_PTR_FORMAT, + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Trying format %" GST_PTR_FORMAT, caps); return gst_v4l2_object_set_format_full (v4l2object, caps, TRUE, error); } @@ -3882,35 +4055,52 @@ unsupported_format: gboolean gst_v4l2_object_set_crop (GstV4l2Object * obj) { + struct v4l2_selection sel = { 0 }; struct v4l2_crop crop = { 0 }; + sel.type = obj->type; + sel.target = V4L2_SEL_TGT_CROP; + sel.flags = 0; + sel.r.left = obj->align.padding_left; + sel.r.top = 
obj->align.padding_top; + sel.r.width = obj->info.width; + sel.r.height = obj->info.height; + crop.type = obj->type; - crop.c.left = obj->align.padding_left; - crop.c.top = obj->align.padding_top; - crop.c.width = obj->info.width; - crop.c.height = obj->info.height; + crop.c = sel.r; if (obj->align.padding_left + obj->align.padding_top + obj->align.padding_right + obj->align.padding_bottom == 0) { - GST_DEBUG_OBJECT (obj->element, "no cropping needed"); + GST_DEBUG_OBJECT (obj->dbg_obj, "no cropping needed"); return TRUE; } - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top, crop.c.width, crop.c.height); - if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) { - GST_WARNING_OBJECT (obj->element, "VIDIOC_S_CROP failed"); - return FALSE; - } + if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) { + if (errno != ENOTTY) { + GST_WARNING_OBJECT (obj->dbg_obj, + "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s", + g_strerror (errno)); + return FALSE; + } else { + if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) { + GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed"); + return FALSE; + } - if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) { - GST_WARNING_OBJECT (obj->element, "VIDIOC_G_CROP failed"); - return FALSE; + if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) { + GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed"); + return FALSE; + } + + sel.r = crop.c; + } } - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "Got cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top, crop.c.width, crop.c.height); @@ -3938,11 +4128,51 @@ gst_v4l2_object_caps_equal (GstV4l2Object * v4l2object, GstCaps * caps) } gboolean +gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps) +{ + GstStructure *config; + GstCaps *oldcaps; + gboolean ret; + + if (!v4l2object->pool) + return FALSE; + + config = gst_buffer_pool_get_config (v4l2object->pool); + gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL); + + ret = oldcaps && gst_caps_is_subset (oldcaps, caps); + + gst_structure_free (config); + + return ret; +} + +GstCaps * +gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object) +{ + GstStructure *config; + GstCaps *oldcaps; + + if (!v4l2object->pool) + return NULL; + + config = gst_buffer_pool_get_config (v4l2object->pool); + gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL); + + if (oldcaps) + gst_caps_ref (oldcaps); + + gst_structure_free (config); + + return oldcaps; +} + +gboolean gst_v4l2_object_unlock (GstV4l2Object * v4l2object) { gboolean ret = TRUE; - GST_LOG_OBJECT (v4l2object->element, "start flushing"); + GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing"); if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool)) gst_buffer_pool_set_flushing (v4l2object->pool, TRUE); @@ -3955,7 +4185,7 @@ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object) { gboolean ret = TRUE; - GST_LOG_OBJECT (v4l2object->element, "stop flushing"); + GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing"); if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool)) gst_buffer_pool_set_flushing (v4l2object->pool, FALSE); @@ -3966,7 +4196,7 @@ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object) gboolean gst_v4l2_object_stop (GstV4l2Object * v4l2object) { - GST_DEBUG_OBJECT (v4l2object->element, "stopping"); + GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping"); if 
@@ -3966,7 +4196,7 @@ gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
 gboolean
 gst_v4l2_object_stop (GstV4l2Object * v4l2object)
 {
-  GST_DEBUG_OBJECT (v4l2object->element, "stopping");
+  GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
 
   if (!GST_V4L2_IS_OPEN (v4l2object))
     goto done;
@@ -3974,9 +4204,11 @@ gst_v4l2_object_stop (GstV4l2Object * v4l2object)
     goto done;
 
   if (v4l2object->pool) {
-    GST_DEBUG_OBJECT (v4l2object->element, "deactivating pool");
-    gst_buffer_pool_set_active (v4l2object->pool, FALSE);
-    gst_object_unref (v4l2object->pool);
+    if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+      GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+      gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+      gst_object_unref (v4l2object->pool);
+    }
     v4l2object->pool = NULL;
   }
 
@@ -4005,10 +4237,10 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
   cropcap.type = v4l2object->type;
   if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_CROPCAP, &cropcap) < 0) {
     if (errno != ENOTTY)
-      GST_WARNING_OBJECT (v4l2object->element,
+      GST_WARNING_OBJECT (v4l2object->dbg_obj,
           "Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
           g_strerror (errno));
-  } else {
+  } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
     v4l2object->par = g_new0 (GValue, 1);
     g_value_init (v4l2object->par, GST_TYPE_FRACTION);
     gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
@@ -4026,7 +4258,7 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
     template = gst_v4l2_object_v4l2fourcc_to_bare_struct (format->pixelformat);
 
     if (!template) {
-      GST_DEBUG_OBJECT (v4l2object->element,
+      GST_DEBUG_OBJECT (v4l2object->dbg_obj,
          "unknown format %" GST_FOURCC_FORMAT,
          GST_FOURCC_ARGS (format->pixelformat));
       continue;
@@ -4052,6 +4284,25 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
     if (tmp)
       gst_caps_append (ret, tmp);
 
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+    if (format->pixelformat == V4L2_PIX_FMT_NV12 ||
+        format->pixelformat == V4L2_PIX_FMT_YUV420) {
+      GstStructure *alt_s = gst_structure_copy (template);
+
+      if (format->pixelformat == V4L2_PIX_FMT_NV12)
+        gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+      else
+        gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
+
+      tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+          format->pixelformat, alt_s);
+
+      if (tmp)
+        gst_caps_append (ret, tmp);
+
+      gst_structure_free (alt_s);
+    }
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
     gst_structure_free (template);
   }
 
@@ -4063,7 +4314,7 @@ gst_v4l2_object_probe_caps (GstV4l2Object * v4l2object, GstCaps * filter)
     gst_caps_unref (tmp);
   }
 
-  GST_INFO_OBJECT (v4l2object->element, "probed caps: %" GST_PTR_FORMAT, ret);
+  GST_INFO_OBJECT (v4l2object->dbg_obj, "probed caps: %" GST_PTR_FORMAT, ret);
 
   return ret;
 }
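The VIDIOC_CROPCAP change above stops turning a 0/0 pixelaspect reported by the driver into a GValue fraction, treating it as "unknown" instead. A small illustrative helper showing the same guard follows; it is a hypothetical sketch, not part of the patch.

/* Illustrative sketch, not from the patch: probe the pixel aspect ratio and
 * reject a 0/0 answer, which would otherwise be an invalid fraction. */
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* returns 1 and fills *num / *den only when the driver reports a usable PAR */
static int
probe_pixel_aspect (int fd, unsigned *num, unsigned *den)
{
  struct v4l2_cropcap cropcap;

  memset (&cropcap, 0, sizeof (cropcap));
  cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  if (ioctl (fd, VIDIOC_CROPCAP, &cropcap) < 0)
    return 0;                   /* ENOTTY and friends: no PAR information */

  if (cropcap.pixelaspect.numerator == 0 || cropcap.pixelaspect.denominator == 0)
    return 0;                   /* 0/0 means the driver does not know */

  *num = cropcap.pixelaspect.numerator;
  *den = cropcap.pixelaspect.denominator;
  return 1;
}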
"output" : "capture"); +#else /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ + GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation"); +#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */ g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE); @@ -4124,7 +4380,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) update = FALSE; } - GST_DEBUG_OBJECT (obj->element, "allocation: size:%u min:%u max:%u pool:%" + GST_DEBUG_OBJECT (obj->dbg_obj, "allocation: size:%u min:%u max:%u pool:%" GST_PTR_FORMAT, size, min, max, pool); has_video_meta = @@ -4143,7 +4399,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) if (pool) { /* in READ/WRITE mode, prefer a downstream pool because our own pool * doesn't help much, we have to write to it as well */ - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "read/write mode: using downstream pool"); /* use the bigest size, when we use our own pool we can't really do any * other size than what the hardware gives us but for downstream pools @@ -4151,7 +4407,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) size = MAX (size, obj->info.size); } else if (can_share_own_pool) { /* no downstream pool, use our own then */ - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "read/write mode: no downstream pool, using our own"); pool = gst_object_ref (obj->pool); size = obj->info.size; @@ -4182,22 +4438,22 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) gst_object_unref (pool); pool = gst_object_ref (obj->pool); size = obj->info.size; - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: using our own pool %" GST_PTR_FORMAT, pool); pushing_from_our_pool = TRUE; } else if (pool) { - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: copying to downstream pool %" GST_PTR_FORMAT, pool); } else { - GST_DEBUG_OBJECT (obj->element, + GST_DEBUG_OBJECT (obj->dbg_obj, "streaming mode: no usable pool, copying to generic pool"); size = MAX (size, obj->info.size); } break; case GST_V4L2_IO_AUTO: default: - GST_WARNING_OBJECT (obj->element, "unhandled mode"); + GST_WARNING_OBJECT (obj->dbg_obj, "unhandled mode"); break; } @@ -4250,7 +4506,7 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) config = gst_buffer_pool_get_config (obj->pool); if (obj->need_video_meta || has_video_meta) { - GST_DEBUG_OBJECT (obj->element, "activate Video Meta"); + GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta"); gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META); } @@ -4258,14 +4514,14 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query) gst_buffer_pool_config_set_allocator (config, allocator, ¶ms); gst_buffer_pool_config_set_params (config, caps, size, own_min, 0); - GST_DEBUG_OBJECT (obj->element, "setting own pool config to %" + GST_DEBUG_OBJECT (obj->dbg_obj, "setting own pool config to %" GST_PTR_FORMAT, config); /* Our pool often need to adjust the value */ if (!gst_buffer_pool_set_config (obj->pool, config)) { config = gst_buffer_pool_get_config (obj->pool); - GST_DEBUG_OBJECT (obj->element, "own pool config changed to %" + GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %" GST_PTR_FORMAT, config); /* our pool will adjust the maximum buffer, which we are fine with */ @@ -4282,12 +4538,12 @@ gst_v4l2_object_decide_allocation 
@@ -4282,12 +4538,12 @@ gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
     gst_buffer_pool_config_set_allocator (config, allocator, &params);
     gst_buffer_pool_config_set_params (config, caps, size, min, max);
 
-    GST_DEBUG_OBJECT (obj->element, "setting other pool config to %"
+    GST_DEBUG_OBJECT (obj->dbg_obj, "setting other pool config to %"
        GST_PTR_FORMAT, config);
 
     /* if downstream supports video metadata, add this to the pool config */
     if (has_video_meta) {
-      GST_DEBUG_OBJECT (obj->element, "activate Video Meta");
+      GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
       gst_buffer_pool_config_add_option (config,
           GST_BUFFER_POOL_OPTION_VIDEO_META);
     }
@@ -4381,8 +4637,16 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
   if (caps == NULL)
     goto no_caps;
 
-  if ((pool = obj->pool))
-    gst_object_ref (pool);
+  switch (obj->mode) {
+    case GST_V4L2_IO_MMAP:
+    case GST_V4L2_IO_DMABUF:
+      if ((pool = obj->pool))
+        gst_object_ref (pool);
+      break;
+    default:
+      pool = NULL;
+      break;
+  }
 
   if (pool != NULL) {
     GstCaps *pcaps;
@@ -4392,7 +4656,7 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
     config = gst_buffer_pool_get_config (pool);
     gst_buffer_pool_config_get_params (config, &pcaps, NULL, NULL, NULL);
 
-    GST_DEBUG_OBJECT (obj->element,
+    GST_DEBUG_OBJECT (obj->dbg_obj,
         "we had a pool with caps %" GST_PTR_FORMAT, pcaps);
     if (!gst_caps_is_equal (caps, pcaps)) {
       gst_structure_free (config);
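The propose_allocation change above only advertises the internal pool to upstream when the io-mode actually exports buffers (MMAP or DMABUF); in the import modes (USERPTR, DMABUF_IMPORT) the element expects upstream memory instead. Below is a rough sketch of answering an ALLOCATION query along those lines; the helper and its arguments are assumptions, not code from this patch.

/* Illustrative sketch, not from the patch: answer an ALLOCATION query,
 * offering a pool only when the element exports its own buffers. */
#include <gst/gst.h>
#include <gst/video/video.h>

static gboolean
answer_allocation_query (GstQuery * query, GstBufferPool * own_pool,
    gboolean exporting, guint size, guint min_buffers)
{
  GstCaps *caps;

  gst_query_parse_allocation (query, &caps, NULL);
  if (caps == NULL)
    return FALSE;

  if (exporting && own_pool != NULL)
    gst_query_add_allocation_pool (query, own_pool, size, min_buffers, 0);

  /* announce GstVideoMeta support so non-default strides can be described */
  gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);

  return TRUE;
}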
@@ -4418,13 +4682,164 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
   /* ERRORS */
 no_caps:
   {
-    GST_DEBUG_OBJECT (obj->element, "no caps specified");
+    GST_DEBUG_OBJECT (obj->dbg_obj, "no caps specified");
     return FALSE;
   }
 different_caps:
   {
     /* different caps, we can't use this pool */
-    GST_DEBUG_OBJECT (obj->element, "pool has different caps");
+    GST_DEBUG_OBJECT (obj->dbg_obj, "pool has different caps");
+    return FALSE;
+  }
+}
+
+gboolean
+gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
+{
+  GstVideoMeta *vmeta;
+  guint n_mem = gst_buffer_n_memory (buffer);
+
+  /* only import if requested */
+  switch (obj->mode) {
+    case GST_V4L2_IO_USERPTR:
+    case GST_V4L2_IO_DMABUF_IMPORT:
+      break;
+    default:
+      GST_DEBUG_OBJECT (obj->dbg_obj,
+          "The io-mode does not enable importation");
+      return FALSE;
+  }
+
+  vmeta = gst_buffer_get_video_meta (buffer);
+  if (!vmeta && obj->need_video_meta) {
+    GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
+        "stride/offset while the driver does not.");
     return FALSE;
   }
+
+  /* we need matching strides/offsets and size */
+  if (vmeta) {
+    guint p;
+    gboolean need_fmt_update = FALSE;
+
+    if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
+      GST_WARNING_OBJECT (obj->dbg_obj,
+          "Cannot import buffers with different number planes");
+      return FALSE;
+    }
+
+    for (p = 0; p < vmeta->n_planes; p++) {
+      if (vmeta->stride[p] < obj->info.stride[p]) {
+        GST_DEBUG_OBJECT (obj->dbg_obj,
+            "Not importing as remote stride %i is smaller then %i on plane %u",
+            vmeta->stride[p], obj->info.stride[p], p);
+        return FALSE;
+      } else if (vmeta->stride[p] > obj->info.stride[p]) {
+        need_fmt_update = TRUE;
+      }
+
+      if (vmeta->offset[p] < obj->info.offset[p]) {
+        GST_DEBUG_OBJECT (obj->dbg_obj,
+            "Not importing as offset %" G_GSIZE_FORMAT
+            " is smaller then %" G_GSIZE_FORMAT " on plane %u",
+            vmeta->offset[p], obj->info.offset[p], p);
+        return FALSE;
+      } else if (vmeta->offset[p] > obj->info.offset[p]) {
+        need_fmt_update = TRUE;
+      }
+    }
+
+    if (need_fmt_update) {
+      struct v4l2_format format;
+      gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };
+
+      format = obj->format;
+
+      /* update the current format with the stride we want to import from */
+      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+        guint i;
+
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");
+
+        for (i = 0; i < obj->n_v4l2_planes; i++) {
+          gint stride = vmeta->stride[i];
+
+          if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+            stride = GST_VIDEO_TILE_X_TILES (stride) <<
+                GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+          format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
+          wanted_stride[i] = stride;
+          GST_DEBUG_OBJECT (obj->dbg_obj, "  [%u] %i", i, wanted_stride[i]);
+        }
+      } else {
+        gint stride = vmeta->stride[0];
+
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", stride);
+
+        if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
+          stride = GST_VIDEO_TILE_X_TILES (stride) <<
+              GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+
+        format.fmt.pix.bytesperline = stride;
+        wanted_stride[0] = stride;
+      }
+
+      if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
+        GST_WARNING_OBJECT (obj->dbg_obj,
+            "Something went wrong trying to update current format: %s",
+            g_strerror (errno));
+        return FALSE;
+      }
+
+      gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
+          &obj->align);
+
+      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
+        guint i;
+
+        for (i = 0; i < obj->n_v4l2_planes; i++) {
+          if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
+            GST_DEBUG_OBJECT (obj->dbg_obj,
+                "[%i] Driver did not accept the new stride (wants %i, got %i)",
+                i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
+                wanted_stride[i]);
+            return FALSE;
+          }
+        }
+      } else {
+        if (format.fmt.pix.bytesperline != wanted_stride[0]) {
+          GST_DEBUG_OBJECT (obj->dbg_obj,
+              "Driver did not accept the new stride (wants %i, got %i)",
+              format.fmt.pix.bytesperline, wanted_stride[0]);
+          return FALSE;
+        }
+      }
+    }
+  }
+
+  /* we can always import single memory buffer, but otherwise we need the same
+   * amount of memory object. */
+  if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
+    GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
+        "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
+    return FALSE;
+  }
+
+  /* For DMABuf importation we need DMABuf of course */
+  if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
+    guint i;
+
+    for (i = 0; i < n_mem; i++) {
+      GstMemory *mem = gst_buffer_peek_memory (buffer, i);
+
+      if (!gst_is_dmabuf_memory (mem)) {
+        GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
+        return FALSE;
+      }
+    }
+  }
+
+  /* for the remaining, only the kernel driver can tell */
+  return TRUE;
 }
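gst_v4l2_object_try_import above only accepts a buffer for GST_V4L2_IO_DMABUF_IMPORT when every GstMemory it carries is a DMABuf, since that is the only memory that can be queued with V4L2_MEMORY_DMABUF. The following is a small standalone check in the same spirit; it is illustrative only and not part of the patch.

/* Illustrative sketch, not from the patch: verify that every memory in a
 * GstBuffer is backed by a DMABuf before attempting zero-copy import. */
#include <gst/gst.h>
#include <gst/allocators/gstdmabuf.h>

static gboolean
buffer_is_all_dmabuf (GstBuffer * buffer)
{
  guint i, n_mem = gst_buffer_n_memory (buffer);

  for (i = 0; i < n_mem; i++) {
    GstMemory *mem = gst_buffer_peek_memory (buffer, i);

    if (!gst_is_dmabuf_memory (mem))
      return FALSE;             /* e.g. plain system memory: must be copied */
  }

  return n_mem > 0;
}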