#include "gst/gst-i18n-plugin.h"
#include <gst/video/video.h>
+#include <gst/allocators/gstdmabuf.h>
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
#define GST_CAT_DEFAULT v4l2_debug
{V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW},
/* Palette formats */
{V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV42, TRUE, GST_V4L2_RAW},
/* Bayer formats - see http://www.siliconimaging.com/RGB%20Bayer.htm */
- {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_CODEC},
- {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_SBGGR8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGBRG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SGRBG8, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_SRGGB8, TRUE, GST_V4L2_RAW},
/* compressed formats */
{V4L2_PIX_FMT_MJPEG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_PJPG, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_DV, FALSE, GST_V4L2_TRANSPORT},
{V4L2_PIX_FMT_MPEG, FALSE, GST_V4L2_TRANSPORT},
+ {V4L2_PIX_FMT_FWHT, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_NO_SC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H264_MVC, FALSE, GST_V4L2_CODEC},
+ {V4L2_PIX_FMT_HEVC, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_H263, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG1, FALSE, GST_V4L2_CODEC},
{V4L2_PIX_FMT_MPEG2, FALSE, GST_V4L2_CODEC},
/* We now disable libv4l2 by default, but have an env to enable it. */
#ifdef HAVE_LIBV4L2
+#ifdef TIZEN_FEATURE_USE_LIBV4L2
+ if (1) {
+#else /* TIZEN_FEATURE_USE_LIBV4L2 */
if (g_getenv ("GST_V4L2_USE_LIBV4L2")) {
+#endif /* TIZEN_FEATURE_USE_LIBV4L2 */
v4l2object->fd_open = v4l2_fd_open;
v4l2object->close = v4l2_close;
v4l2object->dup = v4l2_dup;
break;
case PROP_DEVICE_NAME:
{
- const guchar *new = NULL;
+ const guchar *name = NULL;
- if (GST_V4L2_IS_OPEN (v4l2object)) {
- new = v4l2object->vcap.card;
- } else if (gst_v4l2_open (v4l2object)) {
- new = v4l2object->vcap.card;
- gst_v4l2_close (v4l2object);
- }
- g_value_set_string (value, (gchar *) new);
+ if (GST_V4L2_IS_OPEN (v4l2object))
+ name = v4l2object->vcap.card;
+
+ g_value_set_string (value, (gchar *) name);
break;
}
case PROP_DEVICE_FD:
if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"driver requires a minimum of %d buffers", control.value);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+#define DEFAULT_DECODER_OUTPUT_BUFFER_COUNT 5
+ if (!V4L2_TYPE_IS_OUTPUT (v4l2object->type) && control.value == 1) {
+ v4l2object->min_buffers = DEFAULT_DECODER_OUTPUT_BUFFER_COUNT;
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "but SET MIN BUFFER COUNT[%d] and it will be [%d] later",
+ v4l2object->min_buffers, v4l2object->min_buffers + 1);
+ }
+#else /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
v4l2object->min_buffers = control.value;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
} else {
v4l2object->min_buffers = 0;
}
v4l2object->par = NULL;
}
+ if (v4l2object->channel) {
+ g_free (v4l2object->channel);
+ v4l2object->channel = NULL;
+ }
+
return TRUE;
}
break;
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_INVZ:
rank = GREY_BASE_RANK;
break;
case V4L2_PIX_FMT_NV24:
format = GST_VIDEO_FORMAT_NV24;
break;
+ case V4L2_PIX_FMT_INVZ:
+ format = GST_VIDEO_FORMAT_INVZ;
+ break;
default:
format = GST_VIDEO_FORMAT_UNKNOWN;
break;
case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_ABGR32:
case V4L2_PIX_FMT_ARGB32:
+ case V4L2_PIX_FMT_SBGGR8:
+ case V4L2_PIX_FMT_SGBRG8:
+ case V4L2_PIX_FMT_SGRBG8:
+ case V4L2_PIX_FMT_SRGGB8:
ret = TRUE;
break;
default:
break;
case V4L2_PIX_FMT_MPEG1:
structure = gst_structure_new ("video/mpeg",
- "mpegversion", G_TYPE_INT, 2, NULL);
+ "mpegversion", G_TYPE_INT, 1, NULL);
break;
case V4L2_PIX_FMT_MPEG2:
structure = gst_structure_new ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, "systemstream",
G_TYPE_BOOLEAN, FALSE, NULL);
break;
+ case V4L2_PIX_FMT_FWHT:
+ structure = gst_structure_new_empty ("video/x-fwht");
+ break;
case V4L2_PIX_FMT_H263:
structure = gst_structure_new ("video/x-h263",
"variant", G_TYPE_STRING, "itu", NULL);
"stream-format", G_TYPE_STRING, "avc", "alignment",
G_TYPE_STRING, "au", NULL);
break;
+ case V4L2_PIX_FMT_HEVC: /* H.265 */
+ structure = gst_structure_new ("video/x-h265",
+ "stream-format", G_TYPE_STRING, "byte-stream", "alignment",
+ G_TYPE_STRING, "au", NULL);
+ break;
case V4L2_PIX_FMT_VC1_ANNEX_G:
case V4L2_PIX_FMT_VC1_ANNEX_L:
structure = gst_structure_new ("video/x-wmv",
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_YVYU:
- case V4L2_PIX_FMT_YUV411P:{
+ case V4L2_PIX_FMT_YUV411P:
+ case V4L2_PIX_FMT_INVZ:{
GstVideoFormat format;
format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
if (format != GST_VIDEO_FORMAT_UNKNOWN)
}
switch (gst_v4l2_formats[i].format) {
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case V4L2_PIX_FMT_YUV420:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
+ break;
+ case V4L2_PIX_FMT_NV12:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+ break;
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
case V4L2_PIX_FMT_RGB32:
alt_s = gst_structure_copy (structure);
gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
if (g_str_equal (mimetype, "video/x-raw")) {
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_I420:
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case GST_VIDEO_FORMAT_S420:
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
fourcc = V4L2_PIX_FMT_YUV420;
fourcc_nc = V4L2_PIX_FMT_YUV420M;
break;
fourcc = V4L2_PIX_FMT_YUV422P;
break;
case GST_VIDEO_FORMAT_NV12:
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ case GST_VIDEO_FORMAT_SN12:
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
fourcc = V4L2_PIX_FMT_NV12;
fourcc_nc = V4L2_PIX_FMT_NV12M;
break;
case GST_VIDEO_FORMAT_GRAY16_BE:
fourcc = V4L2_PIX_FMT_Y16_BE;
break;
+ case GST_VIDEO_FORMAT_INVZ:
+ fourcc = V4L2_PIX_FMT_INVZ;
+ break;
default:
break;
}
break;
}
}
+ } else if (g_str_equal (mimetype, "video/x-fwht")) {
+ fourcc = V4L2_PIX_FMT_FWHT;
} else if (g_str_equal (mimetype, "video/x-h263")) {
fourcc = V4L2_PIX_FMT_H263;
} else if (g_str_equal (mimetype, "video/x-h264")) {
fourcc = V4L2_PIX_FMT_H264_NO_SC;
else
fourcc = V4L2_PIX_FMT_H264;
+ } else if (g_str_equal (mimetype, "video/x-h265")) {
+ fourcc = V4L2_PIX_FMT_HEVC;
} else if (g_str_equal (mimetype, "video/x-vp8")) {
fourcc = V4L2_PIX_FMT_VP8;
} else if (g_str_equal (mimetype, "video/x-vp9")) {
gst_v4l2_object_get_interlace_mode (enum v4l2_field field,
GstVideoInterlaceMode * interlace_mode)
{
- /* NB: If you add new return values, please fix mode_strings in
- * gst_v4l2_object_add_interlace_mode */
switch (field) {
case V4L2_FIELD_ANY:
GST_ERROR
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
cinfo->primaries = GST_VIDEO_COLOR_PRIMARIES_BT709;
break;
- case V4L2_COLORSPACE_ADOBERGB:
+ case V4L2_COLORSPACE_OPRGB:
cinfo->range = GST_VIDEO_COLOR_RANGE_16_235;
cinfo->matrix = GST_VIDEO_COLOR_MATRIX_BT601;
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
switch (transfer) {
case V4L2_XFER_FUNC_709:
- if (fmt->fmt.pix.height >= 2160)
+ if (colorspace == V4L2_COLORSPACE_BT2020 && fmt->fmt.pix.height >= 2160)
cinfo->transfer = GST_VIDEO_TRANSFER_BT2020_12;
else
cinfo->transfer = GST_VIDEO_TRANSFER_BT709;
case V4L2_XFER_FUNC_SRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_SRGB;
break;
- case V4L2_XFER_FUNC_ADOBERGB:
+ case V4L2_XFER_FUNC_OPRGB:
cinfo->transfer = GST_VIDEO_TRANSFER_ADOBERGB;
break;
case V4L2_XFER_FUNC_SMPTE240M:
{
struct v4l2_format fmt;
GValue interlace_formats = { 0, };
+ enum v4l2_field formats[] = { V4L2_FIELD_NONE, V4L2_FIELD_INTERLACED };
+ gsize i;
GstVideoInterlaceMode interlace_mode, prev = -1;
- const gchar *mode_strings[] = { "progressive",
- "interleaved",
- "mixed"
- };
-
if (!g_str_equal (gst_structure_get_name (s), "video/x-raw"))
return;
g_value_init (&interlace_formats, GST_TYPE_LIST);
/* Try twice - once for NONE, once for INTERLACED. */
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_NONE;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
- prev = interlace_mode;
- }
-
- memset (&fmt, 0, sizeof (fmt));
- fmt.type = v4l2object->type;
- fmt.fmt.pix.width = width;
- fmt.fmt.pix.height = height;
- fmt.fmt.pix.pixelformat = pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
-
- if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
- gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode) &&
- prev != interlace_mode) {
- GValue interlace_enum = { 0, };
- g_value_init (&interlace_enum, G_TYPE_STRING);
- g_value_set_string (&interlace_enum, mode_strings[interlace_mode]);
- gst_value_list_append_and_take_value (&interlace_formats, &interlace_enum);
+ for (i = 0; i < G_N_ELEMENTS (formats); i++) {
+ memset (&fmt, 0, sizeof (fmt));
+ fmt.type = v4l2object->type;
+ fmt.fmt.pix.width = width;
+ fmt.fmt.pix.height = height;
+ fmt.fmt.pix.pixelformat = pixelformat;
+ fmt.fmt.pix.field = formats[i];
+
+ if (gst_v4l2_object_try_fmt (v4l2object, &fmt) == 0 &&
+ gst_v4l2_object_get_interlace_mode (fmt.fmt.pix.field, &interlace_mode)
+ && prev != interlace_mode) {
+ GValue interlace_enum = { 0, };
+ const gchar *mode_string;
+ g_value_init (&interlace_enum, G_TYPE_STRING);
+ mode_string = gst_video_interlace_mode_to_string (interlace_mode);
+ g_value_set_string (&interlace_enum, mode_string);
+ gst_value_list_append_and_take_value (&interlace_formats,
+ &interlace_enum);
+ prev = interlace_mode;
+ }
}
if (gst_v4l2src_value_simplify (&interlace_formats)
.flags = O_CLOEXEC | O_RDWR,
};
+ if (v4l2object->fmtdesc->flags & V4L2_FMT_FLAG_EMULATED) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "libv4l2 converter detected, disabling DMABuf");
+ ret = FALSE;
+ }
+
/* Expected to fail, but ENOTTY tells us that it is not implemented. */
v4l2object->ioctl (v4l2object->video_fd, VIDIOC_EXPBUF, &expbuf);
if (errno == ENOTTY)
mode = GST_V4L2_IO_MMAP;
}
}
- } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP)
+ } else if (v4l2object->req_mode == GST_V4L2_IO_MMAP ||
+ v4l2object->req_mode == GST_V4L2_IO_DMABUF)
goto method_not_supported;
/* if still no transport selected, error out */
}
static gboolean
+gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
+ const gchar * color)
+{
+ GstVideoColorimetry ci;
+ static const GstVideoColorimetry ci_likely_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_UNKNOWN, GST_VIDEO_COLOR_PRIMARIES_UNKNOWN
+ };
+ static const GstVideoColorimetry ci_jpeg = {
+ GST_VIDEO_COLOR_RANGE_0_255, GST_VIDEO_COLOR_MATRIX_BT601,
+ GST_VIDEO_TRANSFER_SRGB, GST_VIDEO_COLOR_PRIMARIES_BT709
+ };
+
+ if (!gst_video_colorimetry_from_string (&ci, color))
+ return FALSE;
+
+ if (gst_video_colorimetry_is_equal (&ci, cinfo))
+ return TRUE;
+
+ /* Allow 1:4:0:0 (produced by jpegdec) if the device expects 1:4:7:1 */
+ if (gst_video_colorimetry_is_equal (&ci, &ci_likely_jpeg)
+ && gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
+ return TRUE;
+
+ return FALSE;
+}
+
+static gboolean
gst_v4l2_object_set_format_full (GstV4l2Object * v4l2object, GstCaps * caps,
gboolean try_only, GstV4l2Error * error)
{
enum v4l2_ycbcr_encoding matrix = 0;
enum v4l2_xfer_func transfer = 0;
GstStructure *s;
+ gboolean disable_colorimetry = FALSE;
g_return_val_if_fail (!v4l2object->skip_try_fmt_probes ||
gst_caps_is_writable (caps), FALSE);
field = V4L2_FIELD_NONE;
}
- /* We first pick th main colorspace from the primaries */
+ /* We first pick the main colorspace from the primaries */
switch (info.colorimetry.primaries) {
case GST_VIDEO_COLOR_PRIMARIES_BT709:
/* There is two colorspaces using these primaries, use the range to
if (colorspace == 0) {
/* Try to guess colorspace according to pixelformat and size */
if (GST_VIDEO_INFO_IS_YUV (&info)) {
- /* SD streams likely use SMPTE170M and HD streams REC709 */
- if (width <= 720 && height <= 576)
- colorspace = V4L2_COLORSPACE_SMPTE170M;
- else
- colorspace = V4L2_COLORSPACE_REC709;
+ if (range == V4L2_QUANTIZATION_FULL_RANGE
+ && matrix == V4L2_YCBCR_ENC_601 && transfer == 0) {
+ /* Full range BT.601 YCbCr encoding with unknown primaries and transfer
+ * function most likely is JPEG */
+ colorspace = V4L2_COLORSPACE_JPEG;
+ transfer = V4L2_XFER_FUNC_SRGB;
+ } else {
+ /* SD streams likely use SMPTE170M and HD streams REC709 */
+ if (width <= 720 && height <= 576)
+ colorspace = V4L2_COLORSPACE_SMPTE170M;
+ else
+ colorspace = V4L2_COLORSPACE_REC709;
+ }
} else if (GST_VIDEO_INFO_IS_RGB (&info)) {
colorspace = V4L2_COLORSPACE_SRGB;
transfer = V4L2_XFER_FUNC_NONE;
}
#endif
- if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
- if (is_mplane) {
- format.fmt.pix_mp.colorspace = colorspace;
- format.fmt.pix_mp.quantization = range;
- format.fmt.pix_mp.ycbcr_enc = matrix;
- format.fmt.pix_mp.xfer_func = transfer;
- } else {
- format.fmt.pix.colorspace = colorspace;
- format.fmt.pix.quantization = range;
- format.fmt.pix.ycbcr_enc = matrix;
- format.fmt.pix.xfer_func = transfer;
- }
-
- GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
- colorspace, range, matrix, transfer);
+ if (is_mplane) {
+ format.fmt.pix_mp.colorspace = colorspace;
+ format.fmt.pix_mp.quantization = range;
+ format.fmt.pix_mp.ycbcr_enc = matrix;
+ format.fmt.pix_mp.xfer_func = transfer;
+ } else {
+ format.fmt.pix.priv = V4L2_PIX_FMT_PRIV_MAGIC;
+ format.fmt.pix.colorspace = colorspace;
+ format.fmt.pix.quantization = range;
+ format.fmt.pix.ycbcr_enc = matrix;
+ format.fmt.pix.xfer_func = transfer;
}
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Desired colorspace is %d:%d:%d:%d",
+ colorspace, range, matrix, transfer);
+
if (try_only) {
if (v4l2object->ioctl (fd, VIDIOC_TRY_FMT, &format) < 0)
goto try_fmt_failed;
goto set_fmt_failed;
}
+ if (is_mplane) {
+ colorspace = format.fmt.pix_mp.colorspace;
+ range = format.fmt.pix_mp.quantization;
+ matrix = format.fmt.pix_mp.ycbcr_enc;
+ transfer = format.fmt.pix_mp.xfer_func;
+ } else {
+ colorspace = format.fmt.pix.colorspace;
+ range = format.fmt.pix.quantization;
+ matrix = format.fmt.pix.ycbcr_enc;
+ transfer = format.fmt.pix.xfer_func;
+ }
+
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "Got format of %dx%d, format "
- "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d",
+ "%" GST_FOURCC_FORMAT ", nb planes %d, colorspace %d:%d:%d:%d",
format.fmt.pix.width, format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
is_mplane ? format.fmt.pix_mp.num_planes : 1,
- is_mplane ? format.fmt.pix_mp.colorspace : format.fmt.pix.colorspace);
+ colorspace, range, matrix, transfer);
#ifndef GST_DISABLE_GST_DEBUG
if (is_mplane) {
if (is_mplane && format.fmt.pix_mp.num_planes != n_v4l_planes)
goto invalid_planes;
- if ((is_mplane && format.fmt.pix_mp.field != field)
- || format.fmt.pix.field != field)
- goto invalid_field;
+ /* used to check colorimetry and interlace mode fields presence */
+ s = gst_caps_get_structure (caps, 0);
- gst_v4l2_object_get_colorspace (&format, &info.colorimetry);
+ if (!gst_v4l2_object_get_interlace_mode (format.fmt.pix.field,
+ &info.interlace_mode))
+ goto invalid_field;
+ if (gst_structure_has_field (s, "interlace-mode")) {
+ if (format.fmt.pix.field != field)
+ goto invalid_field;
+ }
- s = gst_caps_get_structure (caps, 0);
- if (gst_structure_has_field (s, "colorimetry")) {
- GstVideoColorimetry ci;
- if (!gst_video_colorimetry_from_string (&ci,
- gst_structure_get_string (s, "colorimetry"))
- || !gst_video_colorimetry_is_equal (&ci, &info.colorimetry))
- goto invalid_colorimetry;
+ if (gst_v4l2_object_get_colorspace (&format, &info.colorimetry)) {
+ if (gst_structure_has_field (s, "colorimetry")) {
+ if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry,
+ gst_structure_get_string (s, "colorimetry")))
+ goto invalid_colorimetry;
+ }
+ } else {
+ /* The driver (or libv4l2) is miss-behaving, just ignore colorimetry from
+ * the TRY_FMT */
+ disable_colorimetry = TRUE;
+ if (gst_structure_has_field (s, "colorimetry"))
+ gst_structure_remove_field (s, "colorimetry");
}
/* In case we have skipped the try_fmt probes, we'll need to set the
* colorimetry and interlace-mode back into the caps. */
if (v4l2object->skip_try_fmt_probes) {
- if (!gst_structure_has_field (s, "colorimetry")) {
+ if (!disable_colorimetry && !gst_structure_has_field (s, "colorimetry")) {
gchar *str = gst_video_colorimetry_to_string (&info.colorimetry);
gst_structure_set (s, "colorimetry", G_TYPE_STRING, str, NULL);
g_free (str);
gboolean
gst_v4l2_object_set_crop (GstV4l2Object * obj)
{
+ struct v4l2_selection sel = { 0 };
struct v4l2_crop crop = { 0 };
+ sel.type = obj->type;
+ sel.target = V4L2_SEL_TGT_CROP;
+ sel.flags = 0;
+ sel.r.left = obj->align.padding_left;
+ sel.r.top = obj->align.padding_top;
+ sel.r.width = obj->info.width;
+ sel.r.height = obj->info.height;
+
crop.type = obj->type;
- crop.c.left = obj->align.padding_left;
- crop.c.top = obj->align.padding_top;
- crop.c.width = obj->info.width;
- crop.c.height = obj->info.height;
+ crop.c = sel.r;
if (obj->align.padding_left + obj->align.padding_top +
obj->align.padding_right + obj->align.padding_bottom == 0) {
"Desired cropping left %u, top %u, size %ux%u", crop.c.left, crop.c.top,
crop.c.width, crop.c.height);
- if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
- return FALSE;
- }
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_SELECTION, &sel) < 0) {
+ if (errno != ENOTTY) {
+ GST_WARNING_OBJECT (obj->dbg_obj,
+ "Failed to set crop rectangle with VIDIOC_S_SELECTION: %s",
+ g_strerror (errno));
+ return FALSE;
+ } else {
+ if (obj->ioctl (obj->video_fd, VIDIOC_S_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_S_CROP failed");
+ return FALSE;
+ }
- if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
- GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
- return FALSE;
+ if (obj->ioctl (obj->video_fd, VIDIOC_G_CROP, &crop) < 0) {
+ GST_WARNING_OBJECT (obj->dbg_obj, "VIDIOC_G_CROP failed");
+ return FALSE;
+ }
+
+ sel.r = crop.c;
+ }
}
GST_DEBUG_OBJECT (obj->dbg_obj,
}
gboolean
+gst_v4l2_object_caps_is_subset (GstV4l2Object * v4l2object, GstCaps * caps)
+{
+ GstStructure *config;
+ GstCaps *oldcaps;
+ gboolean ret;
+
+ if (!v4l2object->pool)
+ return FALSE;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
+
+ gst_structure_free (config);
+
+ return ret;
+}
+
+GstCaps *
+gst_v4l2_object_get_current_caps (GstV4l2Object * v4l2object)
+{
+ GstStructure *config;
+ GstCaps *oldcaps;
+
+ if (!v4l2object->pool)
+ return NULL;
+
+ config = gst_buffer_pool_get_config (v4l2object->pool);
+ gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
+
+ if (oldcaps)
+ gst_caps_ref (oldcaps);
+
+ gst_structure_free (config);
+
+ return oldcaps;
+}
+
+gboolean
gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
{
gboolean ret = TRUE;
goto done;
if (v4l2object->pool) {
- GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
- gst_buffer_pool_set_active (v4l2object->pool, FALSE);
- gst_object_unref (v4l2object->pool);
+ if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
+ gst_buffer_pool_set_active (v4l2object->pool, FALSE);
+ gst_object_unref (v4l2object->pool);
+ }
v4l2object->pool = NULL;
}
GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Failed to probe pixel aspect ratio with VIDIOC_CROPCAP: %s",
g_strerror (errno));
- } else {
+ } else if (cropcap.pixelaspect.numerator && cropcap.pixelaspect.denominator) {
v4l2object->par = g_new0 (GValue, 1);
g_value_init (v4l2object->par, GST_TYPE_FRACTION);
gst_value_set_fraction (v4l2object->par, cropcap.pixelaspect.numerator,
if (tmp)
gst_caps_append (ret, tmp);
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ if (format->pixelformat == V4L2_PIX_FMT_NV12 ||
+ format->pixelformat == V4L2_PIX_FMT_YUV420) {
+ GstStructure *alt_s = gst_structure_copy (template);
+
+ if (format->pixelformat == V4L2_PIX_FMT_NV12)
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+ else
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
+
+ tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+ format->pixelformat, alt_s);
+
+ if (tmp)
+ gst_caps_append (ret, tmp);
+
+ gst_structure_free (alt_s);
+ }
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
gst_structure_free (template);
}
GstAllocator *allocator = NULL;
GstAllocationParams params = { 0 };
+#ifdef TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER
+ GST_INFO_OBJECT (obj->dbg_obj, "decide allocation - %s",
+ V4L2_TYPE_IS_OUTPUT (obj->type) ? "output" : "capture");
+#else /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
+#endif /* TIZEN_FEATURE_TBM_SUPPORT_FOR_V4L2_DECODER */
g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
if (caps == NULL)
goto no_caps;
- if ((pool = obj->pool))
- gst_object_ref (pool);
+ switch (obj->mode) {
+ case GST_V4L2_IO_MMAP:
+ case GST_V4L2_IO_DMABUF:
+ if ((pool = obj->pool))
+ gst_object_ref (pool);
+ break;
+ default:
+ pool = NULL;
+ break;
+ }
if (pool != NULL) {
GstCaps *pcaps;
return FALSE;
}
}
+
/* gst_v4l2_object_try_import:
 * @obj: the #GstV4l2Object
 * @buffer: a downstream-allocated #GstBuffer we would like to queue directly
 *
 * Checks whether @buffer can be imported into the V4L2 queue (USERPTR or
 * DMABUF-import io-mode only). Verifies that the buffer's video meta (plane
 * count, strides, offsets) is compatible with the currently negotiated
 * format, and when the downstream layout uses larger strides/offsets, tries
 * to update the driver's format via VIDIOC_S_FMT to match. Also checks the
 * memory layout (single memory or one memory per V4L2 plane) and, for
 * DMABUF import, that every memory is a DMABuf.
 *
 * Returns: TRUE if importation looks possible (the driver still has the
 * final say at QBUF time), FALSE otherwise. May modify the driver format
 * and the object's saved format/info as a side effect.
 */
gboolean
gst_v4l2_object_try_import (GstV4l2Object * obj, GstBuffer * buffer)
{
  GstVideoMeta *vmeta;
  guint n_mem = gst_buffer_n_memory (buffer);

  /* only import if requested */
  switch (obj->mode) {
    case GST_V4L2_IO_USERPTR:
    case GST_V4L2_IO_DMABUF_IMPORT:
      break;
    default:
      GST_DEBUG_OBJECT (obj->dbg_obj,
          "The io-mode does not enable importation");
      return FALSE;
  }

  /* without a video meta we must assume the default stride/offset layout;
   * that only works if the driver does not need a custom layout */
  vmeta = gst_buffer_get_video_meta (buffer);
  if (!vmeta && obj->need_video_meta) {
    GST_DEBUG_OBJECT (obj->dbg_obj, "Downstream buffer uses standard "
        "stride/offset while the driver does not.");
    return FALSE;
  }

  /* we need matching strides/offsets and size */
  if (vmeta) {
    guint p;
    gboolean need_fmt_update = FALSE;

    if (vmeta->n_planes != GST_VIDEO_INFO_N_PLANES (&obj->info)) {
      GST_WARNING_OBJECT (obj->dbg_obj,
          "Cannot import buffers with different number planes");
      return FALSE;
    }

    /* smaller-than-negotiated strides/offsets can never work; larger ones
     * may work if the driver accepts the new layout (checked below) */
    for (p = 0; p < vmeta->n_planes; p++) {
      if (vmeta->stride[p] < obj->info.stride[p]) {
        GST_DEBUG_OBJECT (obj->dbg_obj,
            "Not importing as remote stride %i is smaller then %i on plane %u",
            vmeta->stride[p], obj->info.stride[p], p);
        return FALSE;
      } else if (vmeta->stride[p] > obj->info.stride[p]) {
        need_fmt_update = TRUE;
      }

      if (vmeta->offset[p] < obj->info.offset[p]) {
        GST_DEBUG_OBJECT (obj->dbg_obj,
            "Not importing as offset %" G_GSIZE_FORMAT
            " is smaller then %" G_GSIZE_FORMAT " on plane %u",
            vmeta->offset[p], obj->info.offset[p], p);
        return FALSE;
      } else if (vmeta->offset[p] > obj->info.offset[p]) {
        need_fmt_update = TRUE;
      }
    }

    if (need_fmt_update) {
      struct v4l2_format format;
      gint wanted_stride[GST_VIDEO_MAX_PLANES] = { 0, };

      format = obj->format;

      /* update the current format with the stride we want to import from */
      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
        guint i;

        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted strides:");

        for (i = 0; i < obj->n_v4l2_planes; i++) {
          gint stride = vmeta->stride[i];

          /* for tiled formats the GstVideoMeta stride encodes tile counts;
           * convert to the byte stride V4L2 expects */
          if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
            stride = GST_VIDEO_TILE_X_TILES (stride) <<
                GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);

          format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
          wanted_stride[i] = stride;
          GST_DEBUG_OBJECT (obj->dbg_obj, "  [%u] %i", i, wanted_stride[i]);
        }
      } else {
        gint stride = vmeta->stride[0];

        GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", stride);

        /* same tile-count-to-bytes conversion as the multi-planar case */
        if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
          stride = GST_VIDEO_TILE_X_TILES (stride) <<
              GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);

        format.fmt.pix.bytesperline = stride;
        wanted_stride[0] = stride;
      }

      if (obj->ioctl (obj->video_fd, VIDIOC_S_FMT, &format) < 0) {
        GST_WARNING_OBJECT (obj->dbg_obj,
            "Something went wrong trying to update current format: %s",
            g_strerror (errno));
        return FALSE;
      }

      /* S_FMT succeeded: persist the (possibly driver-adjusted) format */
      gst_v4l2_object_save_format (obj, obj->fmtdesc, &format, &obj->info,
          &obj->align);

      /* the driver may have clamped/changed bytesperline; importation only
       * works if it accepted exactly the strides we asked for */
      if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
        guint i;

        for (i = 0; i < obj->n_v4l2_planes; i++) {
          if (format.fmt.pix_mp.plane_fmt[i].bytesperline != wanted_stride[i]) {
            /* NOTE(review): "wants"/"got" arguments look swapped — the first
             * value printed is what the driver returned, the second is what
             * we requested; confirm against upstream before changing */
            GST_DEBUG_OBJECT (obj->dbg_obj,
                "[%i] Driver did not accept the new stride (wants %i, got %i)",
                i, format.fmt.pix_mp.plane_fmt[i].bytesperline,
                wanted_stride[i]);
            return FALSE;
          }
        }
      } else {
        if (format.fmt.pix.bytesperline != wanted_stride[0]) {
          /* NOTE(review): same apparent wants/got swap as above */
          GST_DEBUG_OBJECT (obj->dbg_obj,
              "Driver did not accept the new stride (wants %i, got %i)",
              format.fmt.pix.bytesperline, wanted_stride[0]);
          return FALSE;
        }
      }
    }
  }

  /* we can always import single memory buffer, but otherwise we need the same
   * amount of memory object. */
  if (n_mem != 1 && n_mem != obj->n_v4l2_planes) {
    GST_DEBUG_OBJECT (obj->dbg_obj, "Can only import %i memory, "
        "buffers contains %u memory", obj->n_v4l2_planes, n_mem);
    return FALSE;
  }

  /* For DMABuf importation we need DMABuf of course */
  if (obj->mode == GST_V4L2_IO_DMABUF_IMPORT) {
    guint i;

    for (i = 0; i < n_mem; i++) {
      GstMemory *mem = gst_buffer_peek_memory (buffer, i);

      if (!gst_is_dmabuf_memory (mem)) {
        GST_DEBUG_OBJECT (obj->dbg_obj, "Cannot import non-DMABuf memory.");
        return FALSE;
      }
    }
  }

  /* for the remaining, only the kernel driver can tell */
  return TRUE;
}