#include "gstv4l2tuner.h"
#include "gstv4l2colorbalance.h"
-#include "gst/gst-i18n-plugin.h"
+#include <glib/gi18n-lib.h>
#include <gst/video/video.h>
#include <gst/allocators/gstdmabuf.h>
#define DEFAULT_PROP_IO_MODE GST_V4L2_IO_AUTO
#define ENCODED_BUFFER_SIZE (2 * 1024 * 1024)
+#define GST_V4L2_DEFAULT_WIDTH 320
+#define GST_V4L2_DEFAULT_HEIGHT 240
enum
{
{V4L2_PIX_FMT_Y16, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y16_BE, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_Y10BPACK, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_INVZ, TRUE, GST_V4L2_RAW},
/* Palette formats */
{V4L2_PIX_FMT_PAL8, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV12M, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV12MT, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV12MT_16X16, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV12M_8L128, TRUE, GST_V4L2_RAW},
+ {V4L2_PIX_FMT_NV12M_10BE_8L128, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV21, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV21M, TRUE, GST_V4L2_RAW},
{V4L2_PIX_FMT_NV16, TRUE, GST_V4L2_RAW},
v4l2object->no_initial_format = FALSE;
+ v4l2object->poll = gst_poll_new (TRUE);
+ v4l2object->can_poll_device = TRUE;
+
/* We now disable libv4l2 by default, but have an env to enable it. */
#ifdef HAVE_LIBV4L2
+#ifdef TIZEN_FEATURE_USE_LIBV4L2
+ if (1) {
+#else /* TIZEN_FEATURE_USE_LIBV4L2 */
if (g_getenv ("GST_V4L2_USE_LIBV4L2")) {
+#endif /* TIZEN_FEATURE_USE_LIBV4L2 */
v4l2object->fd_open = v4l2_fd_open;
v4l2object->close = v4l2_close;
v4l2object->dup = v4l2_dup;
return v4l2object;
}
+
+/* Free the cached list of formats enumerated from the driver and reset the
+ * current format descriptor, which pointed into that list (clearing it avoids
+ * a dangling pointer).  Safe to call when the list is already empty, since
+ * g_slist_foreach()/g_slist_free() accept NULL.  Always returns TRUE. */
+static gboolean
+gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
+{
+  g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
+  g_slist_free (v4l2object->formats);
+  v4l2object->formats = NULL;
+  /* fmtdesc pointed into the freed list; drop it too */
+  v4l2object->fmtdesc = NULL;
+
+  return TRUE;
+}
+
+
void
gst_v4l2_object_destroy (GstV4l2Object * v4l2object)
{
  g_free (v4l2object->par);
  g_free (v4l2object->channel);
+  /* the GstPoll is allocated when the object is created (gst_poll_new in the
+   * init path), so it is always valid to free here */
+  gst_poll_free (v4l2object->poll);
+
  if (v4l2object->formats) {
    gst_v4l2_object_clear_format_list (v4l2object);
  }
}
-gboolean
-gst_v4l2_object_clear_format_list (GstV4l2Object * v4l2object)
-{
- g_slist_foreach (v4l2object->formats, (GFunc) g_free, NULL);
- g_slist_free (v4l2object->formats);
- v4l2object->formats = NULL;
-
- return TRUE;
-}
-
static gint
gst_v4l2_object_prop_to_cid (guint prop_id)
{
if (v4l2object->ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) == 0) {
GST_DEBUG_OBJECT (v4l2object->dbg_obj,
"driver requires a minimum of %d buffers", control.value);
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+#define DEFAULT_DECODER_OUTPUT_BUFFER_COUNT 5
+ if (v4l2object->tbm_output &&
+ !V4L2_TYPE_IS_OUTPUT (v4l2object->type) && control.value == 1) {
+ v4l2object->min_buffers = DEFAULT_DECODER_OUTPUT_BUFFER_COUNT;
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "but SET MIN BUFFER COUNT[%d] and it will be [%d] later",
+ v4l2object->min_buffers, v4l2object->min_buffers + 1);
+ } else {
+ v4l2object->min_buffers = control.value;
+ }
+#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
v4l2object->min_buffers = control.value;
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
} else {
v4l2object->min_buffers = 0;
}
}
}
+/* Prepare the GstPoll for the freshly opened video_fd: register the fd and
+ * arm it for write readiness on OUTPUT queues (we queue buffers into the
+ * device) or read readiness on CAPTURE queues (we dequeue buffers from it).
+ * Polling is re-enabled optimistically; gst_v4l2_object_poll() clears
+ * can_poll_device again if the driver turns out not to support polling. */
+static void
+gst_v4l2_object_init_poll (GstV4l2Object * v4l2object)
+{
+  gst_poll_fd_init (&v4l2object->pollfd);
+  v4l2object->pollfd.fd = v4l2object->video_fd;
+  gst_poll_add_fd (v4l2object->poll, &v4l2object->pollfd);
+  if (V4L2_TYPE_IS_OUTPUT (v4l2object->type))
+    gst_poll_fd_ctl_write (v4l2object->poll, &v4l2object->pollfd, TRUE);
+  else
+    gst_poll_fd_ctl_read (v4l2object->poll, &v4l2object->pollfd, TRUE);
+
+  v4l2object->can_poll_device = TRUE;
+}
+
gboolean
gst_v4l2_object_open (GstV4l2Object * v4l2object, GstV4l2Error * error)
{
else
return FALSE;
+ gst_v4l2_object_init_poll (v4l2object);
+
return TRUE;
}
gboolean
gst_v4l2_object_open_shared (GstV4l2Object * v4l2object, GstV4l2Object * other)
{
-  gboolean ret;
-
-  ret = gst_v4l2_dup (v4l2object, other);
+  /* only set up polling on the duplicated fd once the dup succeeded; on
+   * failure the pollfd must not be registered against an invalid fd */
+  if (gst_v4l2_dup (v4l2object, other)) {
+    gst_v4l2_object_init_poll (v4l2object);
+    return TRUE;
+  }
-  return ret;
+  return FALSE;
}
gboolean
break;
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_INVZ:
rank = GREY_BASE_RANK;
break;
case V4L2_PIX_FMT_NV61M: /* Same as NV61 */
case V4L2_PIX_FMT_NV24: /* 24 Y/CrCb 4:4:4 */
case V4L2_PIX_FMT_MM21: /* NV12 Y 16x32, UV 16x16 tile */
+ case V4L2_PIX_FMT_NV12M_8L128:
+ case V4L2_PIX_FMT_NV12M_10BE_8L128:
rank = YUV_ODD_BASE_RANK;
break;
rank = YUV_BASE_RANK + 10;
break;
case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */
+ case V4L2_PIX_FMT_YVU420M:
rank = YUV_BASE_RANK + 6;
break;
case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */
case V4L2_PIX_FMT_MM21:
format = GST_VIDEO_FORMAT_NV12_16L32S;
break;
+ case V4L2_PIX_FMT_NV12M_8L128:
+ format = GST_VIDEO_FORMAT_NV12_8L128;
+ break;
+ case V4L2_PIX_FMT_NV12M_10BE_8L128:
+ format = GST_VIDEO_FORMAT_NV12_10BE_8L128;
+ break;
case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_NV21M:
format = GST_VIDEO_FORMAT_NV21;
format = GST_VIDEO_FORMAT_YUY2;
break;
case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_YVU420M:
format = GST_VIDEO_FORMAT_YV12;
break;
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_NV24:
format = GST_VIDEO_FORMAT_NV24;
break;
+ case V4L2_PIX_FMT_INVZ:
+ format = GST_VIDEO_FORMAT_INVZ;
+ break;
default:
format = GST_VIDEO_FORMAT_UNKNOWN;
break;
case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
case V4L2_PIX_FMT_PJPG: /* Progressive-JPEG */
case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
- structure = gst_structure_new_empty ("image/jpeg");
+ structure = gst_structure_new ("image/jpeg",
+ "parsed", G_TYPE_BOOLEAN, TRUE, NULL);
break;
case V4L2_PIX_FMT_MPEG1:
structure = gst_structure_new ("video/mpeg",
case V4L2_PIX_FMT_NV12M:
case V4L2_PIX_FMT_NV12MT:
case V4L2_PIX_FMT_MM21:
+ case V4L2_PIX_FMT_NV12M_8L128:
+ case V4L2_PIX_FMT_NV12M_10BE_8L128:
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_NV21M:
case V4L2_PIX_FMT_NV16: /* 16 Y/CbCr 4:2:2 */
case V4L2_PIX_FMT_YUV420M:
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_YVU420M:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV422P:
case V4L2_PIX_FMT_YVYU:
- case V4L2_PIX_FMT_YUV411P:{
+ case V4L2_PIX_FMT_YUV411P:
+ case V4L2_PIX_FMT_INVZ:{
GstVideoFormat format;
format = gst_v4l2_object_v4l2fourcc_to_video_format (fourcc);
if (format != GST_VIDEO_FORMAT_UNKNOWN)
return template;
}
+/* Report whether the currently negotiated format is raw (uncompressed).
+ * Any GstVideoFormat other than ENCODED is raw by definition; for ENCODED
+ * formats the current fourcc is looked up in the static gst_v4l2_formats
+ * table and its GST_V4L2_RAW flag is tested.  A fourcc not present in the
+ * table is reported as not raw. */
+gboolean
+gst_v4l2_object_is_raw (GstV4l2Object * v4l2object)
+{
+  gint i;
+
+  if (GST_VIDEO_INFO_FORMAT (&v4l2object->info) != GST_VIDEO_FORMAT_ENCODED)
+    return TRUE;
+
+  for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
+    if (gst_v4l2_formats[i].format == GST_V4L2_PIXELFORMAT (v4l2object)) {
+      /* "! !" normalizes the masked flag bit to a clean TRUE/FALSE */
+      return ! !(gst_v4l2_formats[i].flags & GST_V4L2_RAW);
+    }
+  }
+  return FALSE;
+}
+
/* Add an 'alternate' variant of the caps with the feature */
static void
add_alternate_variant (GstV4l2Object * v4l2object, GstCaps * caps,
}
switch (gst_v4l2_formats[i].format) {
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+ case V4L2_PIX_FMT_YUV420:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
+ break;
+ case V4L2_PIX_FMT_NV12:
+ alt_s = gst_structure_copy (structure);
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+ break;
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
case V4L2_PIX_FMT_RGB32:
alt_s = gst_structure_copy (structure);
gst_structure_set (alt_s, "format", G_TYPE_STRING, "ARGB", NULL);
if (g_str_equal (mimetype, "video/x-raw")) {
switch (GST_VIDEO_INFO_FORMAT (info)) {
case GST_VIDEO_FORMAT_I420:
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+ case GST_VIDEO_FORMAT_S420:
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
fourcc = V4L2_PIX_FMT_YUV420;
fourcc_nc = V4L2_PIX_FMT_YUV420M;
break;
break;
case GST_VIDEO_FORMAT_YV12:
fourcc = V4L2_PIX_FMT_YVU420;
+ fourcc_nc = V4L2_PIX_FMT_YVU420M;
break;
case GST_VIDEO_FORMAT_Y41B:
fourcc = V4L2_PIX_FMT_YUV411P;
fourcc = V4L2_PIX_FMT_YUV422P;
break;
case GST_VIDEO_FORMAT_NV12:
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+ case GST_VIDEO_FORMAT_SN12:
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
fourcc = V4L2_PIX_FMT_NV12;
fourcc_nc = V4L2_PIX_FMT_NV12M;
break;
case GST_VIDEO_FORMAT_NV12_16L32S:
fourcc_nc = V4L2_PIX_FMT_MM21;
break;
+ case GST_VIDEO_FORMAT_NV12_8L128:
+ fourcc_nc = V4L2_PIX_FMT_NV12M_8L128;
+ break;
+ case GST_VIDEO_FORMAT_NV12_10BE_8L128:
+ fourcc_nc = V4L2_PIX_FMT_NV12M_10BE_8L128;
+ break;
case GST_VIDEO_FORMAT_NV21:
fourcc = V4L2_PIX_FMT_NV21;
fourcc_nc = V4L2_PIX_FMT_NV21M;
fourcc = V4L2_PIX_FMT_RGB555X;
fourcc_nc = V4L2_PIX_FMT_XRGB555X;
break;
+ case GST_VIDEO_FORMAT_INVZ:
+ fourcc = V4L2_PIX_FMT_INVZ;
+ break;
default:
break;
}
/* ERRORS */
invalid_format:
{
- GST_DEBUG_OBJECT (v4l2object, "invalid format");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "invalid format");
return FALSE;
}
unhandled_format:
{
- GST_DEBUG_OBJECT (v4l2object, "unhandled format");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "unhandled format");
return FALSE;
}
unsupported_format:
{
- GST_DEBUG_OBJECT (v4l2object, "unsupported format");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "unsupported format");
return FALSE;
}
}
|| gst_value_list_get_size (&interlace_formats) > 0)
gst_structure_take_value (s, "interlace-mode", &interlace_formats);
else
- GST_WARNING_OBJECT (v4l2object, "Failed to determine interlace mode");
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
+ "Failed to determine interlace mode");
return;
}
GstVideoColorimetry cinfo;
enum v4l2_colorspace req_cspace;
+#ifdef TIZEN_FEATURE_V4L2_SKIP_ADD_COLORSPACE
+ if (V4L2_TYPE_IS_OUTPUT (v4l2object->type)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "skip for output");
+ return;
+ }
+#endif
memset (&fmt, 0, sizeof (fmt));
fmt.type = v4l2object->type;
fmt.fmt.pix.width = width;
if (colorspace == req_cspace) {
if (gst_v4l2_object_get_colorspace (v4l2object, &fmt, &cinfo))
gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
+ if (colorspace == V4L2_COLORSPACE_REC709) {
+ /* support for full-range variants of colorspaces V4L2_COLORSPACE_REC709
+ * (such as Apple's full-range bt709 variant 1:3:5:1) */
+ struct v4l2_format alt_fmt;
+ memcpy (&alt_fmt, &fmt, sizeof (alt_fmt));
+
+ if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type))
+ alt_fmt.fmt.pix_mp.quantization = V4L2_QUANTIZATION_FULL_RANGE;
+ else
+ alt_fmt.fmt.pix.quantization = V4L2_QUANTIZATION_FULL_RANGE;
+
+ if (gst_v4l2_object_get_colorspace (v4l2object, &alt_fmt, &cinfo))
+ gst_v4l2_object_fill_colorimetry_list (&list, &cinfo);
+ }
}
}
}
size.stepwise.min_height);
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max width: %d",
size.stepwise.max_width);
- GST_DEBUG_OBJECT (v4l2object->dbg_obj, "min height: %d",
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "max height: %d",
size.stepwise.max_height);
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step width: %d",
size.stepwise.step_width);
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "step height: %d",
size.stepwise.step_height);
- w = MAX (size.stepwise.min_width, 1);
- h = MAX (size.stepwise.min_height, 1);
- maxw = MIN (size.stepwise.max_width, G_MAXINT);
- maxh = MIN (size.stepwise.max_height, G_MAXINT);
-
step_w = MAX (size.stepwise.step_width, 1);
step_h = MAX (size.stepwise.step_height, 1);
+ w = MAX (size.stepwise.min_width, step_w);
+ h = MAX (size.stepwise.min_height, step_h);
+ maxw = MIN (size.stepwise.max_width, G_MAXINT);
+ maxh = MIN (size.stepwise.max_height, G_MAXINT);
/* FIXME: check for sanity and that min/max are multiples of the steps */
/* Map the buffers */
GST_LOG_OBJECT (v4l2object->dbg_obj, "initiating buffer pool");
- if (!(v4l2object->pool = gst_v4l2_buffer_pool_new (v4l2object, caps)))
- goto buffer_pool_new_failed;
+ {
+ GstBufferPool *pool = gst_v4l2_buffer_pool_new (v4l2object, caps);
+ GST_OBJECT_LOCK (v4l2object->element);
+ v4l2object->pool = pool;
+ GST_OBJECT_UNLOCK (v4l2object->element);
+ if (!pool)
+ goto buffer_pool_new_failed;
+ }
GST_V4L2_SET_ACTIVE (v4l2object);
struct v4l2_selection sel = { 0 };
GST_V4L2_CHECK_OPEN (obj);
- GST_V4L2_CHECK_NOT_ACTIVE (obj);
sel.type = obj->type;
sel.target = V4L2_SEL_TGT_COMPOSE_DEFAULT;
const GstVideoFormatInfo *finfo = info->finfo;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
- gint x_tiles, y_tiles, ws, hs, tile_height, padded_height;
-
- ws = GST_VIDEO_FORMAT_INFO_TILE_WS (finfo);
- hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
+ gint x_tiles, y_tiles, tile_height, padded_height;
- /* this only works for what we support, NV12 subsampled tiles */
- if (GST_VIDEO_FORMAT_INFO_HAS_SUBTILES (finfo) && plane == 1)
- hs -= 1;
-
- tile_height = 1 << hs;
+ tile_height = GST_VIDEO_FORMAT_INFO_TILE_HEIGHT (finfo, plane);
padded_height = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, plane,
info->height + align->padding_top + align->padding_bottom);
- padded_height = GST_ROUND_UP_N (padded_height, tile_height);
- x_tiles = stride >> ws;
- y_tiles = padded_height >> hs;
+ x_tiles = stride / GST_VIDEO_FORMAT_INFO_TILE_STRIDE (finfo, plane);
+ y_tiles = (padded_height + tile_height - 1) / tile_height;
info->stride[plane] = GST_VIDEO_TILE_MAKE_STRIDE (x_tiles, y_tiles);
} else {
info->stride[plane] = stride;
padded_width = stride / pstride;
} else {
/* pstride can be 0 for complex formats */
- GST_WARNING_OBJECT (v4l2object->element,
+ GST_WARNING_OBJECT (v4l2object->dbg_obj,
"format %s has a pstride of 0, cannot compute padded with",
gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (info)));
padded_width = stride;
padded_height = format->fmt.pix.height;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (finfo)) {
- guint hs, tile_height;
-
- hs = GST_VIDEO_FORMAT_INFO_TILE_HS (finfo);
- tile_height = 1 << hs;
-
- padded_height = GST_ROUND_UP_N (padded_height, tile_height);
+ guint tile_height;
+ tile_height = GST_VIDEO_FORMAT_INFO_TILE_HEIGHT (finfo, 0);
+ padded_height = (padded_height + tile_height - 1) / tile_height;
}
align->padding_bottom =
if ((align->padding_left + align->padding_top) > 0)
GST_WARNING_OBJECT (v4l2object->dbg_obj,
"Left and top padding is not permitted for tiled formats");
+ memset (v4l2object->plane_size, 0,
+ sizeof (v4l2object->plane_size[0] * GST_VIDEO_MAX_PLANES));
} else {
- for (i = 0; i < finfo->n_planes; i++) {
- gint vedge, hedge;
-
- /* FIXME we assume plane as component as this is true for all supported
- * format we support. */
-
- hedge = GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i, align->padding_left);
- vedge = GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, align->padding_top);
-
- info->offset[i] += (vedge * info->stride[i]) +
- (hedge * GST_VIDEO_INFO_COMP_PSTRIDE (info, i));
+ if (!gst_video_info_align_full (info, align, v4l2object->plane_size)) {
+ GST_WARNING_OBJECT (v4l2object->dbg_obj, "Failed to align video info");
}
}
}
}
+#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY
static gboolean
gst_v4l2_video_colorimetry_matches (const GstVideoColorimetry * cinfo,
GstCaps * caps)
&& gst_video_colorimetry_is_equal (cinfo, &ci_jpeg))
return TRUE;
+ /* bypass check the below transfer types, because those types are cast to
+ * V4L2_XFER_FUNC_NONE type when try format or set format and V4L2_XFER_FUNC_NONE
+ * type is cast to GST_VIDEO_TRANSFER_GAMMA10 type in gst_v4l2_object_get_colorspace */
+ if ((info.colorimetry.transfer == GST_VIDEO_TRANSFER_GAMMA18) ||
+ (info.colorimetry.transfer == GST_VIDEO_TRANSFER_GAMMA20) ||
+ (info.colorimetry.transfer == GST_VIDEO_TRANSFER_GAMMA22) ||
+ (info.colorimetry.transfer == GST_VIDEO_TRANSFER_GAMMA28)) {
+ info.colorimetry.transfer = GST_VIDEO_TRANSFER_GAMMA10;
+ if (gst_video_colorimetry_is_equal (&info.colorimetry, cinfo))
+ return TRUE;
+ }
+
return FALSE;
}
+#endif
static const gchar *
field_to_str (enum v4l2_field f)
pixelformat = fmtdesc->pixelformat;
width = GST_VIDEO_INFO_WIDTH (&info);
height = GST_VIDEO_INFO_FIELD_HEIGHT (&info);
+ /* if caps has no width and height info, use default value */
+ if (V4L2_TYPE_IS_OUTPUT (v4l2object->type) && width == 0 && height == 0) {
+ width = GST_V4L2_DEFAULT_WIDTH;
+ height = GST_V4L2_DEFAULT_HEIGHT;
+ }
fps_n = GST_VIDEO_INFO_FPS_N (&info);
fps_d = GST_VIDEO_INFO_FPS_D (&info);
field = get_v4l2_field_for_info (&info);
if (field != V4L2_FIELD_NONE)
- GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "interlaced video");
else
- GST_DEBUG_OBJECT (v4l2object->element, "progressive video");
+ GST_DEBUG_OBJECT (v4l2object->dbg_obj, "progressive video");
/* We first pick the main colorspace from the primaries */
switch (info.colorimetry.primaries) {
gint stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
- stride = GST_VIDEO_TILE_X_TILES (stride) <<
- GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+ stride = GST_VIDEO_TILE_X_TILES (stride) *
+ GST_VIDEO_FORMAT_INFO_TILE_STRIDE (info.finfo, i);
format.fmt.pix_mp.plane_fmt[i].bytesperline = stride;
}
format.fmt.pix.field = field;
if (GST_VIDEO_FORMAT_INFO_IS_TILED (info.finfo))
- stride = GST_VIDEO_TILE_X_TILES (stride) <<
- GST_VIDEO_FORMAT_INFO_TILE_WS (info.finfo);
+ stride = GST_VIDEO_TILE_X_TILES (stride) *
+ GST_VIDEO_FORMAT_INFO_TILE_STRIDE (info.finfo, i);
/* try to ask our preferred stride */
format.fmt.pix.bytesperline = stride;
gst_structure_remove_field (s, "interlace-mode");
}
+#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY
if (gst_v4l2_object_get_colorspace (v4l2object, &format, &info.colorimetry)) {
if (gst_structure_has_field (s, "colorimetry")) {
if (!gst_v4l2_video_colorimetry_matches (&info.colorimetry, caps))
goto invalid_colorimetry;
}
} else {
+#endif
/* The driver (or libv4l2) is miss-behaving, just ignore colorimetry from
* the TRY_FMT */
disable_colorimetry = TRUE;
if (gst_structure_has_field (s, "colorimetry"))
gst_structure_remove_field (s, "colorimetry");
+#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY
}
+#endif
/* In case we have skipped the try_fmt probes, we'll need to set the
* interlace-mode and colorimetry back into the caps. */
wanted_field == V4L2_FIELD_NONE ? "progressive" : "interleaved"));
return FALSE;
}
+#ifndef TIZEN_FEATURE_V4L2_DISABLE_COLORIMETRY
invalid_colorimetry:
{
gchar *wanted_colorimetry;
g_free (wanted_colorimetry);
return FALSE;
}
+#endif
get_parm_failed:
{
/* it's possible that this call is not supported */
gst_video_info_init (info);
gst_video_alignment_reset (&align);
+ v4l2object->transfer = GST_VIDEO_TRANSFER_UNKNOWN;
memset (&fmt, 0x00, sizeof (struct v4l2_format));
fmt.type = v4l2object->type;
goto unsupported_field;
}
- gst_video_info_set_interlaced_format (info, format, interlace_mode, width,
- height);
+ if (!gst_video_info_set_interlaced_format (info, format, interlace_mode,
+ width, height))
+ goto invalid_dimensions;
gst_v4l2_object_get_colorspace (v4l2object, &fmt, &info->colorimetry);
gst_v4l2_object_get_streamparm (v4l2object, info);
struct v4l2_crop crop = { 0 };
GST_V4L2_CHECK_OPEN (obj);
- GST_V4L2_CHECK_NOT_ACTIVE (obj);
sel.type = obj->type;
sel.target = V4L2_SEL_TGT_CROP;
GstStructure *config;
GstCaps *oldcaps;
gboolean ret;
+ GstBufferPool *pool = gst_v4l2_object_get_buffer_pool (v4l2object);
- if (!v4l2object->pool)
+ if (!pool)
return FALSE;
- config = gst_buffer_pool_get_config (v4l2object->pool);
+ config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
ret = oldcaps && gst_caps_is_equal (caps, oldcaps);
gst_structure_free (config);
+ gst_object_unref (pool);
return ret;
}
GstStructure *config;
GstCaps *oldcaps;
gboolean ret;
+ GstBufferPool *pool = gst_v4l2_object_get_buffer_pool (v4l2object);
- if (!v4l2object->pool)
+ if (!pool)
return FALSE;
- config = gst_buffer_pool_get_config (v4l2object->pool);
+ config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
ret = oldcaps && gst_caps_is_subset (oldcaps, caps);
gst_structure_free (config);
+ gst_object_unref (pool);
return ret;
}
{
GstStructure *config;
GstCaps *oldcaps;
+ GstBufferPool *pool = gst_v4l2_object_get_buffer_pool (v4l2object);
- if (!v4l2object->pool)
+ if (!pool)
return NULL;
- config = gst_buffer_pool_get_config (v4l2object->pool);
+ config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_get_params (config, &oldcaps, NULL, NULL, NULL);
if (oldcaps)
gst_structure_free (config);
+ gst_object_unref (pool);
return oldcaps;
}
gst_v4l2_object_unlock (GstV4l2Object * v4l2object)
{
gboolean ret = TRUE;
+ GstBufferPool *pool = gst_v4l2_object_get_buffer_pool (v4l2object);
GST_LOG_OBJECT (v4l2object->dbg_obj, "start flushing");
- if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
- gst_buffer_pool_set_flushing (v4l2object->pool, TRUE);
+ gst_poll_set_flushing (v4l2object->poll, TRUE);
+
+ if (!pool)
+ return ret;
+
+ if (gst_buffer_pool_is_active (pool))
+ gst_buffer_pool_set_flushing (pool, TRUE);
+ gst_object_unref (pool);
return ret;
}
gst_v4l2_object_unlock_stop (GstV4l2Object * v4l2object)
{
gboolean ret = TRUE;
+ GstBufferPool *pool = gst_v4l2_object_get_buffer_pool (v4l2object);
GST_LOG_OBJECT (v4l2object->dbg_obj, "stop flushing");
- if (v4l2object->pool && gst_buffer_pool_is_active (v4l2object->pool))
- gst_buffer_pool_set_flushing (v4l2object->pool, FALSE);
+ gst_poll_set_flushing (v4l2object->poll, FALSE);
+
+ if (!pool)
+ return ret;
+
+ if (gst_buffer_pool_is_active (pool))
+ gst_buffer_pool_set_flushing (pool, FALSE);
+ gst_object_unref (pool);
return ret;
}
gboolean
gst_v4l2_object_stop (GstV4l2Object * v4l2object)
{
+ GstBufferPool *pool;
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stopping");
if (!GST_V4L2_IS_OPEN (v4l2object))
if (!GST_V4L2_IS_ACTIVE (v4l2object))
goto done;
- if (v4l2object->pool) {
- if (!gst_v4l2_buffer_pool_orphan (&v4l2object->pool)) {
+ gst_poll_set_flushing (v4l2object->poll, TRUE);
+
+ pool = gst_v4l2_object_get_buffer_pool (v4l2object);
+ if (pool) {
+ if (!gst_v4l2_buffer_pool_orphan (v4l2object)) {
GST_DEBUG_OBJECT (v4l2object->dbg_obj, "deactivating pool");
- gst_buffer_pool_set_active (v4l2object->pool, FALSE);
- gst_object_unref (v4l2object->pool);
+ gst_buffer_pool_set_active (pool, FALSE);
+
+ {
+ GstBufferPool *old_pool;
+ GST_OBJECT_LOCK (v4l2object->element);
+ old_pool = v4l2object->pool;
+ v4l2object->pool = NULL;
+ GST_OBJECT_UNLOCK (v4l2object->element);
+ if (old_pool)
+ gst_object_unref (old_pool);
+ }
}
- v4l2object->pool = NULL;
+ gst_object_unref (pool);
}
GST_V4L2_SET_INACTIVE (v4l2object);
GstCaps *ret;
GSList *walk;
GSList *formats;
+ guint32 fourcc = 0;
+
+ if (v4l2object->fmtdesc)
+ fourcc = GST_V4L2_PIXELFORMAT (v4l2object);
+ gst_v4l2_object_clear_format_list (v4l2object);
formats = gst_v4l2_object_get_format_list (v4l2object);
+ /* Recover the fmtdesc, it may no longer exist, in which case it will be set
+ * to null */
+ if (fourcc)
+ v4l2object->fmtdesc =
+ gst_v4l2_object_get_format_from_fourcc (v4l2object, fourcc);
+
ret = gst_caps_new_empty ();
if (v4l2object->keep_aspect && !v4l2object->par) {
gst_caps_get_size (ret) - 1));
}
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+ if (format->pixelformat == V4L2_PIX_FMT_NV12 ||
+ format->pixelformat == V4L2_PIX_FMT_YUV420) {
+ GstStructure *alt_s = gst_structure_copy (template);
+
+ if (format->pixelformat == V4L2_PIX_FMT_NV12)
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "SN12", NULL);
+ else
+ gst_structure_set (alt_s, "format", G_TYPE_STRING, "S420", NULL);
+
+ tmp = gst_v4l2_object_probe_caps_for_format (v4l2object,
+ format->pixelformat, alt_s);
+
+ if (tmp)
+ gst_caps_append (ret, tmp);
+
+ gst_structure_free (alt_s);
+ }
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
gst_structure_free (template);
}
gint plane_stride = stride[i];
if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
- plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
- GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+ plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) *
+ GST_VIDEO_FORMAT_INFO_TILE_STRIDE (obj->info.finfo, i);
format.fmt.pix_mp.plane_fmt[i].bytesperline = plane_stride;
format.fmt.pix_mp.height = padded_height;
GST_DEBUG_OBJECT (obj->dbg_obj, "Wanted stride: %i", plane_stride);
if (GST_VIDEO_FORMAT_INFO_IS_TILED (obj->info.finfo))
- plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) <<
- GST_VIDEO_FORMAT_INFO_TILE_WS (obj->info.finfo);
+ plane_stride = GST_VIDEO_TILE_X_TILES (plane_stride) *
+ GST_VIDEO_FORMAT_INFO_TILE_STRIDE (obj->info.finfo, 0);
format.fmt.pix.bytesperline = plane_stride;
format.fmt.pix.height = padded_height;
gst_v4l2_object_decide_allocation (GstV4l2Object * obj, GstQuery * query)
{
GstCaps *caps;
- GstBufferPool *pool = NULL, *other_pool = NULL;
+ GstBufferPool *pool = NULL, *other_pool = NULL, *obj_pool = NULL;
GstStructure *config;
guint size, min, max, own_min = 0;
gboolean update;
GstAllocationParams params = { 0 };
guint video_idx;
+#ifdef TIZEN_FEATURE_V4L2_TBM_SUPPORT
+ GST_INFO_OBJECT (obj->dbg_obj, "decide allocation - %s",
+ V4L2_TYPE_IS_OUTPUT (obj->type) ? "output" : "capture");
+#else /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
GST_DEBUG_OBJECT (obj->dbg_obj, "decide allocation");
+#endif /* TIZEN_FEATURE_V4L2_TBM_SUPPORT */
g_return_val_if_fail (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE ||
obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, FALSE);
gst_query_parse_allocation (query, &caps, NULL);
- if (obj->pool == NULL) {
+ obj_pool = gst_v4l2_object_get_buffer_pool (obj);
+ if (obj_pool == NULL) {
if (!gst_v4l2_object_setup_pool (obj, caps))
goto pool_failed;
+ obj_pool = gst_v4l2_object_get_buffer_pool (obj);
+ if (obj_pool == NULL)
+ goto pool_failed;
}
if (gst_query_get_n_allocation_params (query) > 0)
/* no downstream pool, use our own then */
GST_DEBUG_OBJECT (obj->dbg_obj,
"read/write mode: no downstream pool, using our own");
- pool = gst_object_ref (obj->pool);
+ pool = gst_object_ref (obj_pool);
size = obj->info.size;
pushing_from_our_pool = TRUE;
}
* our own, so it can serve itself */
if (pool == NULL)
goto no_downstream_pool;
- gst_v4l2_buffer_pool_set_other_pool (GST_V4L2_BUFFER_POOL (obj->pool),
+ gst_v4l2_buffer_pool_set_other_pool (GST_V4L2_BUFFER_POOL (obj_pool),
pool);
other_pool = pool;
gst_object_unref (pool);
- pool = gst_object_ref (obj->pool);
+ pool = gst_object_ref (obj_pool);
size = obj->info.size;
break;
if (can_share_own_pool) {
if (pool)
gst_object_unref (pool);
- pool = gst_object_ref (obj->pool);
+ pool = gst_object_ref (obj_pool);
size = obj->info.size;
GST_DEBUG_OBJECT (obj->dbg_obj,
"streaming mode: using our own pool %" GST_PTR_FORMAT, pool);
min = MAX (min, GST_V4L2_MIN_BUFFERS (obj));
/* To import we need the other pool to hold at least own_min */
- if (obj->pool == pool)
+ if (obj_pool == pool)
min += own_min;
}
max = MAX (min, max);
/* First step, configure our own pool */
- config = gst_buffer_pool_get_config (obj->pool);
+ config = gst_buffer_pool_get_config (obj_pool);
if (obj->need_video_meta || has_video_meta) {
GST_DEBUG_OBJECT (obj->dbg_obj, "activate Video Meta");
GST_PTR_FORMAT, config);
/* Our pool often need to adjust the value */
- if (!gst_buffer_pool_set_config (obj->pool, config)) {
- config = gst_buffer_pool_get_config (obj->pool);
+ if (!gst_buffer_pool_set_config (obj_pool, config)) {
+ config = gst_buffer_pool_get_config (obj_pool);
GST_DEBUG_OBJECT (obj->dbg_obj, "own pool config changed to %"
GST_PTR_FORMAT, config);
/* our pool will adjust the maximum buffer, which we are fine with */
- if (!gst_buffer_pool_set_config (obj->pool, config))
+ if (!gst_buffer_pool_set_config (obj_pool, config))
goto config_failed;
}
/* Now configure the other pool if different */
- if (obj->pool != pool)
+ if (obj_pool != pool)
other_pool = pool;
if (other_pool) {
if (pool)
gst_object_unref (pool);
+ if (obj_pool)
+ gst_object_unref (obj_pool);
+
return TRUE;
pool_failed:
(_("Video device did not suggest any buffer size.")), (NULL));
goto cleanup;
}
+no_downstream_pool:
+ {
+ GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
+ (_("No downstream pool to import from.")),
+ ("When importing DMABUF or USERPTR, we need a pool to import from"));
+ goto cleanup;
+ }
cleanup:
{
if (allocator)
if (pool)
gst_object_unref (pool);
- return FALSE;
- }
-no_downstream_pool:
- {
- GST_ELEMENT_ERROR (obj->element, RESOURCE, SETTINGS,
- (_("No downstream pool to import from.")),
- ("When importing DMABUF or USERPTR, we need a pool to import from"));
+
+ if (obj_pool)
+ gst_object_unref (obj_pool);
return FALSE;
}
}
switch (obj->mode) {
case GST_V4L2_IO_MMAP:
case GST_V4L2_IO_DMABUF:
- if (need_pool && obj->pool) {
- if (!gst_buffer_pool_is_active (obj->pool))
- pool = gst_object_ref (obj->pool);
+ if (need_pool) {
+ GstBufferPool *obj_pool = gst_v4l2_object_get_buffer_pool (obj);
+ if (obj_pool) {
+ if (!gst_buffer_pool_is_active (obj_pool))
+ pool = gst_object_ref (obj_pool);
+
+ gst_object_unref (obj_pool);
+ }
}
break;
default:
/* for the remaining, only the kernel driver can tell */
return TRUE;
}
+
+/**
+ * gst_v4l2_object_get_buffer_pool:
+ * @v4l2object: a #GstV4l2Object
+ *
+ * Returns: (nullable) (transfer full): the instance of the #GstBufferPool used
+ * by the v4l2object; unref it after usage.
+ */
+GstBufferPool *
+gst_v4l2_object_get_buffer_pool (GstV4l2Object * v4l2object)
+{
+  GstBufferPool *ret = NULL;
+
+  g_return_val_if_fail (v4l2object != NULL, NULL);
+
+  /* the pool pointer is read/written under the element's object lock
+   * elsewhere; take a ref inside the lock so the caller owns a stable
+   * reference even if the pool is swapped out concurrently */
+  GST_OBJECT_LOCK (v4l2object->element);
+  if (v4l2object->pool)
+    ret = gst_object_ref (v4l2object->pool);
+  GST_OBJECT_UNLOCK (v4l2object->element);
+
+  return ret;
+}
+
+/**
+ * gst_v4l2_object_poll:
+ * @v4l2object: a #GstV4l2Object
+ * @timeout: timeout of type #GstClockTime
+ *
+ * Poll the video file descriptor for read when this is a capture, write when
+ * this is an output. It will also watch for errors and source change events.
+ * If a source change event is received, %GST_V4L2_FLOW_RESOLUTION_CHANGE will
+ * be returned. If the poll was interrupted, %GST_FLOW_FLUSHING is returned.
+ * If there was no read or write indicator, %GST_V4L2_FLOW_LAST_BUFFER is
+ * returned. It may also return %GST_FLOW_ERROR if some unexpected error
+ * occurred.
+ *
+ * Returns: GST_FLOW_OK if buffers are ready to be queued or dequeued.
+ */
+GstFlowReturn
+gst_v4l2_object_poll (GstV4l2Object * v4l2object, GstClockTime timeout)
+{
+  gint ret;
+
+  /* If the driver previously failed with ENXIO we stop polling entirely:
+   * a zero timeout means "just check", so report no buffers; any other
+   * timeout pretends readiness and lets the ioctl path block instead. */
+  if (!v4l2object->can_poll_device) {
+    if (timeout != 0)
+      goto done;
+    else
+      goto no_buffers;
+  }
+
+  GST_LOG_OBJECT (v4l2object->dbg_obj, "polling device");
+
+again:
+  ret = gst_poll_wait (v4l2object->poll, timeout);
+  if (G_UNLIKELY (ret < 0)) {
+    switch (errno) {
+      case EBUSY:
+        /* gst_poll_wait() fails with EBUSY when the set was flushed via
+         * gst_poll_set_flushing(), i.e. we are being stopped */
+        goto stopped;
+      case EAGAIN:
+      case EINTR:
+        goto again;
+      case ENXIO:
+        GST_WARNING_OBJECT (v4l2object->dbg_obj,
+            "v4l2 device doesn't support polling. Disabling"
+            " using libv4l2 in this case may cause deadlocks");
+        v4l2object->can_poll_device = FALSE;
+        goto done;
+      default:
+        goto select_error;
+    }
+  }
+
+  if (gst_poll_fd_has_error (v4l2object->poll, &v4l2object->pollfd))
+    goto select_error;
+
+  /* PRI is used to signal that events are available */
+  if (gst_poll_fd_has_pri (v4l2object->poll, &v4l2object->pollfd)) {
+    struct v4l2_event event = { 0, };
+
+    if (!gst_v4l2_dequeue_event (v4l2object, &event))
+      goto dqevent_failed;
+
+    if (event.type != V4L2_EVENT_SOURCE_CHANGE) {
+      GST_INFO_OBJECT (v4l2object->dbg_obj,
+          "Received unhandled event, ignoring.");
+      goto again;
+    }
+
+    if ((event.u.src_change.changes & V4L2_EVENT_SRC_CH_RESOLUTION) == 0) {
+      GST_DEBUG_OBJECT (v4l2object->dbg_obj,
+          "Received non-resolution source-change, ignoring.");
+      goto again;
+    }
+
+    /* the resolution changed, so the cached format list is stale */
+    if (v4l2object->formats)
+      gst_v4l2_object_clear_format_list (v4l2object);
+
+    return GST_V4L2_FLOW_RESOLUTION_CHANGE;
+  }
+
+  /* ret == 0 means the wait timed out with no fd activity */
+  if (ret == 0)
+    goto no_buffers;
+
+done:
+  return GST_FLOW_OK;
+
+  /* ERRORS */
+stopped:
+  {
+    GST_DEBUG_OBJECT (v4l2object->dbg_obj, "stop called");
+    return GST_FLOW_FLUSHING;
+  }
+select_error:
+  {
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ, (NULL),
+        ("poll error %d: %s (%d)", ret, g_strerror (errno), errno));
+    return GST_FLOW_ERROR;
+  }
+no_buffers:
+  {
+    return GST_V4L2_FLOW_LAST_BUFFER;
+  }
+dqevent_failed:
+  {
+    GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, READ, (NULL),
+        ("dqevent error: %s (%d)", g_strerror (errno), errno));
+    return GST_FLOW_ERROR;
+  }
+}
+
+/**
+ * gst_v4l2_object_subscribe_event:
+ * @v4l2object: a #GstV4l2Object
+ * @event: the event ID
+ *
+ * Subscribe to an event, and enable polling for these. Note that only
+ * %V4L2_EVENT_SOURCE_CHANGE is currently supported by the poll helper.
+ *
+ * Returns: %TRUE if the driver supports this event
+ */
+gboolean
+gst_v4l2_object_subscribe_event (GstV4l2Object * v4l2object, guint32 event)
+{
+  guint32 id = 0;
+
+  g_return_val_if_fail (v4l2object != NULL, FALSE);
+  g_return_val_if_fail (GST_V4L2_IS_OPEN (v4l2object), FALSE);
+
+  /* events are subscribed per input/output; query the currently selected one */
+  v4l2object->get_in_out_func (v4l2object, &id);
+
+  if (gst_v4l2_subscribe_event (v4l2object, event, id)) {
+    /* enable POLLPRI so gst_v4l2_object_poll() can detect pending events */
+    gst_poll_fd_ctl_pri (v4l2object->poll, &v4l2object->pollfd, TRUE);
+    return TRUE;
+  }
+
+  return FALSE;
+}