2 * Copyright (C) 2014 Collabora Ltd.
3 * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 # define _GNU_SOURCE /* O_CLOEXEC */
28 #include "ext/videodev2.h"
29 #include "gstv4l2allocator.h"
30 #include "v4l2_calls.h"
32 #include <gst/allocators/gstdmabuf.h>
37 #include <sys/types.h>
40 #define GST_V4L2_MEMORY_TYPE "V4l2Memory"
42 #define gst_v4l2_allocator_parent_class parent_class
43 G_DEFINE_TYPE (GstV4l2Allocator, gst_v4l2_allocator, GST_TYPE_ALLOCATOR);
45 GST_DEBUG_CATEGORY_STATIC (v4l2allocator_debug);
46 #define GST_CAT_DEFAULT v4l2allocator_debug
/* Helpers to manipulate the "queued" state of a struct v4l2_buffer
 * (passed by value, they act on the .flags field in place).  A buffer
 * counts as queued while either V4L2_BUF_FLAG_QUEUED (owned by the
 * driver's incoming queue) or V4L2_BUF_FLAG_DONE (ready for dequeue)
 * is set. */
48 #define UNSET_QUEUED(buffer) \
49 ((buffer).flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
51 #define SET_QUEUED(buffer) ((buffer).flags |= V4L2_BUF_FLAG_QUEUED)
53 #define IS_QUEUED(buffer) \
54 ((buffer).flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
62 static guint gst_v4l2_allocator_signals[LAST_SIGNAL] = { 0 };
64 static void gst_v4l2_allocator_release (GstV4l2Allocator * allocator,
/* Returns a static, human-readable name for a v4l2_memory enum value;
 * used only for debug/log output. */
68 memory_type_to_str (guint32 memory)
71 case V4L2_MEMORY_MMAP:
73 case V4L2_MEMORY_USERPTR:
75 case V4L2_MEMORY_DMABUF:
82 /*************************************/
83 /* GstV4lMemory implementation */
84 /*************************************/
/* GstMemoryMapFunction for GstV4l2Memory.  MMAP and USERPTR buffers
 * already have a CPU-visible data pointer; DMABUF-exported memory must
 * never reach this path because it is handed downstream wrapped as
 * GstDmaBufMemory instead (hence the assertion). */
87 _v4l2mem_map (GstV4l2Memory * mem, gsize maxsize, GstMapFlags flags)
91 switch (mem->group->buffer.memory) {
92 case V4L2_MEMORY_MMAP:
93 case V4L2_MEMORY_USERPTR:
96 case V4L2_MEMORY_DMABUF:
97 /* v4l2 dmabuf memory are not shared with downstream */
98 g_assert_not_reached ();
101 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMemoryUnmapFunction counterpart of _v4l2mem_map: MMAP/USERPTR
 * memory needs no per-unmap work beyond reporting success; DMABUF
 * memory cannot legally be mapped through this allocator at all. */
108 _v4l2mem_unmap (GstV4l2Memory * mem)
110 gboolean ret = FALSE;
112 switch (mem->group->buffer.memory) {
113 case V4L2_MEMORY_MMAP:
114 case V4L2_MEMORY_USERPTR:
117 case V4L2_MEMORY_DMABUF:
118 /* v4l2 dmabuf memory are not shared with downstream */
119 g_assert_not_reached ();
122 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMiniObject dispose hook: when the last user ref drops, the plane
 * memory is not destroyed but recycled — it is re-ref'ed back into its
 * group slot and released to the allocator, which re-pools the group
 * once all planes are back.  An allocator ref is taken so the release
 * path can safely drop it last. */
129 _v4l2mem_dispose (GstV4l2Memory * mem)
131 GstV4l2Allocator *allocator = (GstV4l2Allocator *) mem->mem.allocator;
132 GstV4l2MemoryGroup *group = mem->group;
135 if (group->mem[mem->plane]) {
136 /* We may have a dmabuf, replace it with returned original memory */
137 group->mem[mem->plane] = gst_memory_ref ((GstMemory *) mem);
138 gst_v4l2_allocator_release (allocator, mem);
141 gst_object_ref (allocator);
/* Allocates and initialises one GstV4l2Memory wrapping plane @plane of
 * @group.  @data and @dmafd may be NULL / -1 when the memory is not yet
 * backed (USERPTR / DMABUF import groups are filled in later).  The
 * dispose hook is installed so the memory is recycled, not freed, on
 * last unref. */
148 static inline GstV4l2Memory *
149 _v4l2mem_new (GstMemoryFlags flags, GstAllocator * allocator,
150 GstMemory * parent, gsize maxsize, gsize align, gsize offset, gsize size,
151 gint plane, gpointer data, int dmafd, GstV4l2MemoryGroup * group)
155 mem = g_slice_new0 (GstV4l2Memory);
156 gst_memory_init (GST_MEMORY_CAST (mem),
157 flags, allocator, parent, maxsize, align, offset, size);
160 mem->mem.mini_object.dispose =
161 (GstMiniObjectDisposeFunction) _v4l2mem_dispose;
/* GstMemoryShareFunction: creates a read-only sub-memory referencing a
 * [offset, offset+size) window of @mem, parented on the top-most parent
 * so the underlying buffer stays alive.  size < 0 means "to the end". */
171 static GstV4l2Memory *
172 _v4l2mem_share (GstV4l2Memory * mem, gssize offset, gsize size)
177 /* find the real parent */
178 if ((parent = mem->mem.parent) == NULL)
179 parent = (GstMemory *) mem;
182 size = mem->mem.size - offset;
184 /* the shared memory is always readonly */
185 sub = _v4l2mem_new (GST_MINI_OBJECT_FLAGS (parent) |
186 GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->mem.allocator, parent,
187 mem->mem.maxsize, mem->mem.align, offset, size, mem->plane, mem->data,
/* GstMemoryIsSpanFunction: two memories span if they share a parent
 * and mem1 ends exactly where mem2 begins; *offset receives mem1's
 * offset relative to the common parent. */
194 _v4l2mem_is_span (GstV4l2Memory * mem1, GstV4l2Memory * mem2, gsize * offset)
197 *offset = mem1->mem.offset - mem1->mem.parent->offset;
199 /* and memory is contiguous */
200 return mem1->mem.offset + mem1->mem.size == mem2->mem.offset;
/* Returns TRUE if @mem was allocated by a GstV4l2Allocator (checked by
 * memory type string, not by allocator instance). */
204 gst_is_v4l2_memory (GstMemory * mem)
206 return gst_memory_is_type (mem, GST_V4L2_MEMORY_TYPE);
/* Lazily-initialised quark used as qdata key to attach the original
 * GstV4l2Memory to an exported GstDmaBufMemory. */
210 gst_v4l2_memory_quark (void)
212 static GQuark quark = 0;
215 quark = g_quark_from_string ("GstV4l2Memory");
221 /*************************************/
222 /* GstV4l2MemoryGroup implementation */
223 /*************************************/
/* Frees a memory group: clears each plane slot before unreffing the
 * memory so the dispose hook does not try to recycle it back into the
 * group, then releases the group struct itself. */
226 gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
230 for (i = 0; i < group->n_mem; i++) {
231 GstMemory *mem = group->mem[i];
232 group->mem[i] = NULL;
234 gst_memory_unref (mem);
237 g_slice_free (GstV4l2MemoryGroup, group);
/* Creates a memory group for driver buffer @index: queries the buffer
 * with VIDIOC_QUERYBUF, sanity-checks the returned index and sizes
 * against the negotiated format, and normalises non-mplane information
 * into the planes[] array so later code has a single layout to deal
 * with.  Returns NULL on driver error or size mismatch. */
240 static GstV4l2MemoryGroup *
241 gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
243 gint video_fd = allocator->video_fd;
244 guint32 memory = allocator->memory;
245 struct v4l2_format *format = &allocator->format;
246 GstV4l2MemoryGroup *group;
247 gsize img_size, buf_size;
249 group = g_slice_new0 (GstV4l2MemoryGroup);
251 group->buffer.type = format->type;
252 group->buffer.index = index;
253 group->buffer.memory = memory;
255 if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
256 group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
257 group->buffer.m.planes = group->planes;
262 if (v4l2_ioctl (video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
263 goto querybuf_failed;
265 if (group->buffer.index != index) {
266 GST_ERROR_OBJECT (allocator, "Buffer index returned by VIDIOC_QUERYBUF "
267 "didn't match, this indicate the presence of a bug in your driver or "
269 g_slice_free (GstV4l2MemoryGroup, group);
273 /* Check that the provided size matches the format we have negotiated.
274 * Failing there usually means a driver or libv4l bug. */
275 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
278 for (i = 0; i < group->n_mem; i++) {
279 img_size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
280 buf_size = group->planes[i].length;
281 if (buf_size < img_size)
282 goto buffer_too_short;
285 img_size = allocator->format.fmt.pix.sizeimage;
286 buf_size = group->buffer.length;
287 if (buf_size < img_size)
288 goto buffer_too_short;
291 /* We save non planar buffer information into the multi-planar plane array
292 * to avoid duplicating the code later */
293 if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
294 group->planes[0].bytesused = group->buffer.bytesused;
295 group->planes[0].length = group->buffer.length;
296 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
297 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
300 GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
301 GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index);
302 GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type);
303 GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags);
304 GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field);
305 GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory);
306 GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);
308 #ifndef GST_DISABLE_GST_DEBUG
309 if (memory == V4L2_MEMORY_MMAP) {
311 for (i = 0; i < group->n_mem; i++) {
312 GST_LOG_OBJECT (allocator, " [%u] bytesused: %u, length: %u", i,
313 group->planes[i].bytesused, group->planes[i].length);
314 GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i,
315 group->planes[i].m.mem_offset);
324 GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
329 GST_ERROR ("buffer size %" G_GSIZE_FORMAT
330 " is smaller then negotiated size %" G_GSIZE_FORMAT
331 ", this is usually the result of a bug in the v4l2 driver or libv4l.",
336 gst_v4l2_memory_group_free (group);
341 /*************************************/
342 /* GstV4lAllocator implementation */
343 /*************************************/
/* Called (via dispose) each time one plane memory of a group comes back.
 * When the last plane of the group returns, the whole group is pushed
 * onto the free queue and "group-released" is emitted so pools can
 * resume allocation.  Drops the allocator ref taken in dispose — must
 * stay last since it may free the allocator. */
346 gst_v4l2_allocator_release (GstV4l2Allocator * allocator, GstV4l2Memory * mem)
348 GstV4l2MemoryGroup *group = mem->group;
350 GST_LOG_OBJECT (allocator, "plane %i of buffer %u released",
351 mem->plane, group->buffer.index);
353 switch (allocator->memory) {
354 case V4L2_MEMORY_DMABUF:
358 case V4L2_MEMORY_USERPTR:
365 /* When all memory are back, put the group back in the free queue */
366 if (g_atomic_int_dec_and_test (&group->mems_allocated)) {
367 GST_LOG_OBJECT (allocator, "buffer %u released", group->buffer.index);
368 gst_atomic_queue_push (allocator->free_queue, group);
369 g_signal_emit (allocator, gst_v4l2_allocator_signals[GROUP_RELEASED], 0);
372 /* Keep last, allocator may be freed after this call */
373 g_object_unref (allocator);
/* GstAllocatorClass.free: final destruction of a GstV4l2Memory.  Shared
 * sub-memories (with a parent) own no resources; for unparented MMAP
 * memory the mapping is torn down with v4l2_munmap before the struct is
 * freed. */
377 gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
379 GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
380 GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
381 GstV4l2MemoryGroup *group = mem->group;
383 /* Only free unparented memory */
384 if (mem->mem.parent == NULL) {
385 GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
386 mem->plane, group->buffer.index);
388 if (allocator->memory == V4L2_MEMORY_MMAP) {
390 v4l2_munmap (mem->data, group->planes[mem->plane].length);
393 /* This applies to both mmap with expbuf, and dmabuf imported memory */
398 g_slice_free (GstV4l2Memory, mem);
/* GObject dispose: drops all remaining memory groups (clearing slots
 * first so they are not recycled), then chains up. */
402 gst_v4l2_allocator_dispose (GObject * obj)
404 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
407 GST_LOG_OBJECT (obj, "called");
409 for (i = 0; i < allocator->count; i++) {
410 GstV4l2MemoryGroup *group = allocator->groups[i];
411 allocator->groups[i] = NULL;
413 gst_v4l2_memory_group_free (group);
416 G_OBJECT_CLASS (parent_class)->dispose (obj);
/* GObject finalize: closes the dup'ed device fd and releases the free
 * queue, then chains up. */
420 gst_v4l2_allocator_finalize (GObject * obj)
422 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
424 GST_LOG_OBJECT (obj, "called");
426 v4l2_close (allocator->video_fd);
427 gst_atomic_queue_unref (allocator->free_queue);
429 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* Class init: wires free/dispose/finalize vfuncs (alloc stays NULL —
 * allocation goes through the explicit _alloc_* entry points), registers
 * the "group-released" signal and the debug category. */
433 gst_v4l2_allocator_class_init (GstV4l2AllocatorClass * klass)
435 GObjectClass *object_class;
436 GstAllocatorClass *allocator_class;
438 allocator_class = (GstAllocatorClass *) klass;
439 object_class = (GObjectClass *) klass;
441 allocator_class->alloc = NULL;
442 allocator_class->free = gst_v4l2_allocator_free;
444 object_class->dispose = gst_v4l2_allocator_dispose;
445 object_class->finalize = gst_v4l2_allocator_finalize;
447 gst_v4l2_allocator_signals[GROUP_RELEASED] = g_signal_new ("group-released",
448 G_TYPE_FROM_CLASS (object_class), G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
451 GST_DEBUG_CATEGORY_INIT (v4l2allocator_debug, "v4l2allocator", 0,
/* Instance init: installs the custom map/unmap/share/is_span vfuncs,
 * creates the free-group queue (sized to the V4L2 maximum frame count)
 * and marks the allocator as custom-alloc so generic gst_allocator_alloc
 * cannot be used on it. */
456 gst_v4l2_allocator_init (GstV4l2Allocator * allocator)
458 GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
460 alloc->mem_type = GST_V4L2_MEMORY_TYPE;
461 alloc->mem_map = (GstMemoryMapFunction) _v4l2mem_map;
462 alloc->mem_unmap = (GstMemoryUnmapFunction) _v4l2mem_unmap;
463 alloc->mem_share = (GstMemoryShareFunction) _v4l2mem_share;
464 alloc->mem_is_span = (GstMemoryIsSpanFunction) _v4l2mem_is_span;
465 /* Use the default, fallback copy function */
467 allocator->free_queue = gst_atomic_queue_new (VIDEO_MAX_FRAME);
469 GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
/* Probes one memory model: a zero-count VIDIOC_REQBUFS tells us whether
 * the driver supports that v4l2_memory type at all (breq_flag), and a
 * follow-up VIDIOC_CREATE_BUFS probe tells us whether buffers can be
 * created on demand (bcreate_flag).  The macro expands the matching
 * REQBUFS/CREATE_BUFS flag pair for a memory type name. */
472 #define GST_V4L2_ALLOCATOR_PROBE(obj,type) \
473 gst_v4l2_allocator_probe ((obj), V4L2_MEMORY_ ## type, \
474 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS, \
475 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS)
477 gst_v4l2_allocator_probe (GstV4l2Allocator * allocator, guint32 memory,
478 guint32 breq_flag, guint32 bcreate_flag)
480 struct v4l2_requestbuffers breq = { 0 };
483 breq.type = allocator->type;
485 breq.memory = memory;
487 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) == 0) {
488 struct v4l2_create_buffers bcreate = { 0 };
492 bcreate.memory = memory;
493 bcreate.format = allocator->format;
495 if ((v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) == 0))
496 flags |= bcreate_flag;
/* Creates one extra driver buffer on demand via VIDIOC_CREATE_BUFS and
 * wraps it in a new memory group.  Requires the allocator to be active
 * and can_allocate; bails with NULL otherwise.  A driver returning an
 * index we already track is reported as a driver/libv4l2 bug. */
502 static GstV4l2MemoryGroup *
503 gst_v4l2_allocator_create_buf (GstV4l2Allocator * allocator)
505 struct v4l2_create_buffers bcreate = { 0 };
506 GstV4l2MemoryGroup *group = NULL;
508 GST_OBJECT_LOCK (allocator);
510 if (!g_atomic_int_get (&allocator->active))
513 bcreate.memory = allocator->memory;
514 bcreate.format = allocator->format;
517 if (!allocator->can_allocate)
520 if (v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) < 0)
521 goto create_bufs_failed;
523 if (allocator->groups[bcreate.index] != NULL)
524 goto create_bufs_bug;
526 group = gst_v4l2_memory_group_new (allocator, bcreate.index);
529 allocator->groups[bcreate.index] = group;
534 GST_OBJECT_UNLOCK (allocator);
539 GST_WARNING_OBJECT (allocator, "error creating a new buffer: %s",
545 GST_ERROR_OBJECT (allocator, "created buffer has already used buffer "
546 "index %i, this means there is an bug in your driver or libv4l2",
/* Acquires a free memory group: pops from the free queue first, and only
 * falls back to CREATE_BUFS when the queue is empty and the driver
 * supports it; can_allocate is cleared after a failed attempt so we do
 * not hammer CREATE_BUFS on every allocation. */
552 static GstV4l2MemoryGroup *
553 gst_v4l2_allocator_alloc (GstV4l2Allocator * allocator)
555 GstV4l2MemoryGroup *group;
557 if (!g_atomic_int_get (&allocator->active))
560 group = gst_atomic_queue_pop (allocator->free_queue);
563 if (allocator->can_allocate) {
564 group = gst_v4l2_allocator_create_buf (allocator);
566 /* Don't hammer on CREATE_BUFS */
568 allocator->can_allocate = FALSE;
/* Resets each plane memory of @group to the negotiated sizeimage from
 * the current format (maxsize and size), undoing any resize done at
 * dequeue time; imported USERPTR/DMABUF groups are detected first since
 * their sizes come from the importer, not the format. */
576 gst_v4l2_allocator_reset_size (GstV4l2Allocator * allocator,
577 GstV4l2MemoryGroup * group)
580 gboolean imported = FALSE;
582 switch (allocator->memory) {
583 case V4L2_MEMORY_USERPTR:
584 case V4L2_MEMORY_DMABUF:
589 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
592 for (i = 0; i < group->n_mem; i++) {
593 size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
596 group->mem[i]->maxsize = size;
598 gst_memory_resize (group->mem[i], 0, size);
602 size = allocator->format.fmt.pix.sizeimage;
605 group->mem[0]->maxsize = size;
607 gst_memory_resize (group->mem[0], 0, size);
/* Error path shared by the _alloc_* functions: if some planes were
 * already allocated, unref them (the dispose/release path re-pools the
 * group and drops the allocator refs); otherwise push the untouched
 * group straight back on the free queue so _stop() can still drain it. */
612 _cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
614 if (group->mems_allocated > 0) {
616 /* If one or more mmap worked, we need to unref the memory, otherwise
617 * they will keep a ref on the allocator and leak it. This will put back
618 * the group into the free_queue */
619 for (i = 0; i < group->n_mem; i++)
620 gst_memory_unref (group->mem[i]);
622 /* Otherwise, group has to be on free queue for _stop() to work */
623 gst_atomic_queue_push (allocator->free_queue, group);
/* Creates an allocator for @video_fd / @format, named after @parent.
 * The fd is dup'ed so the allocator owns its own handle.  Each memory
 * model (MMAP/USERPTR/DMABUF) is probed; if probing yields nothing we
 * assume MMAP-via-REQBUFS, which covers old videobuf-based drivers that
 * reject REQBUFS(0). */
630 gst_v4l2_allocator_new (GstObject * parent, gint video_fd,
631 struct v4l2_format *format)
633 GstV4l2Allocator *allocator;
635 gchar *name, *parent_name;
637 parent_name = gst_object_get_name (parent);
638 name = g_strconcat (parent_name, ":allocator", NULL);
639 g_free (parent_name);
641 allocator = g_object_new (GST_TYPE_V4L2_ALLOCATOR, "name", name, NULL);
644 /* Save everything */
645 allocator->video_fd = v4l2_dup (video_fd);
646 allocator->type = format->type;
647 allocator->format = *format;
649 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
650 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, USERPTR);
651 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, DMABUF);
655 /* Drivers not ported from videobuf to videobuf2 don't allow freeing buffers
656 * using REQBUFS(0). This is a workaround to still support these drivers,
657 * which are known to have MMAP support. */
658 GST_WARNING_OBJECT (allocator, "Could not probe supported memory type, "
659 "assuming MMAP is supported, this is expected for older drivers not "
660 " yet ported to videobuf2 framework");
661 flags = GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS;
664 GST_OBJECT_FLAG_SET (allocator, flags);
/* Requests @count driver buffers of the given memory model via
 * VIDIOC_REQBUFS (the driver may grant fewer), records whether on-demand
 * CREATE_BUFS is available for that model, builds one memory group per
 * granted buffer and primes the free queue, then marks the allocator
 * active.  Fails if already active or on any driver error. */
670 gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
673 struct v4l2_requestbuffers breq = { count, allocator->type, memory };
674 gboolean can_allocate;
677 g_return_val_if_fail (count != 0, 0);
679 GST_OBJECT_LOCK (allocator);
681 if (g_atomic_int_get (&allocator->active))
684 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)
691 case V4L2_MEMORY_MMAP:
692 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
694 case V4L2_MEMORY_USERPTR:
695 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
697 case V4L2_MEMORY_DMABUF:
698 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
701 can_allocate = FALSE;
705 GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
706 breq.count, memory_type_to_str (memory), count);
708 allocator->can_allocate = can_allocate;
709 allocator->count = breq.count;
710 allocator->memory = memory;
712 /* Create memory groups */
713 for (i = 0; i < allocator->count; i++) {
714 allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
715 if (allocator->groups[i] == NULL)
718 gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);
721 g_atomic_int_set (&allocator->active, TRUE);
724 GST_OBJECT_UNLOCK (allocator);
729 GST_ERROR_OBJECT (allocator, "allocator already active");
734 GST_ERROR_OBJECT (allocator,
735 "error requesting %d buffers: %s", count, g_strerror (errno));
740 GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
/* Deactivates the allocator: refuses while buffers are still held
 * downstream (free queue shorter than count), otherwise drains the free
 * queue, frees every memory group and asks the driver to release its
 * buffers with REQBUFS(0), then clears the active flag.
 * Fix: the warning message printed "buffers" twice. */
751 gst_v4l2_allocator_stop (GstV4l2Allocator * allocator)
753 struct v4l2_requestbuffers breq = { 0, allocator->type, allocator->memory };
755 GstV4l2Return ret = GST_V4L2_OK;
757 GST_DEBUG_OBJECT (allocator, "stop allocator");
759 GST_OBJECT_LOCK (allocator);
761 if (!g_atomic_int_get (&allocator->active))
764 if (gst_atomic_queue_length (allocator->free_queue) != allocator->count) {
765 GST_DEBUG_OBJECT (allocator, "allocator is still in use");
770 while (gst_atomic_queue_pop (allocator->free_queue)) {
774 for (i = 0; i < allocator->count; i++) {
775 GstV4l2MemoryGroup *group = allocator->groups[i];
776 allocator->groups[i] = NULL;
778 gst_v4l2_memory_group_free (group);
781 /* Not all drivers support reqbufs(0), so warn only */
782 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)
783 GST_WARNING_OBJECT (allocator,
784 "error releasing buffers: %s", g_strerror (errno));
786 allocator->count = 0;
788 g_atomic_int_set (&allocator->active, FALSE);
791 GST_OBJECT_UNLOCK (allocator);
/* Allocates (or recycles) a group of MMAP buffers: each plane not yet
 * backed is v4l2_mmap'ed at the driver-reported offset/length and
 * wrapped in a GstV4l2Memory; the allocator ref consumed by the group
 * pop is taken back per plane.  Sizes are then clamped to the
 * negotiated format.  On mmap failure all partial work is unwound. */
796 gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
798 GstV4l2MemoryGroup *group;
801 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
803 group = gst_v4l2_allocator_alloc (allocator);
808 for (i = 0; i < group->n_mem; i++) {
809 if (group->mem[i] == NULL) {
811 data = v4l2_mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
812 MAP_SHARED, allocator->video_fd, group->planes[i].m.mem_offset);
814 if (data == MAP_FAILED)
817 GST_LOG_OBJECT (allocator,
818 "mmap buffer length %d, data offset %d, plane %d",
819 group->planes[i].length, group->planes[i].data_offset, i);
821 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
822 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
825 /* Take back the allocator reference */
826 gst_object_ref (allocator);
829 group->mems_allocated++;
832 /* Ensure group size. Unlike GST, v4l2 have size (bytesused) initially set
833 * to 0. As length might be bigger than the expected size exposed in the
834 * format, we simply set bytesused initially and reset it here for
836 gst_v4l2_allocator_reset_size (allocator, group);
842 GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
844 _cleanup_failed_alloc (allocator, group);
/* Allocates a group of MMAP buffers exported as DMABUF: each plane is
 * exported with VIDIOC_EXPBUF, the fd is dup'ed and wrapped by
 * @dmabuf_allocator as GstDmaBufMemory, while the original
 * GstV4l2Memory is attached as qdata so it is recycled (via its dispose
 * hook) when the dmabuf wrapper dies.  Note the MMAP precondition: the
 * driver owns the buffers, DMABUF is only the export transport. */
850 gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
851 GstAllocator * dmabuf_allocator)
853 GstV4l2MemoryGroup *group;
856 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
858 group = gst_v4l2_allocator_alloc (allocator);
863 for (i = 0; i < group->n_mem; i++) {
868 if (group->mem[i] == NULL) {
869 struct v4l2_exportbuffer expbuf = { 0 };
871 expbuf.type = allocator->type;
872 expbuf.index = group->buffer.index;
874 expbuf.flags = O_CLOEXEC | O_RDWR;
876 if (v4l2_ioctl (allocator->video_fd, VIDIOC_EXPBUF, &expbuf) < 0)
879 GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d",
882 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
883 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
884 NULL, expbuf.fd, group);
886 /* Take back the allocator reference */
887 gst_object_ref (allocator);
890 g_assert (gst_is_v4l2_memory (group->mem[i]));
891 mem = (GstV4l2Memory *) group->mem[i];
893 if ((dmafd = dup (mem->dmafd)) < 0)
896 dma_mem = gst_dmabuf_allocator_alloc (dmabuf_allocator, dmafd,
899 gst_mini_object_set_qdata (GST_MINI_OBJECT (dma_mem),
900 GST_V4L2_MEMORY_QUARK, mem, (GDestroyNotify) gst_memory_unref);
902 group->mem[i] = dma_mem;
903 group->mems_allocated++;
906 gst_v4l2_allocator_reset_size (allocator, group);
912 GST_ERROR_OBJECT (allocator, "Failed to export DMABUF: %s",
918 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
924 _cleanup_failed_alloc (allocator, group);
/* Resets a DMABUF-import group to its "empty" state: zero sizes and
 * fd = -1 in both the GstMemory view and the v4l2 plane/buffer
 * structures, ready for the next gst_v4l2_allocator_import_dmabuf(). */
930 gst_v4l2_allocator_clear_dmabufin (GstV4l2Allocator * allocator,
931 GstV4l2MemoryGroup * group)
936 g_return_if_fail (allocator->memory == V4L2_MEMORY_DMABUF);
938 for (i = 0; i < group->n_mem; i++) {
940 mem = (GstV4l2Memory *) group->mem[i];
942 GST_LOG_OBJECT (allocator, "clearing DMABUF import, fd %i plane %d",
949 mem->mem.maxsize = 0;
954 /* Update v4l2 structure */
955 group->planes[i].length = 0;
956 group->planes[i].bytesused = 0;
957 group->planes[i].m.fd = -1;
958 group->planes[i].data_offset = 0;
961 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
962 group->buffer.bytesused = 0;
963 group->buffer.length = 0;
964 group->buffer.m.fd = -1;
/* Acquires a group prepared for DMABUF import: plane memories are
 * created empty (no data pointer, fd -1) and the group is cleared so
 * the importer can fill in the real fds later. */
969 gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator)
971 GstV4l2MemoryGroup *group;
974 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, NULL);
976 group = gst_v4l2_allocator_alloc (allocator);
981 GST_LOG_OBJECT (allocator, "allocating empty DMABUF import group");
983 for (i = 0; i < group->n_mem; i++) {
984 if (group->mem[i] == NULL) {
985 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
986 NULL, 0, 0, 0, 0, i, NULL, -1, group);
988 /* Take back the allocator reference */
989 gst_object_ref (allocator);
992 group->mems_allocated++;
995 gst_v4l2_allocator_clear_dmabufin (allocator, group);
/* Resets a USERPTR group to its empty state: zero sizes and NULL
 * userptr in both the GstMemory view and the v4l2 plane/buffer
 * structures, ready for the next gst_v4l2_allocator_import_userptr(). */
1001 gst_v4l2_allocator_clear_userptr (GstV4l2Allocator * allocator,
1002 GstV4l2MemoryGroup * group)
1007 g_return_if_fail (allocator->memory == V4L2_MEMORY_USERPTR);
1009 for (i = 0; i < group->n_mem; i++) {
1010 mem = (GstV4l2Memory *) group->mem[i];
1012 GST_LOG_OBJECT (allocator, "clearing USERPTR %p plane %d size %"
1013 G_GSIZE_FORMAT, mem->data, i, mem->mem.size);
1015 mem->mem.maxsize = 0;
1019 group->planes[i].length = 0;
1020 group->planes[i].bytesused = 0;
1021 group->planes[i].m.userptr = 0;
1024 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1025 group->buffer.bytesused = 0;
1026 group->buffer.length = 0;
1027 group->buffer.m.userptr = 0;
/* Acquires a group prepared for USERPTR import: plane memories are
 * created empty and the group is cleared so the importer can attach the
 * caller-owned pointers later. */
1031 GstV4l2MemoryGroup *
1032 gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator)
1034 GstV4l2MemoryGroup *group;
1037 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, NULL);
1039 group = gst_v4l2_allocator_alloc (allocator);
1044 GST_LOG_OBJECT (allocator, "allocating empty USERPTR group");
1046 for (i = 0; i < group->n_mem; i++) {
1048 if (group->mem[i] == NULL) {
1049 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
1050 NULL, 0, 0, 0, 0, i, NULL, -1, group);
1052 /* Take back the allocator reference */
1053 gst_object_ref (allocator);
1056 group->mems_allocated++;
1059 gst_v4l2_allocator_clear_userptr (allocator, group);
/* Imports @n_mem downstream DMABUF GstMemory objects into @group so the
 * buffer can be queued with V4L2_MEMORY_DMABUF.  Each fd is dup()ed so
 * the kernel side holds an independent descriptor, and sizes/offsets
 * are mirrored into the v4l2 plane structures.  Fails on plane-count
 * mismatch, non-dmabuf memory or dup() failure.
 * Fix: the non-mplane path copied planes[0].m.userptr into buffer.m.fd —
 * for DMABUF memory the valid member of the m union is .fd. */
1065 gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
1066 GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
1071 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);
1073 if (group->n_mem != n_mem)
1074 goto n_mem_missmatch;
1076 for (i = 0; i < group->n_mem; i++) {
1078 gsize size, offset, maxsize;
1080 if (!gst_is_dmabuf_memory (dma_mem[i]))
1083 size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);
1085 if ((dmafd = dup (gst_dmabuf_memory_get_fd (dma_mem[i]))) < 0)
1088 GST_LOG_OBJECT (allocator, "imported DMABUF as fd %i plane %d", dmafd, i);
1090 mem = (GstV4l2Memory *) group->mem[i];
1093 mem->mem.maxsize = maxsize;
1094 mem->mem.offset = offset;
1095 mem->mem.size = size;
1098 /* Update v4l2 structure */
1099 group->planes[i].length = maxsize;
1100 group->planes[i].bytesused = size;
1101 group->planes[i].m.fd = dmafd;
1102 group->planes[i].data_offset = offset;
1105 /* Copy into buffer structure if not using planes */
1106 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1107 group->buffer.bytesused = group->planes[0].bytesused;
1108 group->buffer.length = group->planes[0].length;
1109 group->buffer.m.fd = group->planes[0].m.fd;
1111 group->buffer.length = group->n_mem;
1118 GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
1124 GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF", i);
1129 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
1130 g_strerror (errno));
/* Imports caller-owned pointers into a USERPTR group.  For mplane
 * formats @n_planes must equal the driver's plane count; for non-mplane
 * the single plane takes the whole image.  Plane length is pinned to
 * the format's sizeimage (drivers reject shorter buffers) while
 * bytesused reflects the actual data size.  Mirrors the first plane
 * into the buffer structure for non-mplane queueing. */
1136 gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
1137 GstV4l2MemoryGroup * group, gsize img_size, int n_planes,
1138 gpointer * data, gsize * size)
1143 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, FALSE);
1145 /* TODO Support passing N plane from 1 memory to MPLANE v4l2 format */
1146 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type) && n_planes != group->n_mem)
1147 goto n_mem_missmatch;
1149 for (i = 0; i < group->n_mem; i++) {
1150 gsize maxsize, psize;
1152 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1153 struct v4l2_pix_format_mplane *pix = &allocator->format.fmt.pix_mp;
1154 maxsize = pix->plane_fmt[i].sizeimage;
1157 maxsize = allocator->format.fmt.pix.sizeimage;
1161 g_assert (psize <= img_size);
1163 GST_LOG_OBJECT (allocator, "imported USERPTR %p plane %d size %"
1164 G_GSIZE_FORMAT, data[i], i, psize);
1166 mem = (GstV4l2Memory *) group->mem[i];
1168 mem->mem.maxsize = maxsize;
1169 mem->mem.size = psize;
1170 mem->data = data[i];
1172 group->planes[i].length = maxsize;
1173 group->planes[i].bytesused = psize;
1174 group->planes[i].m.userptr = (unsigned long) data[i];
1175 group->planes[i].data_offset = 0;
1178 /* Copy into buffer structure if not using planes */
1179 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1180 group->buffer.bytesused = group->planes[0].bytesused;
1181 group->buffer.length = group->planes[0].length;
1182 group->buffer.m.userptr = group->planes[0].m.userptr;
1184 group->buffer.length = group->n_mem;
1191 GST_ERROR_OBJECT (allocator, "Got %i userptr plane while driver need %i",
1192 n_planes, group->n_mem);
/* Reclaims all buffers still marked queued in the driver (e.g. after
 * STREAMOFF): clears their queued flags, resets each group to its
 * post-import state and drops the per-plane refs taken at qbuf time so
 * the groups return to the free queue. */
1198 gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
1202 GST_OBJECT_LOCK (allocator);
1204 if (!g_atomic_int_get (&allocator->active))
1207 for (i = 0; i < allocator->count; i++) {
1208 GstV4l2MemoryGroup *group = allocator->groups[i];
1211 if (IS_QUEUED (group->buffer)) {
1212 UNSET_QUEUED (group->buffer);
1214 gst_v4l2_allocator_reset_group (allocator, group);
1216 for (n = 0; n < group->n_mem; n++)
1217 gst_memory_unref (group->mem[n]);
1222 GST_OBJECT_UNLOCK (allocator);
/* Queues @group's buffer into the driver with VIDIOC_QBUF.  bytesused
 * is refreshed from the current GstMemory sizes first; each plane
 * memory is ref'ed so it survives while the driver owns it (the refs
 * are dropped again on failure or at dequeue).  The queued flag is
 * force-corrected in both directions when the driver reports an
 * inconsistent state. */
1226 gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
1227 GstV4l2MemoryGroup * group)
1229 gboolean ret = TRUE;
1232 g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);
1235 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1236 for (i = 0; i < group->n_mem; i++)
1237 group->planes[i].bytesused =
1238 gst_memory_get_sizes (group->mem[i], NULL, NULL);
1240 group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);
1243 /* Ensure the memory will stay around and is RO */
1244 for (i = 0; i < group->n_mem; i++)
1245 gst_memory_ref (group->mem[i]);
1247 if (v4l2_ioctl (allocator->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
1248 GST_ERROR_OBJECT (allocator, "failed queueing buffer %i: %s",
1249 group->buffer.index, g_strerror (errno));
1251 /* Release the memory, possibly making it RW again */
1252 for (i = 0; i < group->n_mem; i++)
1253 gst_memory_unref (group->mem[i]);
1256 if (IS_QUEUED (group->buffer)) {
1257 GST_DEBUG_OBJECT (allocator,
1258 "driver pretends buffer is queued even if queue failed");
1259 UNSET_QUEUED (group->buffer);
1264 GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
1265 group->buffer.index, group->buffer.flags);
1267 if (!IS_QUEUED (group->buffer)) {
1268 GST_DEBUG_OBJECT (allocator,
1269 "driver pretends buffer is not queued even if queue succeeded");
1270 SET_QUEUED (group->buffer);
/* Dequeues one buffer with VIDIOC_DQBUF and returns its group in
 * @group_out.  The freshly-dequeued v4l2_buffer replaces the group's
 * copy (plane data copied back for mplane, mirrored into planes[0]
 * otherwise).  For output devices sizes are reset to the format; for
 * capture each memory is resized to bytesused, clamped to maxsize with
 * a warning if the driver overruns.  The per-plane refs taken at qbuf
 * are dropped.  EPIPE from the driver maps to GST_FLOW_EOS (last
 * buffer); other errnos are logged per V4L2 semantics. */
1278 gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
1279 GstV4l2MemoryGroup ** group_out)
1281 struct v4l2_buffer buffer = { 0 };
1282 struct v4l2_plane planes[VIDEO_MAX_PLANES] = { {0} };
1285 GstV4l2MemoryGroup *group = NULL;
1287 g_return_val_if_fail (g_atomic_int_get (&allocator->active), GST_FLOW_ERROR);
1289 buffer.type = allocator->type;
1290 buffer.memory = allocator->memory;
1292 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1293 buffer.length = allocator->format.fmt.pix_mp.num_planes;
1294 buffer.m.planes = planes;
1297 if (v4l2_ioctl (allocator->video_fd, VIDIOC_DQBUF, &buffer) < 0)
1300 group = allocator->groups[buffer.index];
1302 if (!IS_QUEUED (group->buffer)) {
1303 GST_ERROR_OBJECT (allocator,
1304 "buffer %i was not queued, this indicate a driver bug.", buffer.index);
1305 return GST_FLOW_ERROR;
1308 group->buffer = buffer;
1310 GST_LOG_OBJECT (allocator, "dequeued buffer %i (flags 0x%X)", buffer.index,
1313 if (IS_QUEUED (group->buffer)) {
1314 GST_DEBUG_OBJECT (allocator,
1315 "driver pretends buffer is queued even if dequeue succeeded");
1316 UNSET_QUEUED (group->buffer);
1319 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1320 group->buffer.m.planes = group->planes;
1321 memcpy (group->planes, buffer.m.planes, sizeof (planes));
1323 group->planes[0].bytesused = group->buffer.bytesused;
1324 group->planes[0].length = group->buffer.length;
1325 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
1326 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
1329 /* And update memory size */
1330 if (V4L2_TYPE_IS_OUTPUT (allocator->type)) {
1331 gst_v4l2_allocator_reset_size (allocator, group);
1333 /* for capture, simply read the size */
1334 for (i = 0; i < group->n_mem; i++) {
1335 if (G_LIKELY (group->planes[i].bytesused <= group->mem[i]->maxsize))
1336 gst_memory_resize (group->mem[i], 0, group->planes[i].bytesused);
1338 GST_WARNING_OBJECT (allocator,
1339 "v4l2 provided buffer that is too big for the memory it was "
1340 "writing into. v4l2 claims %" G_GUINT32_FORMAT " bytes used but "
1341 "memory is only %" G_GSIZE_FORMAT "B. This is probably a driver "
1342 "bug.", group->planes[i].bytesused, group->mem[i]->maxsize);
1343 gst_memory_resize (group->mem[i], 0, group->mem[i]->maxsize);
1348 /* Release the memory, possibly making it RW again */
1349 for (i = 0; i < group->n_mem; i++)
1350 gst_memory_unref (group->mem[i]);
1356 if (errno == EPIPE) {
1357 GST_DEBUG_OBJECT (allocator, "broken pipe signals last buffer");
1358 return GST_FLOW_EOS;
1361 GST_ERROR_OBJECT (allocator, "failed dequeuing a %s buffer: %s",
1362 memory_type_to_str (allocator->memory), g_strerror (errno));
1366 GST_WARNING_OBJECT (allocator,
1367 "Non-blocking I/O has been selected using O_NONBLOCK and"
1368 " no buffer was in the outgoing queue.");
1371 GST_ERROR_OBJECT (allocator,
1372 "The buffer type is not supported, or the index is out of bounds, "
1373 "or no buffers have been allocated yet, or the userptr "
1374 "or length are invalid.");
1377 GST_ERROR_OBJECT (allocator,
1378 "insufficient memory to enqueue a user pointer buffer");
1381 GST_INFO_OBJECT (allocator,
1382 "VIDIOC_DQBUF failed due to an internal error."
1383 " Can also indicate temporary problems like signal loss."
1384 " Note the driver might dequeue an (empty) buffer despite"
1385 " returning an error, or even stop capturing.");
1386 /* have we de-queued a buffer ? */
1387 if (!IS_QUEUED (buffer)) {
1388 GST_DEBUG_OBJECT (allocator, "reenqueueing buffer");
1389 /* FIXME ... should we do something here? */
1393 GST_WARNING_OBJECT (allocator, "could not sync on a buffer on device");
1396 GST_WARNING_OBJECT (allocator,
1397 "Grabbing frame got interrupted unexpectedly. %d: %s.", errno,
1398 g_strerror (errno));
1402 return GST_FLOW_ERROR;
/* Returns a group to its ready-to-fill state according to the memory
 * model: imported USERPTR/DMABUF groups are emptied, MMAP groups need
 * no clearing; all models then get their sizes reset to the format. */
1406 gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
1407 GstV4l2MemoryGroup * group)
1409 switch (allocator->memory) {
1410 case V4L2_MEMORY_USERPTR:
1411 gst_v4l2_allocator_clear_userptr (allocator, group);
1413 case V4L2_MEMORY_DMABUF:
1414 gst_v4l2_allocator_clear_dmabufin (allocator, group);
1416 case V4L2_MEMORY_MMAP:
1419 g_assert_not_reached ();
1423 gst_v4l2_allocator_reset_size (allocator, group);