2 * Copyright (C) 2014 Collabora Ltd.
3 * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 # define _GNU_SOURCE /* O_CLOEXEC */
28 #include "ext/videodev2.h"
30 #include "gstv4l2object.h"
31 #include "gstv4l2allocator.h"
33 #include <gst/allocators/gstdmabuf.h>
38 #include <sys/types.h>
41 #define GST_V4L2_MEMORY_TYPE "V4l2Memory"
/* GObject boilerplate: registers GstV4l2Allocator as a GstAllocator subclass
 * and sets up a dedicated debug category for this file. */
43 #define gst_v4l2_allocator_parent_class parent_class
44 G_DEFINE_TYPE (GstV4l2Allocator, gst_v4l2_allocator, GST_TYPE_ALLOCATOR);
46 GST_DEBUG_CATEGORY_STATIC (v4l2allocator_debug);
47 #define GST_CAT_DEFAULT v4l2allocator_debug
/* Helpers for the "queued" state of a struct v4l2_buffer. Note that the
 * DONE flag is cleared/tested together with QUEUED: a DONE buffer is still
 * owned by the driver until dequeued. */
49 #define UNSET_QUEUED(buffer) \
50 ((buffer).flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
52 #define SET_QUEUED(buffer) ((buffer).flags |= V4L2_BUF_FLAG_QUEUED)
54 #define IS_QUEUED(buffer) \
55 ((buffer).flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
/* Signal id table (GROUP_RELEASED is emitted when a buffer group returns
 * to the free queue — see gst_v4l2_allocator_release). */
63 static guint gst_v4l2_allocator_signals[LAST_SIGNAL] = { 0 };
/* Forward declaration; definition below. */
65 static void gst_v4l2_allocator_release (GstV4l2Allocator * allocator,
/* Map a V4L2_MEMORY_* enum value to a short human-readable string,
 * used only in debug/log output. */
69 memory_type_to_str (guint32 memory)
72 case V4L2_MEMORY_MMAP:
74 case V4L2_MEMORY_USERPTR:
76 case V4L2_MEMORY_DMABUF:
83 /*************************************/
84 /* GstV4lMemory implementation */
85 /*************************************/
/* GstMemory map vfunc for GstV4l2Memory: how the data pointer is obtained
 * depends on the V4L2 memory mode of the owning buffer group. */
88 _v4l2mem_map (GstV4l2Memory * mem, gsize maxsize, GstMapFlags flags)
92 switch (mem->group->buffer.memory) {
93 case V4L2_MEMORY_MMAP:
94 case V4L2_MEMORY_USERPTR:
97 case V4L2_MEMORY_DMABUF:
98 /* v4l2 dmabuf memory are not shared with downstream */
99 g_assert_not_reached ();
/* Unexpected/unknown mode: log instead of asserting. */
102 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMemory unmap vfunc: mirror of _v4l2mem_map for the same memory modes. */
109 _v4l2mem_unmap (GstV4l2Memory * mem)
111 gboolean ret = FALSE;
113 switch (mem->group->buffer.memory) {
114 case V4L2_MEMORY_MMAP:
115 case V4L2_MEMORY_USERPTR:
118 case V4L2_MEMORY_DMABUF:
119 /* v4l2 dmabuf memory is not shared with downstream */
120 g_assert_not_reached ();
123 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* Mini-object dispose hook: instead of being destroyed, a GstV4l2Memory is
 * revived (re-ref'ed) and handed back to the allocator's free pool so the
 * underlying V4L2 buffer plane can be reused. */
130 _v4l2mem_dispose (GstV4l2Memory * mem)
132 GstV4l2Allocator *allocator = (GstV4l2Allocator *) mem->mem.allocator;
133 GstV4l2MemoryGroup *group = mem->group;
136 if (group->mem[mem->plane]) {
137 /* We may have a dmabuf, replace it with returned original memory */
138 group->mem[mem->plane] = gst_memory_ref ((GstMemory *) mem);
139 gst_v4l2_allocator_release (allocator, mem);
/* Keep the allocator alive while the memory is outstanding. */
142 gst_object_ref (allocator);
/* Allocate and initialize a GstV4l2Memory wrapping one plane of a V4L2
 * buffer. 'data' is the mmap'ed pointer (or NULL), 'dmafd' the exported
 * DMABUF fd (or -1), 'group' the owning buffer group. */
149 static inline GstV4l2Memory *
150 _v4l2mem_new (GstMemoryFlags flags, GstAllocator * allocator,
151 GstMemory * parent, gsize maxsize, gsize align, gsize offset, gsize size,
152 gint plane, gpointer data, int dmafd, GstV4l2MemoryGroup * group)
156 mem = g_slice_new0 (GstV4l2Memory);
157 gst_memory_init (GST_MEMORY_CAST (mem),
158 flags, allocator, parent, maxsize, align, offset, size);
/* Install the dispose hook that recycles the memory into the free pool. */
161 mem->mem.mini_object.dispose =
162 (GstMiniObjectDisposeFunction) _v4l2mem_dispose;
/* GstMemory share vfunc: create a read-only sub-memory of 'mem' covering
 * [offset, offset+size), parented to the top-most memory in the chain. */
172 static GstV4l2Memory *
173 _v4l2mem_share (GstV4l2Memory * mem, gssize offset, gsize size)
178 /* find the real parent */
179 if ((parent = mem->mem.parent) == NULL)
180 parent = (GstMemory *) mem;
/* Presumably size == -1 means "to the end" — the guarding check is not
 * visible in this fragment. */
183 size = mem->mem.size - offset;
185 /* the shared memory is always readonly */
186 sub = _v4l2mem_new (GST_MINI_OBJECT_FLAGS (parent) |
187 GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->mem.allocator, parent,
188 mem->mem.maxsize, mem->mem.align, offset, size, mem->plane, mem->data,
/* GstMemory is_span vfunc: two memories span if they are contiguous within
 * the same parent; returns the offset of mem1 relative to its parent. */
195 _v4l2mem_is_span (GstV4l2Memory * mem1, GstV4l2Memory * mem2, gsize * offset)
198 *offset = mem1->mem.offset - mem1->mem.parent->offset;
200 /* and memory is contiguous */
201 return mem1->mem.offset + mem1->mem.size == mem2->mem.offset;
/* Returns TRUE if 'mem' is a GstV4l2Memory (checked by memory type string). */
205 gst_is_v4l2_memory (GstMemory * mem)
207 return gst_memory_is_type (mem, GST_V4L2_MEMORY_TYPE);
/* Lazily created quark used as the qdata key that ties an exported DMABUF
 * GstMemory back to its originating GstV4l2Memory (see alloc_dmabuf). */
211 gst_v4l2_memory_quark (void)
213 static GQuark quark = 0;
216 quark = g_quark_from_string ("GstV4l2Memory");
222 /*************************************/
223 /* GstV4l2MemoryGroup implementation */
224 /*************************************/
/* Free a memory group: drop every plane memory it still holds, then free
 * the group structure itself. */
227 gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
231 for (i = 0; i < group->n_mem; i++) {
232 GstMemory *mem = group->mem[i];
233 group->mem[i] = NULL;
235 gst_memory_unref (mem);
238 g_slice_free (GstV4l2MemoryGroup, group);
/* Create a memory group describing V4L2 buffer 'index': query the buffer
 * from the driver (VIDIOC_QUERYBUF), sanity-check the result against the
 * negotiated format, and normalize single-planar information into the
 * plane array so later code has a single code path.
 * Returns NULL on failure. */
241 static GstV4l2MemoryGroup *
242 gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
244 GstV4l2Object *obj = allocator->obj;
245 guint32 memory = allocator->memory;
246 struct v4l2_format *format = &obj->format;
247 GstV4l2MemoryGroup *group;
248 gsize img_size, buf_size;
250 group = g_slice_new0 (GstV4l2MemoryGroup);
252 group->buffer.type = format->type;
253 group->buffer.index = index;
254 group->buffer.memory = memory;
256 if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
257 group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
258 group->buffer.m.planes = group->planes;
263 if (obj->ioctl (obj->video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
264 goto querybuf_failed;
/* Defensive check: a sane driver must echo the requested index back. */
266 if (group->buffer.index != index) {
267 GST_ERROR_OBJECT (allocator, "Buffer index returned by VIDIOC_QUERYBUF "
268 "didn't match, this indicate the presence of a bug in your driver or "
270 g_slice_free (GstV4l2MemoryGroup, group);
274 /* Check that provided size matches the format we have negotiated. Failing
275 * there usually means a driver or libv4l bug. */
276 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
279 for (i = 0; i < group->n_mem; i++) {
280 img_size = obj->format.fmt.pix_mp.plane_fmt[i].sizeimage;
281 buf_size = group->planes[i].length;
282 if (buf_size < img_size)
283 goto buffer_too_short;
286 img_size = obj->format.fmt.pix.sizeimage;
287 buf_size = group->buffer.length;
288 if (buf_size < img_size)
289 goto buffer_too_short;
292 /* We save non planar buffer information into the multi-planar plane array
293 * to avoid duplicating the code later */
294 if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
295 group->planes[0].bytesused = group->buffer.bytesused;
296 group->planes[0].length = group->buffer.length;
297 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
298 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
301 GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
302 GST_LOG_OBJECT (allocator, "  index:     %u", group->buffer.index);
303 GST_LOG_OBJECT (allocator, "  type:      %d", group->buffer.type);
304 GST_LOG_OBJECT (allocator, "  flags:     %08x", group->buffer.flags);
305 GST_LOG_OBJECT (allocator, "  field:     %d", group->buffer.field);
306 GST_LOG_OBJECT (allocator, "  memory:    %d", group->buffer.memory);
307 GST_LOG_OBJECT (allocator, "  planes:    %d", group->n_mem);
309 #ifndef GST_DISABLE_GST_DEBUG
310 if (memory == V4L2_MEMORY_MMAP) {
312 for (i = 0; i < group->n_mem; i++) {
313 GST_LOG_OBJECT (allocator, "  [%u] bytesused: %u, length: %u", i,
314 group->planes[i].bytesused, group->planes[i].length);
315 GST_LOG_OBJECT (allocator, "  [%u] MMAP offset:  %u", i,
316 group->planes[i].m.mem_offset);
/* Error labels (goto-based cleanup). */
325 GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
330 GST_ERROR ("buffer size %" G_GSIZE_FORMAT
331 " is smaller then negotiated size %" G_GSIZE_FORMAT
332 ", this is usually the result of a bug in the v4l2 driver or libv4l.",
337 gst_v4l2_memory_group_free (group);
342 /*************************************/
343 /* GstV4lAllocator implementation */
344 /*************************************/
/* Called (from _v4l2mem_dispose) when one plane memory comes back. Once all
 * planes of the group have returned, push the group on the free queue and
 * emit the "group-released" signal so e.g. the buffer pool can re-queue. */
347 gst_v4l2_allocator_release (GstV4l2Allocator * allocator, GstV4l2Memory * mem)
349 GstV4l2MemoryGroup *group = mem->group;
351 GST_LOG_OBJECT (allocator, "plane %i of buffer %u released",
352 mem->plane, group->buffer.index);
/* Mode-specific per-plane cleanup (bodies not visible in this fragment). */
354 switch (allocator->memory) {
355 case V4L2_MEMORY_DMABUF:
359 case V4L2_MEMORY_USERPTR:
366 /* When all memories are back, put the group back in the free queue */
367 if (g_atomic_int_dec_and_test (&group->mems_allocated)) {
368 GST_LOG_OBJECT (allocator, "buffer %u released", group->buffer.index);
369 gst_atomic_queue_push (allocator->free_queue, group);
370 g_signal_emit (allocator, gst_v4l2_allocator_signals[GROUP_RELEASED], 0);
373 /* Keep last, allocator may be freed after this call */
374 g_object_unref (allocator);
/* GstAllocator free vfunc: actually destroy a GstV4l2Memory. Only
 * unparented (top-level) memories own resources — for MMAP buffers the
 * mapping is released here. Shared sub-memories just free the struct. */
378 gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
380 GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
381 GstV4l2Object *obj = allocator->obj;
382 GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
383 GstV4l2MemoryGroup *group = mem->group;
385 /* Only free unparented memory */
386 if (mem->mem.parent == NULL) {
387 GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
388 mem->plane, group->buffer.index);
390 if (allocator->memory == V4L2_MEMORY_MMAP) {
392 obj->munmap (mem->data, group->planes[mem->plane].length);
395 /* This applies to both mmap with expbuf, and dmabuf imported memory */
400 g_slice_free (GstV4l2Memory, mem);
/* GObject dispose: free all remaining memory groups before chaining up. */
404 gst_v4l2_allocator_dispose (GObject * obj)
406 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
409 GST_LOG_OBJECT (obj, "called");
411 for (i = 0; i < allocator->count; i++) {
412 GstV4l2MemoryGroup *group = allocator->groups[i];
413 allocator->groups[i] = NULL;
415 gst_v4l2_memory_group_free (group);
418 G_OBJECT_CLASS (parent_class)->dispose (obj);
/* GObject finalize: release the free queue (created in _init), chain up. */
422 gst_v4l2_allocator_finalize (GObject * obj)
424 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
426 GST_LOG_OBJECT (obj, "called");
428 gst_atomic_queue_unref (allocator->free_queue);
430 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* Class init: wire GObject lifecycle vfuncs, the GstAllocator free vfunc
 * (alloc is NULL — allocation goes through the custom entry points below),
 * register the "group-released" signal and the debug category. */
434 gst_v4l2_allocator_class_init (GstV4l2AllocatorClass * klass)
436 GObjectClass *object_class;
437 GstAllocatorClass *allocator_class;
439 allocator_class = (GstAllocatorClass *) klass;
440 object_class = (GObjectClass *) klass;
442 allocator_class->alloc = NULL;
443 allocator_class->free = gst_v4l2_allocator_free;
445 object_class->dispose = gst_v4l2_allocator_dispose;
446 object_class->finalize = gst_v4l2_allocator_finalize;
448 gst_v4l2_allocator_signals[GROUP_RELEASED] = g_signal_new ("group-released",
449 G_TYPE_FROM_CLASS (object_class), G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
452 GST_DEBUG_CATEGORY_INIT (v4l2allocator_debug, "v4l2allocator", 0,
/* Instance init: install the GstV4l2Memory map/unmap/share/is_span vfuncs,
 * create the free-group queue (sized for the V4L2 maximum frame count) and
 * mark the allocator as custom-alloc (no generic gst_allocator_alloc). */
457 gst_v4l2_allocator_init (GstV4l2Allocator * allocator)
459 GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
461 alloc->mem_type = GST_V4L2_MEMORY_TYPE;
462 alloc->mem_map = (GstMemoryMapFunction) _v4l2mem_map;
463 alloc->mem_unmap = (GstMemoryUnmapFunction) _v4l2mem_unmap;
464 alloc->mem_share = (GstMemoryShareFunction) _v4l2mem_share;
465 alloc->mem_is_span = (GstMemoryIsSpanFunction) _v4l2mem_is_span;
466 /* Use the default, fallback copy function */
468 allocator->free_queue = gst_atomic_queue_new (VIDEO_MAX_FRAME);
470 GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
/* Probe whether the driver supports a given V4L2 memory mode: try
 * VIDIOC_REQBUFS with count 0, and if that works also probe
 * VIDIOC_CREATE_BUFS. Returns the corresponding allocator flag bits. */
473 #define GST_V4L2_ALLOCATOR_PROBE(obj,type) \
474 gst_v4l2_allocator_probe ((obj), V4L2_MEMORY_ ## type, \
475 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS, \
476 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS)
478 gst_v4l2_allocator_probe (GstV4l2Allocator * allocator, guint32 memory,
479 guint32 breq_flag, guint32 bcreate_flag)
481 GstV4l2Object *obj = allocator->obj;
482 struct v4l2_requestbuffers breq = { 0 };
485 breq.type = obj->type;
487 breq.memory = memory;
489 if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) == 0) {
490 struct v4l2_create_buffers bcreate = { 0 };
494 bcreate.memory = memory;
495 bcreate.format = obj->format;
497 if ((obj->ioctl (obj->video_fd, VIDIOC_CREATE_BUFS, &bcreate) == 0))
498 flags |= bcreate_flag;
/* Allocate one additional buffer via VIDIOC_CREATE_BUFS (only valid while
 * active and when the driver supports CREATE_BUFS), then wrap it in a new
 * memory group stored at the driver-assigned index. Runs under the
 * allocator's object lock. Returns NULL on failure. */
504 static GstV4l2MemoryGroup *
505 gst_v4l2_allocator_create_buf (GstV4l2Allocator * allocator)
507 GstV4l2Object *obj = allocator->obj;
508 struct v4l2_create_buffers bcreate = { 0 };
509 GstV4l2MemoryGroup *group = NULL;
511 GST_OBJECT_LOCK (allocator);
513 if (!g_atomic_int_get (&allocator->active))
516 bcreate.memory = allocator->memory;
517 bcreate.format = obj->format;
520 if (!allocator->can_allocate)
523 if (obj->ioctl (obj->video_fd, VIDIOC_CREATE_BUFS, &bcreate) < 0)
524 goto create_bufs_failed;
/* The driver must hand out an index we are not already tracking. */
526 if (allocator->groups[bcreate.index] != NULL)
527 goto create_bufs_bug;
529 group = gst_v4l2_allocator_create_buf (allocator);
532 allocator->groups[bcreate.index] = group;
537 GST_OBJECT_UNLOCK (allocator);
/* Error labels. */
542 GST_WARNING_OBJECT (allocator, "error creating a new buffer: %s",
548 GST_ERROR_OBJECT (allocator, "created buffer has already used buffer "
549 "index %i, this means there is an bug in your driver or libv4l2",
/* Fetch a free memory group: pop from the free queue, or create a brand new
 * buffer when the queue is empty and CREATE_BUFS is available. A failed
 * creation disables further attempts so we don't hammer the ioctl. */
555 static GstV4l2MemoryGroup *
556 gst_v4l2_allocator_alloc (GstV4l2Allocator * allocator)
558 GstV4l2MemoryGroup *group;
560 if (!g_atomic_int_get (&allocator->active))
563 group = gst_atomic_queue_pop (allocator->free_queue);
566 if (allocator->can_allocate) {
567 group = gst_v4l2_allocator_create_buf (allocator);
569 /* Don't hammer on CREATE_BUFS */
571 allocator->can_allocate = FALSE;
/* Reset every memory in the group to the size negotiated in the format
 * (sizeimage), both maxsize and the visible GstMemory size. For USERPTR /
 * DMABUF (imported) memory special handling applies (fragment truncated). */
579 gst_v4l2_allocator_reset_size (GstV4l2Allocator * allocator,
580 GstV4l2MemoryGroup * group)
582 GstV4l2Object *obj = allocator->obj;
584 gboolean imported = FALSE;
586 switch (allocator->memory) {
587 case V4L2_MEMORY_USERPTR:
588 case V4L2_MEMORY_DMABUF:
593 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
596 for (i = 0; i < group->n_mem; i++) {
597 size = obj->format.fmt.pix_mp.plane_fmt[i].sizeimage;
600 group->mem[i]->maxsize = size;
602 gst_memory_resize (group->mem[i], 0, size);
606 size = obj->format.fmt.pix.sizeimage;
609 group->mem[0]->maxsize = size;
611 gst_memory_resize (group->mem[0], 0, size);
/* Undo a partially-successful group allocation: unref whatever memories were
 * created (their dispose path returns the group to the free queue), or push
 * the untouched group back on the free queue directly. */
616 _cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
618 if (group->mems_allocated > 0) {
620 /* If one or more mmap worked, we need to unref the memory, otherwise
621 * they will keep a ref on the allocator and leak it. This will put back
622 * the group into the free_queue */
623 for (i = 0; i < group->n_mem; i++)
624 gst_memory_unref (group->mem[i]);
626 /* Otherwise, group has to be on free queue for _stop() to work */
627 gst_atomic_queue_push (allocator->free_queue, group);
/* Construct a new allocator bound to 'v4l2object', named after its parent
 * element, and probe which memory modes (MMAP/USERPTR/DMABUF) the driver
 * supports. Drivers that fail all probes are assumed to at least do MMAP. */
634 gst_v4l2_allocator_new (GstObject * parent, GstV4l2Object * v4l2object)
636 GstV4l2Allocator *allocator;
638 gchar *name, *parent_name;
640 parent_name = gst_object_get_name (parent);
641 name = g_strconcat (parent_name, ":allocator", NULL);
642 g_free (parent_name);
644 allocator = g_object_new (GST_TYPE_V4L2_ALLOCATOR, "name", name, NULL);
645 gst_object_ref_sink (allocator);
648 /* Save everything */
649 allocator->obj = v4l2object;
/* Probe each memory mode; results accumulate as allocator flags. */
651 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
652 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, USERPTR);
653 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, DMABUF);
657 /* Drivers not ported from videobuf to videobuf2 don't allow freeing buffers
658 * using REQBUFS(0). This is a workaround to still support these drivers,
659 * which are known to have MMAP support. */
660 GST_WARNING_OBJECT (allocator, "Could not probe supported memory type, "
661 "assuming MMAP is supported, this is expected for older drivers not "
662 " yet ported to videobuf2 framework");
663 flags = GST_V4L2_ALLOCATOR_FLAG_MMAP_REQBUFS;
666 GST_OBJECT_FLAG_SET (allocator, flags);
/* Activate the allocator: request 'count' buffers from the driver in the
 * given memory mode (VIDIOC_REQBUFS), create a memory group per returned
 * buffer and seed the free queue. The driver may grant fewer buffers than
 * requested; the granted count is returned via allocator->count. */
672 gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
675 GstV4l2Object *obj = allocator->obj;
676 struct v4l2_requestbuffers breq = { count, obj->type, memory };
677 gboolean can_allocate;
680 g_return_val_if_fail (count != 0, 0);
682 GST_OBJECT_LOCK (allocator);
684 if (g_atomic_int_get (&allocator->active))
687 if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) < 0)
/* CREATE_BUFS support depends on the memory mode that was probed. */
694 case V4L2_MEMORY_MMAP:
695 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
697 case V4L2_MEMORY_USERPTR:
698 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
700 case V4L2_MEMORY_DMABUF:
701 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
704 can_allocate = FALSE;
708 GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
709 breq.count, memory_type_to_str (memory), count);
711 allocator->can_allocate = can_allocate;
712 allocator->count = breq.count;
713 allocator->memory = memory;
715 /* Create memory groups */
716 for (i = 0; i < allocator->count; i++) {
717 allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
718 if (allocator->groups[i] == NULL)
721 gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);
724 g_atomic_int_set (&allocator->active, TRUE);
727 GST_OBJECT_UNLOCK (allocator);
/* Error labels. */
732 GST_ERROR_OBJECT (allocator, "allocator already active");
737 GST_ERROR_OBJECT (allocator,
738 "error requesting %d buffers: %s", count, g_strerror (errno));
743 GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
/* Deactivate the allocator: refuses while memories are still outstanding
 * (free queue shorter than count), otherwise drains the queue, frees all
 * groups and releases the driver buffers with REQBUFS(0). */
754 gst_v4l2_allocator_stop (GstV4l2Allocator * allocator)
756 GstV4l2Object *obj = allocator->obj;
757 struct v4l2_requestbuffers breq = { 0, obj->type, allocator->memory };
759 GstV4l2Return ret = GST_V4L2_OK;
761 GST_DEBUG_OBJECT (allocator, "stop allocator");
763 GST_OBJECT_LOCK (allocator);
765 if (!g_atomic_int_get (&allocator->active))
768 if (gst_atomic_queue_length (allocator->free_queue) != allocator->count) {
769 GST_DEBUG_OBJECT (allocator, "allocator is still in use");
774 while (gst_atomic_queue_pop (allocator->free_queue)) {
778 for (i = 0; i < allocator->count; i++) {
779 GstV4l2MemoryGroup *group = allocator->groups[i];
780 allocator->groups[i] = NULL;
782 gst_v4l2_memory_group_free (group);
785 /* Not all drivers support reqbufs(0), so warn only */
786 if (obj->ioctl (obj->video_fd, VIDIOC_REQBUFS, &breq) < 0)
787 GST_WARNING_OBJECT (allocator,
/* NOTE(review): duplicated word "buffers buffers" in this log message —
 * runtime string left untouched here; fix separately. */
788 "error releasing buffers buffers: %s", g_strerror (errno));
790 allocator->count = 0;
792 g_atomic_int_set (&allocator->active, FALSE);
795 GST_OBJECT_UNLOCK (allocator);
/* Allocate a group of MMAP memories: take a free group, mmap() each plane
 * that isn't already wrapped, and reset sizes to the negotiated format.
 * On mmap failure, _cleanup_failed_alloc recycles the group. */
800 gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
802 GstV4l2Object *obj = allocator->obj;
803 GstV4l2MemoryGroup *group;
806 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
808 group = gst_v4l2_allocator_alloc (allocator);
813 for (i = 0; i < group->n_mem; i++) {
814 if (group->mem[i] == NULL) {
816 data = obj->mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
817 MAP_SHARED, obj->video_fd, group->planes[i].m.mem_offset);
819 if (data == MAP_FAILED)
822 GST_LOG_OBJECT (allocator,
823 "mmap buffer length %d, data offset %d, plane %d",
824 group->planes[i].length, group->planes[i].data_offset, i);
826 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
827 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
830 /* Take back the allocator reference */
831 gst_object_ref (allocator);
834 group->mems_allocated++;
837 /* Ensure group size. Unlike GST, v4l2 have size (bytesused) initially set
838 * to 0. As length might be bigger than the expected size exposed in the
839 * format, we simply set bytesused initially and reset it here for
841 gst_v4l2_allocator_reset_size (allocator, group);
/* Error label: mmap failed. */
847 GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
849 _cleanup_failed_alloc (allocator, group);
/* Allocate a group whose planes are exported as DMABUF: the driver buffers
 * are MMAP-mode, each plane fd is exported with VIDIOC_EXPBUF, dup()ed and
 * wrapped in a GstDmaBufMemory from 'dmabuf_allocator'. The original
 * GstV4l2Memory is attached as qdata so dispose returns it to the pool. */
855 gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
856 GstAllocator * dmabuf_allocator)
858 GstV4l2Object *obj = allocator->obj;
859 GstV4l2MemoryGroup *group;
862 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
864 group = gst_v4l2_allocator_alloc (allocator);
869 for (i = 0; i < group->n_mem; i++) {
874 if (group->mem[i] == NULL) {
875 struct v4l2_exportbuffer expbuf = { 0 };
877 expbuf.type = obj->type;
878 expbuf.index = group->buffer.index;
880 expbuf.flags = O_CLOEXEC | O_RDWR;
882 if (obj->ioctl (obj->video_fd, VIDIOC_EXPBUF, &expbuf) < 0)
885 GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d",
888 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
889 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
890 NULL, expbuf.fd, group);
892 /* Take back the allocator reference */
893 gst_object_ref (allocator);
896 group->mems_allocated++;
898 g_assert (gst_is_v4l2_memory (group->mem[i]));
899 mem = (GstV4l2Memory *) group->mem[i];
/* dup() so the GstDmaBufMemory owns an independent fd lifetime. */
901 if ((dmafd = dup (mem->dmafd)) < 0)
904 dma_mem = gst_dmabuf_allocator_alloc (dmabuf_allocator, dmafd,
907 gst_mini_object_set_qdata (GST_MINI_OBJECT (dma_mem),
908 GST_V4L2_MEMORY_QUARK, mem, (GDestroyNotify) gst_memory_unref);
910 group->mem[i] = dma_mem;
913 gst_v4l2_allocator_reset_size (allocator, group);
/* Error labels. */
919 GST_ERROR_OBJECT (allocator, "Failed to export DMABUF: %s",
925 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
931 _cleanup_failed_alloc (allocator, group);
/* Reset a DMABUF-import group to the "nothing imported" state: zero sizes
 * and set every fd to -1, in both the GstMemory and the v4l2 plane/buffer
 * structures. */
937 gst_v4l2_allocator_clear_dmabufin (GstV4l2Allocator * allocator,
938 GstV4l2MemoryGroup * group)
940 GstV4l2Object *obj = allocator->obj;
944 g_return_if_fail (allocator->memory == V4L2_MEMORY_DMABUF);
946 for (i = 0; i < group->n_mem; i++) {
948 mem = (GstV4l2Memory *) group->mem[i];
950 GST_LOG_OBJECT (allocator, "clearing DMABUF import, fd %i plane %d",
957 mem->mem.maxsize = 0;
962 /* Update v4l2 structure */
963 group->planes[i].length = 0;
964 group->planes[i].bytesused = 0;
965 group->planes[i].m.fd = -1;
966 group->planes[i].data_offset = 0;
/* Single-planar API keeps the same info directly in the buffer struct. */
969 if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
970 group->buffer.bytesused = 0;
971 group->buffer.length = 0;
972 group->buffer.m.fd = -1;
/* Allocate an empty DMABUF-import group: plane memories are created as
 * zero-sized placeholders; actual fds are filled in later by
 * gst_v4l2_allocator_import_dmabuf(). */
977 gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator)
979 GstV4l2MemoryGroup *group;
982 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, NULL);
984 group = gst_v4l2_allocator_alloc (allocator);
989 GST_LOG_OBJECT (allocator, "allocating empty DMABUF import group");
991 for (i = 0; i < group->n_mem; i++) {
992 if (group->mem[i] == NULL) {
993 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
994 NULL, 0, 0, 0, 0, i, NULL, -1, group);
996 /* Take back the allocator reference */
997 gst_object_ref (allocator);
1000 group->mems_allocated++;
1003 gst_v4l2_allocator_clear_dmabufin (allocator, group);
/* Reset a USERPTR group to the "nothing imported" state: zero sizes and
 * clear the user pointers in both GstMemory and v4l2 structures. */
1009 gst_v4l2_allocator_clear_userptr (GstV4l2Allocator * allocator,
1010 GstV4l2MemoryGroup * group)
1012 GstV4l2Object *obj = allocator->obj;
1016 g_return_if_fail (allocator->memory == V4L2_MEMORY_USERPTR);
1018 for (i = 0; i < group->n_mem; i++) {
1019 mem = (GstV4l2Memory *) group->mem[i];
1021 GST_LOG_OBJECT (allocator, "clearing USERPTR %p plane %d size %"
1022 G_GSIZE_FORMAT, mem->data, i, mem->mem.size);
1024 mem->mem.maxsize = 0;
1028 group->planes[i].length = 0;
1029 group->planes[i].bytesused = 0;
1030 group->planes[i].m.userptr = 0;
/* Single-planar API keeps the same info directly in the buffer struct. */
1033 if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1034 group->buffer.bytesused = 0;
1035 group->buffer.length = 0;
1036 group->buffer.m.userptr = 0;
/* Allocate an empty USERPTR group: plane memories are zero-sized
 * placeholders, later filled by gst_v4l2_allocator_import_userptr(). */
1040 GstV4l2MemoryGroup *
1041 gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator)
1043 GstV4l2MemoryGroup *group;
1046 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, NULL);
1048 group = gst_v4l2_allocator_alloc (allocator);
1053 GST_LOG_OBJECT (allocator, "allocating empty USERPTR group");
1055 for (i = 0; i < group->n_mem; i++) {
1057 if (group->mem[i] == NULL) {
1058 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
1059 NULL, 0, 0, 0, 0, i, NULL, -1, group);
1061 /* Take back the allocator reference */
1062 gst_object_ref (allocator);
1065 group->mems_allocated++;
1068 gst_v4l2_allocator_clear_userptr (allocator, group);
/* Import 'n_mem' external DMABUF memories into an import group: dup() each
 * fd and record size/offset in both the GstMemory wrappers and the v4l2
 * plane structures. Returns FALSE on count mismatch, non-DMABUF memory or
 * dup() failure. */
1074 gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
1075 GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
1077 GstV4l2Object *obj = allocator->obj;
1081 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);
1083 if (group->n_mem != n_mem)
1084 goto n_mem_missmatch;
1086 for (i = 0; i < group->n_mem; i++) {
1088 gsize size, offset, maxsize;
1090 if (!gst_is_dmabuf_memory (dma_mem[i]))
1093 size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);
1095 if ((dmafd = dup (gst_dmabuf_memory_get_fd (dma_mem[i]))) < 0)
1098 GST_LOG_OBJECT (allocator, "imported DMABUF as fd %i plane %d", dmafd, i);
1100 mem = (GstV4l2Memory *) group->mem[i];
1103 mem->mem.maxsize = maxsize;
1104 mem->mem.offset = offset;
1105 mem->mem.size = size;
1108 /* Update v4l2 structure */
1109 group->planes[i].length = maxsize;
1110 group->planes[i].bytesused = size;
1111 group->planes[i].m.fd = dmafd;
1112 group->planes[i].data_offset = offset;
1115 /* Copy into buffer structure if not using planes */
1116 if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1117 group->buffer.bytesused = group->planes[0].bytesused;
1118 group->buffer.length = group->planes[0].length;
/* NOTE(review): this reads planes[0].m.userptr but assigns buffer.m.fd —
 * for DMABUF import this looks like a copy/paste bug and should presumably
 * be planes[0].m.fd; confirm against the upstream fix before changing. */
1119 group->buffer.m.fd = group->planes[0].m.userptr;
1121 group->buffer.length = group->n_mem;
/* Error labels. */
1128 GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
1134 GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF", i);
1139 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
1140 g_strerror (errno));
/* Import user-space pointers into a USERPTR group: for each plane record
 * pointer and size in the GstMemory wrappers and the v4l2 plane structures.
 * maxsize per plane comes from the negotiated format's sizeimage. */
1146 gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
1147 GstV4l2MemoryGroup * group, gsize img_size, int n_planes,
1148 gpointer * data, gsize * size)
1150 GstV4l2Object *obj = allocator->obj;
1154 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, FALSE);
1156 /* TODO Support passing N plane from 1 memory to MPLANE v4l2 format */
1157 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type) && n_planes != group->n_mem)
1158 goto n_mem_missmatch;
1160 for (i = 0; i < group->n_mem; i++) {
1161 gsize maxsize, psize;
1163 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1164 struct v4l2_pix_format_mplane *pix = &obj->format.fmt.pix_mp;
1165 maxsize = pix->plane_fmt[i].sizeimage;
1168 maxsize = obj->format.fmt.pix.sizeimage;
/* Plane size must never exceed the caller-declared image size. */
1172 g_assert (psize <= img_size);
1174 GST_LOG_OBJECT (allocator, "imported USERPTR %p plane %d size %"
1175 G_GSIZE_FORMAT, data[i], i, psize);
1177 mem = (GstV4l2Memory *) group->mem[i];
1179 mem->mem.maxsize = maxsize;
1180 mem->mem.size = psize;
1181 mem->data = data[i];
1183 group->planes[i].length = maxsize;
1184 group->planes[i].bytesused = psize;
1185 group->planes[i].m.userptr = (unsigned long) data[i];
1186 group->planes[i].data_offset = 0;
1189 /* Copy into buffer structure if not using planes */
1190 if (!V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1191 group->buffer.bytesused = group->planes[0].bytesused;
1192 group->buffer.length = group->planes[0].length;
1193 group->buffer.m.userptr = group->planes[0].m.userptr;
1195 group->buffer.length = group->n_mem;
/* Error label. */
1202 GST_ERROR_OBJECT (allocator, "Got %i userptr plane while driver need %i",
1203 n_planes, group->n_mem);
/* Flush: for every group still marked queued in the driver, clear the
 * queued flags, reset it, and unref its memories so the dispose path
 * returns them to the free queue. */
1209 gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
1213 GST_OBJECT_LOCK (allocator);
1215 if (!g_atomic_int_get (&allocator->active))
1218 for (i = 0; i < allocator->count; i++) {
1219 GstV4l2MemoryGroup *group = allocator->groups[i];
1222 if (IS_QUEUED (group->buffer)) {
1223 UNSET_QUEUED (group->buffer);
1225 gst_v4l2_allocator_reset_group (allocator, group);
1227 for (n = 0; n < group->n_mem; n++)
1228 gst_memory_unref (group->mem[n]);
1233 GST_OBJECT_UNLOCK (allocator);
/* Queue a buffer group to the driver (VIDIOC_QBUF). bytesused is refreshed
 * from the GstMemory sizes first; memories are ref'ed while the driver owns
 * them, and unref'ed again if queueing fails. The QUEUED flag is forced to
 * match reality even when the driver reports it inconsistently. */
1237 gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
1238 GstV4l2MemoryGroup * group)
1240 GstV4l2Object *obj = allocator->obj;
1241 gboolean ret = TRUE;
1244 g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);
1247 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1248 for (i = 0; i < group->n_mem; i++)
1249 group->planes[i].bytesused =
1250 gst_memory_get_sizes (group->mem[i], NULL, NULL);
1252 group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);
1255 /* Ensure the memory will stay around and is RO */
1256 for (i = 0; i < group->n_mem; i++)
1257 gst_memory_ref (group->mem[i]);
1259 if (obj->ioctl (obj->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
1260 GST_ERROR_OBJECT (allocator, "failed queueing buffer %i: %s",
1261 group->buffer.index, g_strerror (errno));
1263 /* Release the memory, possibly making it RW again */
1264 for (i = 0; i < group->n_mem; i++)
1265 gst_memory_unref (group->mem[i]);
1268 if (IS_QUEUED (group->buffer)) {
1269 GST_DEBUG_OBJECT (allocator,
1270 "driver pretends buffer is queued even if queue failed");
1271 UNSET_QUEUED (group->buffer);
1276 GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
1277 group->buffer.index, group->buffer.flags);
1279 if (!IS_QUEUED (group->buffer)) {
1280 GST_DEBUG_OBJECT (allocator,
1281 "driver pretends buffer is not queued even if queue succeeded")
/* Dequeue a buffer from the driver (VIDIOC_DQBUF), locate the matching
 * group by index, copy the returned v4l2_buffer/planes into it, update the
 * GstMemory sizes from bytesused (capture) or reset them (output), and
 * unref the memories that were ref'ed at QBUF time.
 * Returns GST_FLOW_OK, GST_FLOW_EOS on EPIPE (last buffer), or
 * GST_FLOW_ERROR; the group is returned via 'group_out'. */
1290 gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator,
1291 GstV4l2MemoryGroup ** group_out)
1293 GstV4l2Object *obj = allocator->obj;
1294 struct v4l2_buffer buffer = { 0 };
1295 struct v4l2_plane planes[VIDEO_MAX_PLANES] = { {0} };
1298 GstV4l2MemoryGroup *group = NULL;
1300 g_return_val_if_fail (g_atomic_int_get (&allocator->active), GST_FLOW_ERROR);
1302 buffer.type = obj->type;
1303 buffer.memory = allocator->memory;
1305 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1306 buffer.length = obj->format.fmt.pix_mp.num_planes;
1307 buffer.m.planes = planes;
1310 if (obj->ioctl (obj->video_fd, VIDIOC_DQBUF, &buffer) < 0)
1313 group = allocator->groups[buffer.index];
/* Dequeuing a buffer we never queued means driver-side bookkeeping broke. */
1315 if (!IS_QUEUED (group->buffer)) {
1316 GST_ERROR_OBJECT (allocator,
1317 "buffer %i was not queued, this indicate a driver bug.", buffer.index);
1318 return GST_FLOW_ERROR;
1321 group->buffer = buffer;
1323 GST_LOG_OBJECT (allocator, "dequeued buffer %i (flags 0x%X)", buffer.index,
1326 if (IS_QUEUED (group->buffer)) {
1327 GST_DEBUG_OBJECT (allocator,
1328 "driver pretends buffer is queued even if dequeue succeeded");
1329 UNSET_QUEUED (group->buffer);
/* Re-point the plane array at the group's own storage (the local 'planes'
 * array goes out of scope), or mirror single-planar info into planes[0]. */
1332 if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
1333 group->buffer.m.planes = group->planes;
1334 memcpy (group->planes, buffer.m.planes, sizeof (planes));
1336 group->planes[0].bytesused = group->buffer.bytesused;
1337 group->planes[0].length = group->buffer.length;
1338 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
1339 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
1342 /* And update memory size */
1343 if (V4L2_TYPE_IS_OUTPUT (obj->type)) {
1344 gst_v4l2_allocator_reset_size (allocator, group);
1346 /* for capture, simply read the size */
1347 for (i = 0; i < group->n_mem; i++) {
1348 if (G_LIKELY (group->planes[i].bytesused <= group->mem[i]->maxsize))
1349 gst_memory_resize (group->mem[i], 0, group->planes[i].bytesused);
1351 GST_WARNING_OBJECT (allocator,
1352 "v4l2 provided buffer that is too big for the memory it was "
1353 "writing into. v4l2 claims %" G_GUINT32_FORMAT " bytes used but "
1354 "memory is only %" G_GSIZE_FORMAT "B. This is probably a driver "
1355 "bug.", group->planes[i].bytesused, group->mem[i]->maxsize);
1356 gst_memory_resize (group->mem[i], 0, group->mem[i]->maxsize);
1361 /* Release the memory, possibly making it RW again */
1362 for (i = 0; i < group->n_mem; i++)
1363 gst_memory_unref (group->mem[i]);
/* Error label: classify DQBUF errno. EPIPE signals end-of-stream. */
1369 if (errno == EPIPE) {
1370 GST_DEBUG_OBJECT (allocator, "broken pipe signals last buffer");
1371 return GST_FLOW_EOS;
1374 GST_ERROR_OBJECT (allocator, "failed dequeuing a %s buffer: %s",
1375 memory_type_to_str (allocator->memory), g_strerror (errno));
1379 GST_WARNING_OBJECT (allocator,
1380 "Non-blocking I/O has been selected using O_NONBLOCK and"
1381 " no buffer was in the outgoing queue.");
1384 GST_ERROR_OBJECT (allocator,
1385 "The buffer type is not supported, or the index is out of bounds, "
1386 "or no buffers have been allocated yet, or the userptr "
1387 "or length are invalid.");
1390 GST_ERROR_OBJECT (allocator,
1391 "insufficient memory to enqueue a user pointer buffer");
1394 GST_INFO_OBJECT (allocator,
1395 "VIDIOC_DQBUF failed due to an internal error."
1396 " Can also indicate temporary problems like signal loss."
1397 " Note the driver might dequeue an (empty) buffer despite"
1398 " returning an error, or even stop capturing.");
1399 /* have we de-queued a buffer ? */
1400 if (!IS_QUEUED (buffer)) {
1401 GST_DEBUG_OBJECT (allocator, "reenqueueing buffer");
1402 /* FIXME ... should we do something here? */
1406 GST_WARNING_OBJECT (allocator, "could not sync on a buffer on device");
1409 GST_WARNING_OBJECT (allocator,
1410 "Grabbing frame got interrupted unexpectedly. %d: %s.", errno,
1411 g_strerror (errno));
1415 return GST_FLOW_ERROR;
1419 gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
1420 GstV4l2MemoryGroup * group)
1422 switch (allocator->memory) {
1423 case V4L2_MEMORY_USERPTR:
1424 gst_v4l2_allocator_clear_userptr (allocator, group);
1426 case V4L2_MEMORY_DMABUF:
1427 gst_v4l2_allocator_clear_dmabufin (allocator, group);
1429 case V4L2_MEMORY_MMAP:
1432 g_assert_not_reached ();
1436 gst_v4l2_allocator_reset_size (allocator, group);