2 * Copyright (C) 2014 Collabora Ltd.
3 * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
22 #include "ext/videodev2.h"
23 #include "gstv4l2allocator.h"
24 #include "v4l2_calls.h"
26 #include <gst/allocators/gstdmabuf.h>
31 #include <sys/types.h>
/* mem_type string used to tag memory created by this allocator, so that
 * gst_is_v4l2_memory() can identify it */
#define GST_V4L2_MEMORY_TYPE "V4l2Memory"

#define gst_v4l2_allocator_parent_class parent_class
G_DEFINE_TYPE (GstV4l2Allocator, gst_v4l2_allocator, GST_TYPE_ALLOCATOR);

GST_DEBUG_CATEGORY_STATIC (v4l2allocator_debug);
#define GST_CAT_DEFAULT v4l2allocator_debug

/* Clear the driver-side queued state (both QUEUED and DONE flags) */
#define UNSET_QUEUED(buffer) \
    ((buffer).flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))

/* Mark the v4l2_buffer as queued in the driver */
#define SET_QUEUED(buffer) ((buffer).flags |= V4L2_BUF_FLAG_QUEUED)

/* Whether the driver currently considers the buffer queued (or done) */
#define IS_QUEUED(buffer) \
    ((buffer).flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))

/* Signal id table; GROUP_RELEASED is emitted when a whole buffer group
 * returns to the free queue (see gst_v4l2_allocator_release) */
static guint gst_v4l2_allocator_signals[LAST_SIGNAL] = { 0 };
57 static void gst_v4l2_allocator_release (GstV4l2Allocator * allocator,
61 memory_type_to_str (guint32 memory)
64 case V4L2_MEMORY_MMAP:
66 case V4L2_MEMORY_USERPTR:
68 case V4L2_MEMORY_DMABUF:
75 /*************************************/
76 /* GstV4lMemory implementation */
77 /*************************************/
/* GstMemoryMapFunction for GstV4l2Memory.
 * DMABUF-backed v4l2 memory must never reach this path: it is wrapped by a
 * dmabuf allocator downstream, hence the assertion. */
_v4l2mem_map (GstV4l2Memory * mem, gsize maxsize, GstMapFlags flags)
  switch (mem->group->buffer.memory) {
    case V4L2_MEMORY_MMAP:
    case V4L2_MEMORY_USERPTR:
    case V4L2_MEMORY_DMABUF:
      /* v4l2 dmabuf memory is not shared with downstream */
      g_assert_not_reached ();
      /* unexpected memory model reported by the driver */
      GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMemoryUnmapFunction counterpart of _v4l2mem_map.
 * As with mapping, DMABUF memory never reaches this code path. */
_v4l2mem_unmap (GstV4l2Memory * mem)
  gboolean ret = FALSE;

  switch (mem->group->buffer.memory) {
    case V4L2_MEMORY_MMAP:
    case V4L2_MEMORY_USERPTR:
    case V4L2_MEMORY_DMABUF:
      /* v4l2 dmabuf memory is not shared with downstream */
      g_assert_not_reached ();
      /* unexpected memory model reported by the driver */
      GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMiniObject dispose handler: instead of letting the memory be
 * destroyed, put it back into its group so the group can be recycled. */
_v4l2mem_dispose (GstV4l2Memory * mem)
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) mem->mem.allocator;
  GstV4l2MemoryGroup *group = mem->group;

  if (group->mem[mem->plane]) {
    /* We may have a dmabuf, replace it with returned original memory */
    group->mem[mem->plane] = gst_memory_ref ((GstMemory *) mem);
    gst_v4l2_allocator_release (allocator, mem);
    gst_object_ref (allocator);
/* Final free of the GstV4l2Memory wrapper structure itself */
_v4l2mem_free (GstV4l2Memory * mem)
  g_slice_free (GstV4l2Memory, mem);
/* Create a GstV4l2Memory wrapping one plane of a buffer group.
 * @plane: plane index inside @group
 * @data: mapped/user data pointer, or NULL when not applicable
 * @dmafd: exported DMABUF fd, or -1 when not applicable
 * The custom dispose handler makes the memory return to its group instead
 * of being destroyed (see _v4l2mem_dispose). */
static inline GstV4l2Memory *
_v4l2mem_new (GstMemoryFlags flags, GstAllocator * allocator,
    GstMemory * parent, gsize maxsize, gsize align, gsize offset, gsize size,
    gint plane, gpointer data, int dmafd, GstV4l2MemoryGroup * group)
  mem = g_slice_new0 (GstV4l2Memory);
  gst_memory_init (GST_MEMORY_CAST (mem),
      flags, allocator, parent, maxsize, align, offset, size);
  /* intercept dispose so the memory is recycled rather than freed */
  mem->mem.mini_object.dispose =
      (GstMiniObjectDisposeFunction) _v4l2mem_dispose;
/* GstMemoryShareFunction: create a read-only sub-memory referencing the
 * same plane data as @mem, at @offset with @size bytes. */
static GstV4l2Memory *
_v4l2mem_share (GstV4l2Memory * mem, gssize offset, gsize size)
  /* find the real parent */
  if ((parent = mem->mem.parent) == NULL)
    parent = (GstMemory *) mem;

  /* compute remaining size from the offset */
  size = mem->mem.size - offset;

  /* the shared memory is always readonly */
  sub = _v4l2mem_new (GST_MINI_OBJECT_FLAGS (parent) |
      GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->mem.allocator, parent,
      mem->mem.maxsize, mem->mem.align, offset, size, mem->plane, mem->data,
/* GstMemoryIsSpanFunction: TRUE when mem1 and mem2 are contiguous in the
 * same parent; *offset receives mem1's offset within the parent. */
_v4l2mem_is_span (GstV4l2Memory * mem1, GstV4l2Memory * mem2, gsize * offset)
  *offset = mem1->mem.offset - mem1->mem.parent->offset;

  /* and memory is contiguous */
  return mem1->mem.offset + mem1->mem.size == mem2->mem.offset;
/* Make @mem the parent of an exported dmabuf memory: lock it exclusively
 * and ref it so it stays alive for the dmabuf's lifetime. */
_v4l2mem_parent_to_dmabuf (GstV4l2Memory * mem, GstMemory * dma_mem)
  gst_memory_lock (&mem->mem, GST_LOCK_FLAG_EXCLUSIVE);
  dma_mem->parent = gst_memory_ref (&mem->mem);
212 gst_is_v4l2_memory (GstMemory * mem)
214 return gst_memory_is_type (mem, GST_V4L2_MEMORY_TYPE);
218 /*************************************/
219 /* GstV4l2MemoryGroup implementation */
220 /*************************************/
/* Free a buffer group: drop every plane memory it still holds, then the
 * group structure itself. */
gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
  for (i = 0; i < group->n_mem; i++) {
    GstMemory *mem = group->mem[i];
    group->mem[i] = NULL;
    gst_memory_unref (mem);

  g_slice_free (GstV4l2MemoryGroup, group);
/* Build a GstV4l2MemoryGroup describing driver buffer @index by querying
 * the driver with VIDIOC_QUERYBUF.  Returns NULL (freeing the group) on
 * ioctl failure or if the driver buffer is smaller than the negotiated
 * format size. */
static GstV4l2MemoryGroup *
gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
  gint video_fd = allocator->video_fd;
  guint32 memory = allocator->memory;
  struct v4l2_format *format = &allocator->format;
  GstV4l2MemoryGroup *group;
  gsize img_size, buf_size;

  group = g_slice_new0 (GstV4l2MemoryGroup);

  group->buffer.type = format->type;
  group->buffer.index = index;
  group->buffer.memory = memory;

  if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
    group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
    group->buffer.m.planes = group->planes;

  if (v4l2_ioctl (video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
    goto querybuf_failed;

  /* Check that the provided size matches the format we negotiated.  Failing
   * here usually means a driver or libv4l bug. */
  if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    for (i = 0; i < group->n_mem; i++) {
      img_size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
      buf_size = group->planes[i].length;
      if (buf_size < img_size)
        goto buffer_too_short;
    img_size = allocator->format.fmt.pix.sizeimage;
    buf_size = group->buffer.length;
    if (buf_size < img_size)
      goto buffer_too_short;

  /* We save non planar buffer information into the multi-planar plane array
   * to avoid duplicating the code later */
  if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
    group->planes[0].bytesused = group->buffer.bytesused;
    group->planes[0].length = group->buffer.length;
    g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
    memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));

  GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
  GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index);
  GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type);
  GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags);
  GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field);
  GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory);
  GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);

#ifndef GST_DISABLE_GST_DEBUG
  /* MMAP buffers also expose per-plane offsets worth logging */
  if (memory == V4L2_MEMORY_MMAP) {
    for (i = 0; i < group->n_mem; i++) {
      GST_LOG_OBJECT (allocator, " [%u] bytesused: %u, length: %u", i,
          group->planes[i].bytesused, group->planes[i].length);
      GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i,
          group->planes[i].m.mem_offset);

  /* ERRORS */
  GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
  /* NOTE(review): "smaller then" below should read "smaller than" — left
   * untouched here as it is a runtime string */
  GST_ERROR ("buffer size %" G_GSIZE_FORMAT
      " is smaller then negotiated size %" G_GSIZE_FORMAT
      ", this is usually the result of a bug in the v4l2 driver or libv4l.",
  gst_v4l2_memory_group_free (group);
330 /*************************************/
331 /* GstV4lAllocator implementation */
332 /*************************************/
/* Give one plane memory back to its group.  When the last plane comes
 * back, the whole group is pushed onto the free queue and the
 * "group-released" signal is emitted. */
gst_v4l2_allocator_release (GstV4l2Allocator * allocator, GstV4l2Memory * mem)
  GstV4l2MemoryGroup *group = mem->group;

  GST_LOG_OBJECT (allocator, "plane %i of buffer %u released",
      mem->plane, group->buffer.index);

  switch (allocator->memory) {
    case V4L2_MEMORY_DMABUF:
    case V4L2_MEMORY_USERPTR:

  /* When all memory are back, put the group back in the free queue */
  if (g_atomic_int_dec_and_test (&group->mems_allocated)) {
    GST_LOG_OBJECT (allocator, "buffer %u released", group->buffer.index);
    gst_atomic_queue_push (allocator->free_queue, group);
    g_signal_emit (allocator, gst_v4l2_allocator_signals[GROUP_RELEASED], 0);

  /* Keep last, allocator may be freed after this call */
  g_object_unref (allocator);
/* GstAllocator::free vfunc: tear down the backing storage of one plane —
 * munmap() MMAP data, close() an exported DMABUF fd. */
gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
  GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
  GstV4l2MemoryGroup *group = mem->group;

  GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
      mem->plane, group->buffer.index);

  switch (allocator->memory) {
    case V4L2_MEMORY_MMAP:
      munmap (mem->data, group->planes[mem->plane].length);
      /* an exported dmabuf fd was kept in the plane's union */
      } else if (group->planes[mem->plane].m.fd > 0) {
        close (group->planes[mem->plane].m.fd);
/* GObject dispose: free every remaining memory group. */
gst_v4l2_allocator_dispose (GObject * obj)
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;

  GST_LOG_OBJECT (obj, "called");

  for (i = 0; i < allocator->count; i++) {
    GstV4l2MemoryGroup *group = allocator->groups[i];
    allocator->groups[i] = NULL;
    gst_v4l2_memory_group_free (group);

  G_OBJECT_CLASS (parent_class)->dispose (obj);
/* GObject finalize: close the duplicated video fd and drop the free-group
 * queue. */
gst_v4l2_allocator_finalize (GObject * obj)
  GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;

  GST_LOG_OBJECT (obj, "called");

  v4l2_close (allocator->video_fd);
  gst_atomic_queue_unref (allocator->free_queue);

  G_OBJECT_CLASS (parent_class)->finalize (obj);
/* Class init: wire vfuncs, register the "group-released" signal and the
 * debug category.  alloc stays NULL — memory is only obtained through the
 * explicit gst_v4l2_allocator_alloc_* entry points. */
gst_v4l2_allocator_class_init (GstV4l2AllocatorClass * klass)
  GObjectClass *object_class;
  GstAllocatorClass *allocator_class;

  allocator_class = (GstAllocatorClass *) klass;
  object_class = (GObjectClass *) klass;

  allocator_class->alloc = NULL;
  allocator_class->free = gst_v4l2_allocator_free;

  object_class->dispose = gst_v4l2_allocator_dispose;
  object_class->finalize = gst_v4l2_allocator_finalize;

  gst_v4l2_allocator_signals[GROUP_RELEASED] = g_signal_new ("group-released",
      G_TYPE_FROM_CLASS (object_class), G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,

  GST_DEBUG_CATEGORY_INIT (v4l2allocator_debug, "v4l2allocator", 0,
/* Instance init: install the custom map/unmap/share/is_span functions and
 * create the free-group queue (sized for the V4L2 maximum frame count). */
gst_v4l2_allocator_init (GstV4l2Allocator * allocator)
  GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);

  alloc->mem_type = GST_V4L2_MEMORY_TYPE;
  alloc->mem_map = (GstMemoryMapFunction) _v4l2mem_map;
  alloc->mem_unmap = (GstMemoryUnmapFunction) _v4l2mem_unmap;
  alloc->mem_share = (GstMemoryShareFunction) _v4l2mem_share;
  alloc->mem_is_span = (GstMemoryIsSpanFunction) _v4l2mem_is_span;
  /* Use the default, fallback copy function */

  allocator->free_queue = gst_atomic_queue_new (VIDEO_MAX_FRAME);

  GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
/* Probe whether the driver supports a given memory model via
 * VIDIOC_REQBUFS and VIDIOC_CREATE_BUFS, returning the matching
 * GST_V4L2_ALLOCATOR_FLAG_* capability flags. */
#define GST_V4L2_ALLOCATOR_PROBE(obj,type) \
    gst_v4l2_allocator_probe ((obj), V4L2_MEMORY_ ## type, \
        GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUF, \
        GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUF)
gst_v4l2_allocator_probe (GstV4l2Allocator * allocator, guint32 memory,
    guint32 breq_flag, guint32 bcreate_flag)
  struct v4l2_requestbuffers breq = { 0 };

  breq.type = allocator->type;
  breq.memory = memory;

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) == 0) {
    struct v4l2_create_buffers bcreate = { 0 };

    /* NOTE(review): CREATE_BUFS is probed with V4L2_MEMORY_MMAP regardless
     * of the @memory model being probed — possibly intended to be
     * 'memory'; confirm against driver behaviour */
    bcreate.memory = V4L2_MEMORY_MMAP;
    bcreate.format = allocator->format;

    if ((v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) == 0))
      flags |= bcreate_flag;
/* Ask the driver for one extra buffer with VIDIOC_CREATE_BUFS and register
 * it as a new memory group.  Requires the allocator to be active and the
 * driver to support CREATE_BUFS (can_allocate). */
static GstV4l2MemoryGroup *
gst_v4l2_allocator_create_buf (GstV4l2Allocator * allocator)
  struct v4l2_create_buffers bcreate = { 0 };
  GstV4l2MemoryGroup *group = NULL;

  GST_OBJECT_LOCK (allocator);

  if (!allocator->active)

  bcreate.memory = allocator->memory;
  bcreate.format = allocator->format;

  if (!allocator->can_allocate)

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) < 0)
    goto create_bufs_failed;

  /* index assigned by the driver becomes the group's slot */
  group = gst_v4l2_memory_group_new (allocator, bcreate.index);
    allocator->groups[bcreate.index] = group;

  GST_OBJECT_UNLOCK (allocator);

  GST_WARNING_OBJECT (allocator, "error creating a new buffer: %s",
/* Pop a free memory group; if none is available and the driver supports
 * CREATE_BUFS, try creating a new buffer on the fly. */
static GstV4l2MemoryGroup *
gst_v4l2_allocator_alloc (GstV4l2Allocator * allocator)
  GstV4l2MemoryGroup *group;

  if (!g_atomic_int_get (&allocator->active))

  group = gst_atomic_queue_pop (allocator->free_queue);

    if (allocator->can_allocate) {
      group = gst_v4l2_allocator_create_buf (allocator);
      /* Don't hammer on CREATE_BUFS */
        allocator->can_allocate = FALSE;
556 gst_v4l2_allocator_reset_size (GstV4l2Allocator * allocator,
557 GstV4l2MemoryGroup * group)
561 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
564 for (i = 0; i < group->n_mem; i++) {
565 size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
566 gst_memory_resize (group->mem[i], 0, size);
570 size = allocator->format.fmt.pix.sizeimage;
571 gst_memory_resize (group->mem[0], 0, size);
/* Undo a partially successful *_alloc_* call: unref any plane memory that
 * was created (which recycles the group through dispose/release), or push
 * the untouched group straight back onto the free queue so _stop() can
 * still drain it. */
_cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
  if (group->mems_allocated > 0) {
    /* If one or more mmap worked, we need to unref the memory, otherwise
     * they will keep a ref on the allocator and leak it. This will put back
     * the group into the free_queue */
    for (i = 0; i < group->n_mem; i++)
      gst_memory_unref (group->mem[i]);
    /* Otherwise, group has to be on free queue for _stop() to work */
    gst_atomic_queue_push (allocator->free_queue, group);
/* Create an allocator bound to a dup of @video_fd, probing which memory
 * models (MMAP/USERPTR/DMABUF) the device supports.  Returns NULL (after
 * unref) when none is usable. */
gst_v4l2_allocator_new (GstObject * parent, gint video_fd,
    struct v4l2_format *format)
  GstV4l2Allocator *allocator;
  gchar *name, *parent_name;

  /* allocator is named "<parent>:allocator" for debugging */
  parent_name = gst_object_get_name (parent);
  name = g_strconcat (parent_name, ":allocator", NULL);
  g_free (parent_name);

  allocator = g_object_new (GST_TYPE_V4L2_ALLOCATOR, "name", name, NULL);
  /* NOTE(review): 'name' does not appear to be freed after g_object_new in
   * this view — confirm a g_free (name) exists */

  /* Save everything */
  allocator->video_fd = v4l2_dup (video_fd);
  allocator->type = format->type;
  allocator->format = *format;

  flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
  flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, USERPTR);
  flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, DMABUF);

  GST_ERROR_OBJECT (allocator,
      "No memory model supported by GStreamer for this device");
  g_object_unref (allocator);
/* Request @count buffers of the given memory model from the driver
 * (VIDIOC_REQBUFS) and create a memory group for each granted buffer.
 * Fails if the allocator is already active.  The driver may grant fewer
 * buffers than requested. */
gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
  struct v4l2_requestbuffers breq = { count, allocator->type, memory };
  gboolean can_allocate;

  g_return_val_if_fail (count != 0, 0);

  GST_OBJECT_LOCK (allocator);

  if (allocator->active)

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)

  /* remember whether CREATE_BUFS is available for this memory model */
    case V4L2_MEMORY_MMAP:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
    case V4L2_MEMORY_USERPTR:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
    case V4L2_MEMORY_DMABUF:
      can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
      can_allocate = FALSE;

  GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
      breq.count, memory_type_to_str (memory), count);

  allocator->can_allocate = can_allocate;
  allocator->count = breq.count;
  allocator->memory = memory;

  /* Create memory groups */
  for (i = 0; i < allocator->count; i++) {
    allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
    if (allocator->groups[i] == NULL)

    gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);

  g_atomic_int_set (&allocator->active, TRUE);

  GST_OBJECT_UNLOCK (allocator);

  /* ERRORS */
  GST_ERROR_OBJECT (allocator,
      "error requesting %d buffers: %s", count, g_strerror (errno));
  GST_ERROR_OBJECT (allocator,
      "error requesting %d buffers: %s", count, g_strerror (errno));
  GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
/* Release all buffers back to the driver (REQBUFS with count 0).  Every
 * group must already be back on the free queue, otherwise the allocator
 * is still in use and stopping is refused. */
gst_v4l2_allocator_stop (GstV4l2Allocator * allocator)
  struct v4l2_requestbuffers breq = { 0, allocator->type, allocator->memory };
  GstV4l2Return ret = GST_V4L2_OK;

  GST_DEBUG_OBJECT (allocator, "stop allocator");

  GST_OBJECT_LOCK (allocator);

  if (!allocator->active)

  /* all groups must have been returned before teardown */
  if (gst_atomic_queue_length (allocator->free_queue) != allocator->count) {
    GST_DEBUG_OBJECT (allocator, "allocator is still in use");

  /* drain the free queue, then free each registered group */
  while (gst_atomic_queue_pop (allocator->free_queue)) {

  for (i = 0; i < allocator->count; i++) {
    GstV4l2MemoryGroup *group = allocator->groups[i];
    allocator->groups[i] = NULL;
    gst_v4l2_memory_group_free (group);

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)

  g_atomic_int_set (&allocator->active, FALSE);

  GST_OBJECT_UNLOCK (allocator);

  /* ERRORS */
  GST_ERROR_OBJECT (allocator,
      "error releasing buffers buffers: %s", g_strerror (errno));
  ret = GST_V4L2_ERROR;
/* Allocate (or recycle) a buffer group and mmap() every plane that is not
 * yet wrapped in a GstV4l2Memory.  MMAP memory model only. */
gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
  GstV4l2MemoryGroup *group;

  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);

  group = gst_v4l2_allocator_alloc (allocator);

  for (i = 0; i < group->n_mem; i++) {
    if (group->mem[i] == NULL) {
      data = v4l2_mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
          MAP_SHARED, allocator->video_fd, group->planes[i].m.mem_offset);

      if (data == MAP_FAILED)

      GST_LOG_OBJECT (allocator,
          "mmap buffer length %d, data offset %d, plane %d",
          group->planes[i].length, group->planes[i].data_offset, i);

      group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
          NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
      /* Take back the allocator reference */
      gst_object_ref (allocator);

    group->mems_allocated++;

  /* Ensure group size.  Unlike GStreamer, v4l2 initially sets the size
   * (bytesused) to 0, and the plane length may be bigger than the size
   * negotiated in the format; reset the memory sizes to the expected
   * format size. */
  gst_v4l2_allocator_reset_size (allocator, group);

  /* ERRORS */
  GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
  _cleanup_failed_alloc (allocator, group);
/* Allocate an MMAP group and export each plane as a DMABUF fd
 * (VIDIOC_EXPBUF).  Each exported fd is wrapped by @dmabuf_allocator, with
 * the original GstV4l2Memory kept alive as the dmabuf memory's parent. */
gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
    GstAllocator * dmabuf_allocator)
  GstV4l2MemoryGroup *group;

  /* dmabuf export only works on MMAP buffers */
  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);

  group = gst_v4l2_allocator_alloc (allocator);

  for (i = 0; i < group->n_mem; i++) {
    if (group->mem[i] == NULL) {
      struct v4l2_exportbuffer expbuf = { 0 };

      expbuf.type = allocator->type;
      expbuf.index = group->buffer.index;
      expbuf.flags = O_CLOEXEC | O_RDWR;

      if (v4l2_ioctl (allocator->video_fd, VIDIOC_EXPBUF, &expbuf) < 0)

      GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d",

      group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
          NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
          NULL, expbuf.fd, group);
      /* Take back the allocator reference */
      gst_object_ref (allocator);

    g_assert (gst_is_v4l2_memory (group->mem[i]));
    mem = (GstV4l2Memory *) group->mem[i];

    /* dup the fd so the dmabuf wrapper owns its own descriptor */
    if ((dmafd = dup (mem->dmafd)) < 0)

    dma_mem = gst_dmabuf_allocator_alloc (dmabuf_allocator, dmafd,
    _v4l2mem_parent_to_dmabuf (mem, dma_mem);

    /* downstream sees the dmabuf memory, not the v4l2 memory */
    group->mem[i] = dma_mem;
    group->mems_allocated++;

  gst_v4l2_allocator_reset_size (allocator, group);

  /* ERRORS */
  GST_ERROR_OBJECT (allocator, "Failed to export DMABUF: %s",
  GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
  _cleanup_failed_alloc (allocator, group);
/* Reset all planes of a DMABUF-import group: zero sizes and set fds to -1
 * in both the wrapper memories and the v4l2 plane/buffer structures, so
 * stale state never reaches the driver on the next import. */
gst_v4l2_allocator_clear_dmabufin (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group)
  g_return_if_fail (allocator->memory == V4L2_MEMORY_DMABUF);

  for (i = 0; i < group->n_mem; i++) {
    mem = (GstV4l2Memory *) group->mem[i];

    GST_LOG_OBJECT (allocator, "clearing DMABUF import, fd %i plane %d",

    /* reset the wrapper memory */
    mem->mem.maxsize = 0;

    /* Update v4l2 structure */
    group->planes[i].length = 0;
    group->planes[i].bytesused = 0;
    group->planes[i].m.fd = -1;
    group->planes[i].data_offset = 0;

  /* mirror plane 0 into the non-planar buffer fields */
  if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    group->buffer.bytesused = 0;
    group->buffer.length = 0;
    group->buffer.m.fd = -1;
934 gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator)
936 GstV4l2MemoryGroup *group;
939 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, NULL);
941 group = gst_v4l2_allocator_alloc (allocator);
946 for (i = 0; i < group->n_mem; i++) {
947 GST_LOG_OBJECT (allocator, "allocation empty DMABUF import group");
949 if (group->mem[i] == NULL) {
950 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
951 NULL, 0, 0, 0, 0, i, NULL, -1, group);
953 /* Take back the allocator reference */
954 gst_object_ref (allocator);
957 group->mems_allocated++;
960 gst_v4l2_allocator_clear_dmabufin (allocator, group);
/* Detach user pointers from a USERPTR group, zeroing sizes and pointers in
 * both the wrapper memories and the v4l2 plane/buffer structures. */
gst_v4l2_allocator_clear_userptr (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group)
  g_return_if_fail (allocator->memory == V4L2_MEMORY_USERPTR);

  for (i = 0; i < group->n_mem; i++) {
    mem = (GstV4l2Memory *) group->mem[i];

    GST_LOG_OBJECT (allocator, "clearing USERPTR %p plane %d size %"
        G_GSIZE_FORMAT, mem->data, i, mem->mem.size);

    /* reset the wrapper memory */
    mem->mem.maxsize = 0;

    /* update v4l2 structure */
    group->planes[i].length = 0;
    group->planes[i].bytesused = 0;
    group->planes[i].m.userptr = 0;

  /* mirror plane 0 into the non-planar buffer fields */
  if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    group->buffer.bytesused = 0;
    group->buffer.length = 0;
    group->buffer.m.userptr = 0;
/* Allocate (or recycle) a group of empty USERPTR memories; actual user
 * pointers are attached later via gst_v4l2_allocator_import_userptr(). */
gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator)
  GstV4l2MemoryGroup *group;

  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, NULL);

  group = gst_v4l2_allocator_alloc (allocator);

  for (i = 0; i < group->n_mem; i++) {
    GST_LOG_OBJECT (allocator, "allocating empty USERPTR group");

    if (group->mem[i] == NULL) {
      /* empty wrapper: zero size, no data pointer, fd -1 */
      group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
          NULL, 0, 0, 0, 0, i, NULL, -1, group);
      /* Take back the allocator reference */
      gst_object_ref (allocator);

    group->mems_allocated++;

  gst_v4l2_allocator_clear_userptr (allocator, group);
1030 gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
1031 GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
1036 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);
1038 if (group->n_mem != n_mem)
1039 goto n_mem_missmatch;
1041 for (i = 0; i < group->n_mem; i++) {
1043 gsize size, offset, maxsize;
1045 if (!gst_is_dmabuf_memory (dma_mem[i]))
1048 size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);
1050 if ((dmafd = dup (gst_dmabuf_memory_get_fd (dma_mem[i]))) < 0)
1053 GST_LOG_OBJECT (allocator, "imported DMABUF as fd %i plane %d", dmafd, i);
1055 mem = (GstV4l2Memory *) group->mem[i];
1058 mem->mem.maxsize = maxsize;
1059 mem->mem.offset = offset;
1060 mem->mem.size = size;
1063 /* Update v4l2 structure */
1064 group->planes[i].length = maxsize;
1065 group->planes[i].bytesused = size;
1066 group->planes[i].m.fd = dmafd;
1067 group->planes[i].data_offset = offset;
1070 /* Copy into buffer structure if not using planes */
1071 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1072 group->buffer.bytesused = group->planes[0].bytesused;
1073 group->buffer.length = group->planes[0].length;
1074 group->buffer.m.fd = group->planes[0].m.userptr;
1076 group->buffer.length = group->n_mem;
1083 GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
1089 GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF", i);
1094 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
1095 g_strerror (errno));
/* Attach user-provided memory to a USERPTR group.  @data and @offset
 * describe @n_planes planes carved out of a single @img_size-byte area.
 * TODO Support passing N plane from 1 memory to MPLANE v4l2 format */
gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group, gsize img_size, int n_planes,
    gpointer * data, gsize * offset)
  g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, FALSE);

  if (n_planes != group->n_mem)
    goto n_mem_missmatch;

  for (i = 0; i < group->n_mem; i++) {
    gsize size, maxsize;

    /* maxsize comes from the negotiated format, per plane when planar */
    if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
      struct v4l2_pix_format_mplane *pix = &allocator->format.fmt.pix_mp;
      maxsize = pix->plane_fmt[i].sizeimage;
      maxsize = allocator->format.fmt.pix.sizeimage;

    /* plane size: distance to the next offset, or to the image end for
     * the last plane */
    if ((i + 1) == n_planes) {
      size = img_size - offset[i];
      size = offset[i + 1] - offset[i];

    g_assert (size <= img_size);

    GST_LOG_OBJECT (allocator, "imported USERPTR %p plane %d size %"
        G_GSIZE_FORMAT, data[i], i, size);

    mem = (GstV4l2Memory *) group->mem[i];

    /* update the wrapper memory */
    mem->mem.maxsize = maxsize;
    mem->mem.size = size;
    mem->data = data[i];

    /* update v4l2 structure */
    group->planes[i].length = maxsize;
    group->planes[i].bytesused = size;
    group->planes[i].m.userptr = (unsigned long) data[i];
    group->planes[i].data_offset = 0;

  /* Copy into buffer structure if not using planes */
  if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    group->buffer.bytesused = group->planes[0].bytesused;
    group->buffer.length = group->planes[0].length;
    group->buffer.m.userptr = group->planes[0].m.userptr;
    group->buffer.length = group->n_mem;

  /* ERRORS */
  GST_ERROR_OBJECT (allocator, "Got %i userptr plane while driver need %i",
      n_planes, group->n_mem);
/* Forget the queued state of every buffer (e.g. after stream-off).
 * Queued buffers hold one extra ref per plane memory, taken in qbuf();
 * drop it here. */
gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
  GST_OBJECT_LOCK (allocator);

  if (!allocator->active)

  for (i = 0; i < allocator->count; i++) {
    GstV4l2MemoryGroup *group = allocator->groups[i];

    if (IS_QUEUED (group->buffer)) {
      UNSET_QUEUED (group->buffer);

      /* release the per-plane ref taken when the buffer was queued */
      for (n = 0; n < group->n_mem; n++)
        gst_memory_unref (group->mem[n]);

  GST_OBJECT_UNLOCK (allocator);
/* Queue a group into the driver (VIDIOC_QBUF).  bytesused is refreshed
 * from the GstMemory sizes first; on success every plane memory is
 * ref'ed so it stays alive while the driver owns the buffer. */
gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group)
  gboolean ret = TRUE;

  /* update bytesused from the current memory sizes */
  if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    for (i = 0; i < group->n_mem; i++)
      group->planes[i].bytesused =
          gst_memory_get_sizes (group->mem[i], NULL, NULL);
    group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
    GST_ERROR_OBJECT (allocator, "failed queing buffer %i: %s",
        group->buffer.index, g_strerror (errno));
    /* keep our flags consistent with the failed ioctl */
    if (IS_QUEUED (group->buffer)) {
      GST_DEBUG_OBJECT (allocator,
          "driver pretends buffer is queued even if queue failed");
      UNSET_QUEUED (group->buffer);

  GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
      group->buffer.index, group->buffer.flags);

  /* some drivers fail to set the QUEUED flag on success; force it */
  if (!IS_QUEUED (group->buffer)) {
    GST_DEBUG_OBJECT (allocator,
        "driver pretends buffer is not queued even if queue succeeded");
    SET_QUEUED (group->buffer);

  /* Ensure the memory will stay around and is RO */
  for (i = 0; i < group->n_mem; i++)
    gst_memory_ref (group->mem[i]);
/* Dequeue one buffer from the driver (VIDIOC_DQBUF) and return the
 * corresponding memory group with sizes updated; the per-plane ref taken
 * at qbuf time is released. */
GstV4l2MemoryGroup *
gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator)
  struct v4l2_buffer buffer = { 0 };
  struct v4l2_plane planes[VIDEO_MAX_PLANES] = { {0} };
  GstV4l2MemoryGroup *group = NULL;

  buffer.type = allocator->type;
  buffer.memory = allocator->memory;

  if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    buffer.length = allocator->format.fmt.pix_mp.num_planes;
    buffer.m.planes = planes;

  if (v4l2_ioctl (allocator->video_fd, VIDIOC_DQBUF, &buffer) < 0)

  /* the dequeued index identifies the group created at start time */
  group = allocator->groups[buffer.index];
  group->buffer = buffer;

  GST_LOG_OBJECT (allocator, "dequeued buffer %i (flags 0x%X)", buffer.index,

  /* some drivers leave QUEUED set on a successful dequeue; clear it */
  if (IS_QUEUED (group->buffer)) {
    GST_DEBUG_OBJECT (allocator,
        "driver pretends buffer is queued even if dequeue succeeded");
    UNSET_QUEUED (group->buffer);

  if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
    /* repoint planes at the group's own storage and copy the plane data
     * the driver filled in */
    group->buffer.m.planes = group->planes;
    memcpy (group->planes, buffer.m.planes, sizeof (planes));
    group->planes[0].bytesused = group->buffer.bytesused;
    group->planes[0].length = group->buffer.length;
    g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
    memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));

  /* And update memory size */
  if (V4L2_TYPE_IS_OUTPUT (allocator->type)) {
    gst_v4l2_allocator_reset_size (allocator, group);
    /* for capture, simply read the size */
    for (i = 0; i < group->n_mem; i++) {
      gst_memory_resize (group->mem[i], 0, group->planes[i].bytesused);

  /* Release the memory, possibly making it RW again */
  for (i = 0; i < group->n_mem; i++)
    gst_memory_unref (group->mem[i]);

  /* ERROR diagnostics for the failing errno cases */
  GST_ERROR_OBJECT (allocator, "failed dequeuing a %s buffer: %s",
      memory_type_to_str (allocator->memory), g_strerror (errno));
  GST_WARNING_OBJECT (allocator,
      "Non-blocking I/O has been selected using O_NONBLOCK and"
      " no buffer was in the outgoing queue.");
  GST_ERROR_OBJECT (allocator,
      "The buffer type is not supported, or the index is out of bounds, "
      "or no buffers have been allocated yet, or the userptr "
      "or length are invalid.");
  GST_ERROR_OBJECT (allocator,
      "insufficient memory to enqueue a user pointer buffer");
  GST_INFO_OBJECT (allocator,
      "VIDIOC_DQBUF failed due to an internal error."
      " Can also indicate temporary problems like signal loss."
      " Note the driver might dequeue an (empty) buffer despite"
      " returning an error, or even stop capturing.");
  /* have we de-queued a buffer ? */
  if (!IS_QUEUED (buffer)) {
    GST_DEBUG_OBJECT (allocator, "reenqueing buffer");
    /* FIXME ... should we do something here? */
  GST_WARNING_OBJECT (allocator, "could not sync on a buffer on device");
  GST_WARNING_OBJECT (allocator,
      "Grabbing frame got interrupted unexpectedly. %d: %s.", errno,
      g_strerror (errno));

  /* keep flags consistent: the buffer stays queued if dequeue failed */
  if (!IS_QUEUED (group->buffer)) {
    GST_DEBUG_OBJECT (allocator,
        "driver pretends buffer is dequeued even if dequeue failed");
    SET_QUEUED (group->buffer);
/* Reset a group to its just-allocated state, dispatching on the memory
 * model currently in use. */
gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
    GstV4l2MemoryGroup * group)
  switch (allocator->memory) {
    case V4L2_MEMORY_USERPTR:
      gst_v4l2_allocator_clear_userptr (allocator, group);
    case V4L2_MEMORY_DMABUF:
      gst_v4l2_allocator_clear_dmabufin (allocator, group);
    case V4L2_MEMORY_MMAP:
      gst_v4l2_allocator_reset_size (allocator, group);
      /* unknown memory model is a programming error */
      g_assert_not_reached ();