2 * Copyright (C) 2014 Collabora Ltd.
3 * Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
24 #include "ext/videodev2.h"
25 #include "gstv4l2allocator.h"
26 #include "v4l2_calls.h"
28 #include <gst/allocators/gstdmabuf.h>
33 #include <sys/types.h>
36 #define GST_V4L2_MEMORY_TYPE "V4l2Memory"
38 #define gst_v4l2_allocator_parent_class parent_class
39 G_DEFINE_TYPE (GstV4l2Allocator, gst_v4l2_allocator, GST_TYPE_ALLOCATOR);
41 GST_DEBUG_CATEGORY_STATIC (v4l2allocator_debug);
42 #define GST_CAT_DEFAULT v4l2allocator_debug
44 #define UNSET_QUEUED(buffer) \
45 ((buffer).flags &= ~(V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
47 #define SET_QUEUED(buffer) ((buffer).flags |= V4L2_BUF_FLAG_QUEUED)
49 #define IS_QUEUED(buffer) \
50 ((buffer).flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))
58 static guint gst_v4l2_allocator_signals[LAST_SIGNAL] = { 0 };
60 static void gst_v4l2_allocator_release (GstV4l2Allocator * allocator,
/* Maps a V4L2_MEMORY_* enum value to a short human-readable string for
 * debug logging. NOTE(review): the per-case return statements are not
 * visible in this chunk. */
64 memory_type_to_str (guint32 memory)
67 case V4L2_MEMORY_MMAP:
69 case V4L2_MEMORY_USERPTR:
71 case V4L2_MEMORY_DMABUF:
78 /*************************************/
79 /* GstV4lMemory implementation */
80 /*************************************/
/* GstMemoryMapFunction for GstV4l2Memory: returns a pointer to the plane
 * data for MMAP/USERPTR memory. DMABUF is never mapped through this path
 * (those buffers are wrapped by a separate dmabuf allocator), hence the
 * assertion. */
83 _v4l2mem_map (GstV4l2Memory * mem, gsize maxsize, GstMapFlags flags)
87 switch (mem->group->buffer.memory) {
88 case V4L2_MEMORY_MMAP:
89 case V4L2_MEMORY_USERPTR:
92 case V4L2_MEMORY_DMABUF:
93 /* v4l2 dmabuf memory is not shared with downstream */
94 g_assert_not_reached ();
97 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMemoryUnmapFunction for GstV4l2Memory; mirror of _v4l2mem_map.
 * DMABUF memory never reaches here for the same reason it is never
 * mapped here. */
104 _v4l2mem_unmap (GstV4l2Memory * mem)
106 gboolean ret = FALSE;
108 switch (mem->group->buffer.memory) {
109 case V4L2_MEMORY_MMAP:
110 case V4L2_MEMORY_USERPTR:
113 case V4L2_MEMORY_DMABUF:
114 /* v4l2 dmabuf memory is not shared with downstream */
115 g_assert_not_reached ();
118 GST_WARNING ("Unknown memory type %i", mem->group->buffer.memory);
/* GstMiniObject dispose hook: instead of letting the memory die, revive it
 * (re-ref) and give it back to the allocator's free pool, so the same
 * GstV4l2Memory objects are recycled for the lifetime of the buffer pool. */
125 _v4l2mem_dispose (GstV4l2Memory * mem)
127 GstV4l2Allocator *allocator = (GstV4l2Allocator *) mem->mem.allocator;
128 GstV4l2MemoryGroup *group = mem->group;
131 if (group->mem[mem->plane]) {
132 /* We may have a dmabuf, replace it with returned original memory */
133 group->mem[mem->plane] = gst_memory_ref ((GstMemory *) mem);
134 gst_v4l2_allocator_release (allocator, mem);
/* keep the allocator alive while the memory is held in the free pool */
137 gst_object_ref (allocator);
/* Final destruction of a GstV4l2Memory once the allocator itself is
 * going away; frees the slice allocated in _v4l2mem_new(). */
145 _v4l2mem_free (GstV4l2Memory * mem)
149 g_slice_free (GstV4l2Memory, mem);
/* Allocates and initializes one GstV4l2Memory for a single plane of a
 * memory group.
 * @plane: plane index inside @group
 * @data:  mmap()ed or user pointer for the plane (may be NULL)
 * @dmafd: exported DMABUF fd, or -1 when not applicable
 * The custom dispose function recycles the memory instead of freeing it. */
152 static inline GstV4l2Memory *
153 _v4l2mem_new (GstMemoryFlags flags, GstAllocator * allocator,
154 GstMemory * parent, gsize maxsize, gsize align, gsize offset, gsize size,
155 gint plane, gpointer data, int dmafd, GstV4l2MemoryGroup * group)
159 mem = g_slice_new0 (GstV4l2Memory);
160 gst_memory_init (GST_MEMORY_CAST (mem),
161 flags, allocator, parent, maxsize, align, offset, size);
164 mem->mem.mini_object.dispose =
165 (GstMiniObjectDisposeFunction) _v4l2mem_dispose;
/* GstMemoryShareFunction: creates a read-only sub-memory of @mem covering
 * [offset, offset+size), parented to the top-most real memory so the
 * underlying plane stays alive. A negative/defaulted @size means
 * "until the end of @mem". */
175 static GstV4l2Memory *
176 _v4l2mem_share (GstV4l2Memory * mem, gssize offset, gsize size)
181 /* find the real parent */
182 if ((parent = mem->mem.parent) == NULL)
183 parent = (GstMemory *) mem;
186 size = mem->mem.size - offset;
188 /* the shared memory is always readonly */
189 sub = _v4l2mem_new (GST_MINI_OBJECT_FLAGS (parent) |
190 GST_MINI_OBJECT_FLAG_LOCK_READONLY, mem->mem.allocator, parent,
191 mem->mem.maxsize, mem->mem.align, offset, size, mem->plane, mem->data,
/* GstMemoryIsSpanFunction: two memories span contiguously when mem1 ends
 * exactly where mem2 begins; *offset is mem1's offset within its parent. */
198 _v4l2mem_is_span (GstV4l2Memory * mem1, GstV4l2Memory * mem2, gsize * offset)
201 *offset = mem1->mem.offset - mem1->mem.parent->offset;
203 /* and memory is contiguous */
204 return mem1->mem.offset + mem1->mem.size == mem2->mem.offset;
/* Makes the exported dmabuf memory hold an exclusive-locked ref on the
 * backing GstV4l2Memory, so the v4l2 buffer cannot be reclaimed while the
 * dmabuf wrapper is alive. */
208 _v4l2mem_parent_to_dmabuf (GstV4l2Memory * mem, GstMemory * dma_mem)
210 gst_memory_lock (&mem->mem, GST_LOCK_FLAG_EXCLUSIVE);
211 dma_mem->parent = gst_memory_ref (&mem->mem);
/* Returns TRUE if @mem was allocated by a GstV4l2Allocator (checked via
 * the memory type string). */
215 gst_is_v4l2_memory (GstMemory * mem)
217 return gst_memory_is_type (mem, GST_V4L2_MEMORY_TYPE);
221 /*************************************/
222 /* GstV4l2MemoryGroup implementation */
223 /*************************************/
/* Frees a memory group: unrefs every per-plane memory (clearing the slot
 * first so _v4l2mem_dispose does not put it back), then frees the group. */
226 gst_v4l2_memory_group_free (GstV4l2MemoryGroup * group)
230 for (i = 0; i < group->n_mem; i++) {
231 GstMemory *mem = group->mem[i];
232 group->mem[i] = NULL;
234 gst_memory_unref (mem);
237 g_slice_free (GstV4l2MemoryGroup, group);
/* Creates a GstV4l2MemoryGroup for driver buffer @index: queries the buffer
 * with VIDIOC_QUERYBUF, validates that the driver-provided sizes match the
 * negotiated format, and normalizes non-multiplanar buffer info into the
 * plane array so later code only has one representation to deal with.
 * Returns NULL on failure (querybuf error or too-short buffer). */
240 static GstV4l2MemoryGroup *
241 gst_v4l2_memory_group_new (GstV4l2Allocator * allocator, guint32 index)
243 gint video_fd = allocator->video_fd;
244 guint32 memory = allocator->memory;
245 struct v4l2_format *format = &allocator->format;
246 GstV4l2MemoryGroup *group;
247 gsize img_size, buf_size;
249 group = g_slice_new0 (GstV4l2MemoryGroup);
251 group->buffer.type = format->type;
252 group->buffer.index = index;
253 group->buffer.memory = memory;
255 if (V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
256 group->n_mem = group->buffer.length = format->fmt.pix_mp.num_planes;
257 group->buffer.m.planes = group->planes;
262 if (v4l2_ioctl (video_fd, VIDIOC_QUERYBUF, &group->buffer) < 0)
263 goto querybuf_failed;
265 /* Check that provided size matches the format we have negotiated. Failing
266 * here usually means a driver or libv4l bug. */
267 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
270 for (i = 0; i < group->n_mem; i++) {
271 img_size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
272 buf_size = group->planes[i].length;
273 if (buf_size < img_size)
274 goto buffer_too_short;
277 img_size = allocator->format.fmt.pix.sizeimage;
278 buf_size = group->buffer.length;
279 if (buf_size < img_size)
280 goto buffer_too_short;
283 /* We save non planar buffer information into the multi-planar plane array
284 * to avoid duplicating the code later */
285 if (!V4L2_TYPE_IS_MULTIPLANAR (format->type)) {
286 group->planes[0].bytesused = group->buffer.bytesused;
287 group->planes[0].length = group->buffer.length;
288 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
289 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
292 GST_LOG_OBJECT (allocator, "Got %s buffer", memory_type_to_str (memory));
293 GST_LOG_OBJECT (allocator, " index: %u", group->buffer.index);
294 GST_LOG_OBJECT (allocator, " type: %d", group->buffer.type);
295 GST_LOG_OBJECT (allocator, " flags: %08x", group->buffer.flags);
296 GST_LOG_OBJECT (allocator, " field: %d", group->buffer.field);
297 GST_LOG_OBJECT (allocator, " memory: %d", group->buffer.memory);
298 GST_LOG_OBJECT (allocator, " planes: %d", group->n_mem);
300 #ifndef GST_DISABLE_GST_DEBUG
301 if (memory == V4L2_MEMORY_MMAP) {
303 for (i = 0; i < group->n_mem; i++) {
304 GST_LOG_OBJECT (allocator, " [%u] bytesused: %u, length: %u", i,
305 group->planes[i].bytesused, group->planes[i].length);
306 GST_LOG_OBJECT (allocator, " [%u] MMAP offset: %u", i,
307 group->planes[i].m.mem_offset);
/* error paths */
316 GST_ERROR ("error querying buffer %d: %s", index, g_strerror (errno));
321 GST_ERROR ("buffer size %" G_GSIZE_FORMAT
322 " is smaller then negotiated size %" G_GSIZE_FORMAT
323 ", this is usually the result of a bug in the v4l2 driver or libv4l.",
328 gst_v4l2_memory_group_free (group);
333 /*************************************/
334 /* GstV4lAllocator implementation */
335 /*************************************/
/* Called (from _v4l2mem_dispose) when one plane memory of a group comes
 * back. When the last plane of the group is returned, the whole group is
 * pushed back onto the free queue and the "group-released" signal fires
 * (used by the buffer pool to resurrect buffers). */
338 gst_v4l2_allocator_release (GstV4l2Allocator * allocator, GstV4l2Memory * mem)
340 GstV4l2MemoryGroup *group = mem->group;
342 GST_LOG_OBJECT (allocator, "plane %i of buffer %u released",
343 mem->plane, group->buffer.index);
345 switch (allocator->memory) {
346 case V4L2_MEMORY_DMABUF:
350 case V4L2_MEMORY_USERPTR:
357 /* When all memory are back, put the group back in the free queue */
358 if (g_atomic_int_dec_and_test (&group->mems_allocated)) {
359 GST_LOG_OBJECT (allocator, "buffer %u released", group->buffer.index);
360 gst_atomic_queue_push (allocator->free_queue, group);
361 g_signal_emit (allocator, gst_v4l2_allocator_signals[GROUP_RELEASED], 0);
364 /* Keep last, allocator may be freed after this call */
365 g_object_unref (allocator);
/* GstAllocatorClass::free — releases the OS-level resources of one plane
 * memory when the allocator is torn down: munmap() for MMAP memory, and
 * close() of an exported DMABUF fd if one is present. */
369 gst_v4l2_allocator_free (GstAllocator * gallocator, GstMemory * gmem)
371 GstV4l2Allocator *allocator = (GstV4l2Allocator *) gallocator;
372 GstV4l2Memory *mem = (GstV4l2Memory *) gmem;
373 GstV4l2MemoryGroup *group = mem->group;
375 GST_LOG_OBJECT (allocator, "freeing plane %i of buffer %u",
376 mem->plane, group->buffer.index);
378 switch (allocator->memory) {
379 case V4L2_MEMORY_MMAP:
381 v4l2_munmap (mem->data, group->planes[mem->plane].length);
382 } else if (group->planes[mem->plane].m.fd > 0) {
383 close (group->planes[mem->plane].m.fd);
/* GObject dispose: free all remaining memory groups (clearing each slot
 * before freeing to avoid re-entrancy through dispose of the memories),
 * then chain up. */
395 gst_v4l2_allocator_dispose (GObject * obj)
397 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
400 GST_LOG_OBJECT (obj, "called");
402 for (i = 0; i < allocator->count; i++) {
403 GstV4l2MemoryGroup *group = allocator->groups[i];
404 allocator->groups[i] = NULL;
406 gst_v4l2_memory_group_free (group);
409 G_OBJECT_CLASS (parent_class)->dispose (obj);
/* GObject finalize: closes the dup'ed video fd and drops the free queue,
 * then chains up. */
413 gst_v4l2_allocator_finalize (GObject * obj)
415 GstV4l2Allocator *allocator = (GstV4l2Allocator *) obj;
417 GST_LOG_OBJECT (obj, "called");
419 v4l2_close (allocator->video_fd);
420 gst_atomic_queue_unref (allocator->free_queue);
422 G_OBJECT_CLASS (parent_class)->finalize (obj);
/* Class init: installs the custom free vfunc (alloc is NULL because
 * allocation only happens through the gst_v4l2_allocator_alloc_* API),
 * wires dispose/finalize, registers the "group-released" signal and the
 * debug category. */
426 gst_v4l2_allocator_class_init (GstV4l2AllocatorClass * klass)
428 GObjectClass *object_class;
429 GstAllocatorClass *allocator_class;
431 allocator_class = (GstAllocatorClass *) klass;
432 object_class = (GObjectClass *) klass;
434 allocator_class->alloc = NULL;
435 allocator_class->free = gst_v4l2_allocator_free;
437 object_class->dispose = gst_v4l2_allocator_dispose;
438 object_class->finalize = gst_v4l2_allocator_finalize;
440 gst_v4l2_allocator_signals[GROUP_RELEASED] = g_signal_new ("group-released",
441 G_TYPE_FROM_CLASS (object_class), G_SIGNAL_RUN_LAST, 0, NULL, NULL, NULL,
444 GST_DEBUG_CATEGORY_INIT (v4l2allocator_debug, "v4l2allocator", 0,
/* Instance init: installs the GstV4l2Memory map/unmap/share/is_span
 * vfuncs, creates the lock-free free queue sized for the maximum number
 * of V4L2 frames, and flags the allocator as custom-alloc so generic
 * gst_allocator_alloc() is refused. */
449 gst_v4l2_allocator_init (GstV4l2Allocator * allocator)
451 GstAllocator *alloc = GST_ALLOCATOR_CAST (allocator);
453 alloc->mem_type = GST_V4L2_MEMORY_TYPE;
454 alloc->mem_map = (GstMemoryMapFunction) _v4l2mem_map;
455 alloc->mem_unmap = (GstMemoryUnmapFunction) _v4l2mem_unmap;
456 alloc->mem_share = (GstMemoryShareFunction) _v4l2mem_share;
457 alloc->mem_is_span = (GstMemoryIsSpanFunction) _v4l2mem_is_span;
458 /* Use the default, fallback copy function */
460 allocator->free_queue = gst_atomic_queue_new (VIDEO_MAX_FRAME);
462 GST_OBJECT_FLAG_SET (allocator, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
/* Convenience wrapper expanding a memory-model name (MMAP/USERPTR/DMABUF)
 * into the matching probe call and capability flags. */
465 #define GST_V4L2_ALLOCATOR_PROBE(obj,type) \
466 gst_v4l2_allocator_probe ((obj), V4L2_MEMORY_ ## type, \
467 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _REQBUFS, \
468 GST_V4L2_ALLOCATOR_FLAG_ ## type ## _CREATE_BUFS)
/* Probes driver support for one memory model: a 0-count VIDIOC_REQBUFS
 * tests basic support (sets @breq_flag), then VIDIOC_CREATE_BUFS tests
 * dynamic buffer creation (sets @bcreate_flag). Returns the flag set. */
470 gst_v4l2_allocator_probe (GstV4l2Allocator * allocator, guint32 memory,
471 guint32 breq_flag, guint32 bcreate_flag)
473 struct v4l2_requestbuffers breq = { 0 };
476 breq.type = allocator->type;
478 breq.memory = memory;
480 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) == 0) {
481 struct v4l2_create_buffers bcreate = { 0 };
485 bcreate.memory = V4L2_MEMORY_MMAP;
486 bcreate.format = allocator->format;
488 if ((v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) == 0))
489 flags |= bcreate_flag;
/* Dynamically creates one extra driver buffer via VIDIOC_CREATE_BUFS and
 * wraps it in a new memory group (stored in allocator->groups at the
 * driver-chosen index). Requires the allocator to be active and
 * can_allocate; returns NULL on failure. Serialized by the object lock. */
495 static GstV4l2MemoryGroup *
496 gst_v4l2_allocator_create_buf (GstV4l2Allocator * allocator)
498 struct v4l2_create_buffers bcreate = { 0 };
499 GstV4l2MemoryGroup *group = NULL;
501 GST_OBJECT_LOCK (allocator);
503 if (!g_atomic_int_get (&allocator->active))
506 bcreate.memory = allocator->memory;
507 bcreate.format = allocator->format;
510 if (!allocator->can_allocate)
513 if (v4l2_ioctl (allocator->video_fd, VIDIOC_CREATE_BUFS, &bcreate) < 0)
514 goto create_bufs_failed;
516 group = gst_v4l2_memory_group_new (allocator, bcreate.index);
519 allocator->groups[bcreate.index] = group;
524 GST_OBJECT_UNLOCK (allocator);
/* error path */
529 GST_WARNING_OBJECT (allocator, "error creating a new buffer: %s",
/* Pops a free memory group, or — when the pool is empty and the driver
 * supports CREATE_BUFS — tries to create a new one. On creation failure,
 * can_allocate is cleared so we don't hammer the driver with CREATE_BUFS
 * calls that will keep failing. */
535 static GstV4l2MemoryGroup *
536 gst_v4l2_allocator_alloc (GstV4l2Allocator * allocator)
538 GstV4l2MemoryGroup *group;
540 if (!g_atomic_int_get (&allocator->active))
543 group = gst_atomic_queue_pop (allocator->free_queue);
546 if (allocator->can_allocate) {
547 group = gst_v4l2_allocator_create_buf (allocator);
549 /* Don't hammer on CREATE_BUFS */
551 allocator->can_allocate = FALSE;
/* Resets every plane memory of @group to the full negotiated image size
 * from the format (maxsize and size), since V4L2 initializes bytesused to
 * 0 while GStreamer expects memories to expose their usable size. */
559 gst_v4l2_allocator_reset_size (GstV4l2Allocator * allocator,
560 GstV4l2MemoryGroup * group)
563 gboolean imported = FALSE;
565 switch (allocator->memory) {
566 case V4L2_MEMORY_USERPTR:
567 case V4L2_MEMORY_DMABUF:
572 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
575 for (i = 0; i < group->n_mem; i++) {
576 size = allocator->format.fmt.pix_mp.plane_fmt[i].sizeimage;
579 group->mem[i]->maxsize = size;
581 gst_memory_resize (group->mem[i], 0, size);
585 size = allocator->format.fmt.pix.sizeimage;
588 group->mem[0]->maxsize = size;
590 gst_memory_resize (group->mem[0], 0, size);
/* Cleanup after a partially failed group allocation: if some planes were
 * allocated, unref them (their dispose puts the group back on the free
 * queue); otherwise push the group back directly so _stop() can still
 * account for it. */
595 _cleanup_failed_alloc (GstV4l2Allocator * allocator, GstV4l2MemoryGroup * group)
597 if (group->mems_allocated > 0) {
599 /* If one or more mmap worked, we need to unref the memory, otherwise
600 * they will keep a ref on the allocator and leak it. This will put back
601 * the group into the free_queue */
602 for (i = 0; i < group->n_mem; i++)
603 gst_memory_unref (group->mem[i]);
605 /* Otherwise, group has to be on free queue for _stop() to work */
606 gst_atomic_queue_push (allocator->free_queue, group);
/* Creates an allocator for @video_fd / @format, named after @parent.
 * The fd is dup'ed (closed again in finalize). All three memory models
 * are probed and the resulting capability flags stored on the object;
 * if none is supported, the allocator is destroyed and NULL is returned
 * (return statements not visible in this chunk). */
613 gst_v4l2_allocator_new (GstObject * parent, gint video_fd,
614 struct v4l2_format *format)
616 GstV4l2Allocator *allocator;
618 gchar *name, *parent_name;
620 parent_name = gst_object_get_name (parent);
621 name = g_strconcat (parent_name, ":allocator", NULL);
622 g_free (parent_name);
624 allocator = g_object_new (GST_TYPE_V4L2_ALLOCATOR, "name", name, NULL);
627 /* Save everything */
628 allocator->video_fd = v4l2_dup (video_fd);
629 allocator->type = format->type;
630 allocator->format = *format;
632 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, MMAP);
633 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, USERPTR);
634 flags |= GST_V4L2_ALLOCATOR_PROBE (allocator, DMABUF);
636 GST_OBJECT_FLAG_SET (allocator, flags);
/* error path: no supported memory model */
645 GST_ERROR_OBJECT (allocator,
646 "No memory model supported by GStreamer for this device");
647 g_object_unref (allocator);
/* Starts the allocator: VIDIOC_REQBUFS for @count buffers of @memory,
 * records whether more can be created later (CREATE_BUFS capability for
 * the chosen model), creates one memory group per driver buffer and
 * pushes them all on the free queue, then marks the allocator active.
 * The driver may grant fewer buffers than requested (breq.count). */
653 gst_v4l2_allocator_start (GstV4l2Allocator * allocator, guint32 count,
656 struct v4l2_requestbuffers breq = { count, allocator->type, memory };
657 gboolean can_allocate;
660 g_return_val_if_fail (count != 0, 0);
662 GST_OBJECT_LOCK (allocator);
664 if (g_atomic_int_get (&allocator->active))
667 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)
674 case V4L2_MEMORY_MMAP:
675 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, MMAP);
677 case V4L2_MEMORY_USERPTR:
678 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, USERPTR);
680 case V4L2_MEMORY_DMABUF:
681 can_allocate = GST_V4L2_ALLOCATOR_CAN_ALLOCATE (allocator, DMABUF);
684 can_allocate = FALSE;
688 GST_DEBUG_OBJECT (allocator, "allocated %u %s buffers out of %u requested",
689 breq.count, memory_type_to_str (memory), count);
691 allocator->can_allocate = can_allocate;
692 allocator->count = breq.count;
693 allocator->memory = memory;
695 /* Create memory groups */
696 for (i = 0; i < allocator->count; i++) {
697 allocator->groups[i] = gst_v4l2_memory_group_new (allocator, i);
698 if (allocator->groups[i] == NULL)
701 gst_atomic_queue_push (allocator->free_queue, allocator->groups[i]);
704 g_atomic_int_set (&allocator->active, TRUE);
707 GST_OBJECT_UNLOCK (allocator);
/* error paths */
712 GST_ERROR_OBJECT (allocator,
713 "error requesting %d buffers: %s", count, g_strerror (errno));
718 GST_ERROR_OBJECT (allocator,
719 "error requesting %d buffers: %s", count, g_strerror (errno));
724 GST_ERROR_OBJECT (allocator, "Not enough memory to allocate buffers");
/* Stops the allocator: refuses (GST_V4L2_BUSY) while any group is still
 * outstanding, otherwise drains the free queue, frees all memory groups,
 * releases the driver buffers with a 0-count VIDIOC_REQBUFS and marks the
 * allocator inactive. Returns GST_V4L2_OK, GST_V4L2_BUSY or
 * GST_V4L2_ERROR. */
735 gst_v4l2_allocator_stop (GstV4l2Allocator * allocator)
737 struct v4l2_requestbuffers breq = { 0, allocator->type, allocator->memory };
739 GstV4l2Return ret = GST_V4L2_OK;
741 GST_DEBUG_OBJECT (allocator, "stop allocator");
743 GST_OBJECT_LOCK (allocator);
745 if (!g_atomic_int_get (&allocator->active))
/* all groups must be back on the free queue before we can tear down */
748 if (gst_atomic_queue_length (allocator->free_queue) != allocator->count) {
749 GST_DEBUG_OBJECT (allocator, "allocator is still in use");
754 while (gst_atomic_queue_pop (allocator->free_queue)) {
758 for (i = 0; i < allocator->count; i++) {
759 GstV4l2MemoryGroup *group = allocator->groups[i];
760 allocator->groups[i] = NULL;
762 gst_v4l2_memory_group_free (group);
765 if (v4l2_ioctl (allocator->video_fd, VIDIOC_REQBUFS, &breq) < 0)
768 allocator->count = 0;
770 g_atomic_int_set (&allocator->active, FALSE);
773 GST_OBJECT_UNLOCK (allocator);
/* error path: fixed duplicated word in the message ("buffers buffers") */
778 GST_ERROR_OBJECT (allocator,
779 "error releasing buffers: %s", g_strerror (errno));
780 ret = GST_V4L2_ERROR;
/* Allocates (or recycles) a group of MMAP memory: each not-yet-wrapped
 * plane is v4l2_mmap()ed at the driver-provided offset and wrapped in a
 * GstV4l2Memory. On mmap failure, _cleanup_failed_alloc() returns the
 * group to the pool. Only valid when allocator->memory is MMAP. */
786 gst_v4l2_allocator_alloc_mmap (GstV4l2Allocator * allocator)
788 GstV4l2MemoryGroup *group;
791 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
793 group = gst_v4l2_allocator_alloc (allocator);
798 for (i = 0; i < group->n_mem; i++) {
799 if (group->mem[i] == NULL) {
801 data = v4l2_mmap (NULL, group->planes[i].length, PROT_READ | PROT_WRITE,
802 MAP_SHARED, allocator->video_fd, group->planes[i].m.mem_offset);
804 if (data == MAP_FAILED)
807 GST_LOG_OBJECT (allocator,
808 "mmap buffer length %d, data offset %d, plane %d",
809 group->planes[i].length, group->planes[i].data_offset, i);
811 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
812 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
815 /* Take back the allocator reference */
816 gst_object_ref (allocator);
819 group->mems_allocated++;
822 /* Ensure group size. Unlike GST, v4l2 have size (bytesused) initially set
823 * to 0. As length might be bigger than the expected size exposed in the
824 * format, we simply set bytesused initially and reset it here for
826 gst_v4l2_allocator_reset_size (allocator, group);
/* error path */
832 GST_ERROR_OBJECT (allocator, "Failed to mmap buffer: %s",
834 _cleanup_failed_alloc (allocator, group);
/* Allocates a group of driver MMAP buffers exported as DMABUF: each plane
 * is exported with VIDIOC_EXPBUF, wrapped in a GstV4l2Memory, the fd is
 * dup()ed and handed to @dmabuf_allocator, and the resulting dmabuf
 * memory is parented to the v4l2 memory (keeps the buffer alive).
 * Note: requires allocator->memory == MMAP, since export is done from
 * MMAP buffers. */
840 gst_v4l2_allocator_alloc_dmabuf (GstV4l2Allocator * allocator,
841 GstAllocator * dmabuf_allocator)
843 GstV4l2MemoryGroup *group;
846 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_MMAP, NULL);
848 group = gst_v4l2_allocator_alloc (allocator);
853 for (i = 0; i < group->n_mem; i++) {
858 if (group->mem[i] == NULL) {
859 struct v4l2_exportbuffer expbuf = { 0 };
861 expbuf.type = allocator->type;
862 expbuf.index = group->buffer.index;
864 expbuf.flags = O_CLOEXEC | O_RDWR;
866 if (v4l2_ioctl (allocator->video_fd, VIDIOC_EXPBUF, &expbuf) < 0)
869 GST_LOG_OBJECT (allocator, "exported DMABUF as fd %i plane %d",
872 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
873 NULL, group->planes[i].length, 0, 0, group->planes[i].length, i,
874 NULL, expbuf.fd, group);
876 /* Take back the allocator reference */
877 gst_object_ref (allocator);
880 g_assert (gst_is_v4l2_memory (group->mem[i]));
881 mem = (GstV4l2Memory *) group->mem[i];
/* dup so the dmabuf wrapper owns an fd independent of the v4l2 memory */
883 if ((dmafd = dup (mem->dmafd)) < 0)
886 dma_mem = gst_dmabuf_allocator_alloc (dmabuf_allocator, dmafd,
888 _v4l2mem_parent_to_dmabuf (mem, dma_mem);
890 group->mem[i] = dma_mem;
891 group->mems_allocated++;
894 gst_v4l2_allocator_reset_size (allocator, group);
/* error paths */
900 GST_ERROR_OBJECT (allocator, "Failed to export DMABUF: %s",
906 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
912 _cleanup_failed_alloc (allocator, group);
/* Resets a DMABUF-import group to "empty": zeroes the GstMemory sizes and
 * clears the v4l2 plane/buffer bookkeeping (fd set to -1) so a new set of
 * dmabufs can be imported before the next QBUF. */
918 gst_v4l2_allocator_clear_dmabufin (GstV4l2Allocator * allocator,
919 GstV4l2MemoryGroup * group)
924 g_return_if_fail (allocator->memory == V4L2_MEMORY_DMABUF);
926 for (i = 0; i < group->n_mem; i++) {
928 mem = (GstV4l2Memory *) group->mem[i];
930 GST_LOG_OBJECT (allocator, "clearing DMABUF import, fd %i plane %d",
937 mem->mem.maxsize = 0;
942 /* Update v4l2 structure */
943 group->planes[i].length = 0;
944 group->planes[i].bytesused = 0;
945 group->planes[i].m.fd = -1;
946 group->planes[i].data_offset = 0;
/* mirror plane 0 into the non-multiplanar buffer fields */
949 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
950 group->buffer.bytesused = 0;
951 group->buffer.length = 0;
952 group->buffer.m.fd = -1;
/* Allocates an empty group for DMABUF import: each plane gets a
 * placeholder GstV4l2Memory (no data, fd -1) which will be filled in by
 * gst_v4l2_allocator_import_dmabuf(). */
957 gst_v4l2_allocator_alloc_dmabufin (GstV4l2Allocator * allocator)
959 GstV4l2MemoryGroup *group;
962 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, NULL);
964 group = gst_v4l2_allocator_alloc (allocator);
969 GST_LOG_OBJECT (allocator, "allocating empty DMABUF import group");
971 for (i = 0; i < group->n_mem; i++) {
972 if (group->mem[i] == NULL) {
973 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
974 NULL, 0, 0, 0, 0, i, NULL, -1, group);
976 /* Take back the allocator reference */
977 gst_object_ref (allocator);
980 group->mems_allocated++;
983 gst_v4l2_allocator_clear_dmabufin (allocator, group);
/* Resets a USERPTR-import group to "empty": zeroes the GstMemory sizes
 * and the v4l2 plane/buffer userptr bookkeeping so new user pointers can
 * be imported before the next QBUF. */
989 gst_v4l2_allocator_clear_userptr (GstV4l2Allocator * allocator,
990 GstV4l2MemoryGroup * group)
995 g_return_if_fail (allocator->memory == V4L2_MEMORY_USERPTR);
997 for (i = 0; i < group->n_mem; i++) {
998 mem = (GstV4l2Memory *) group->mem[i];
1000 GST_LOG_OBJECT (allocator, "clearing USERPTR %p plane %d size %"
1001 G_GSIZE_FORMAT, mem->data, i, mem->mem.size);
1003 mem->mem.maxsize = 0;
1007 group->planes[i].length = 0;
1008 group->planes[i].bytesused = 0;
1009 group->planes[i].m.userptr = 0;
/* mirror plane 0 into the non-multiplanar buffer fields */
1012 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1013 group->buffer.bytesused = 0;
1014 group->buffer.length = 0;
1015 group->buffer.m.userptr = 0;
/* Allocates an empty group for USERPTR import: each plane gets a
 * placeholder GstV4l2Memory which will be filled in by
 * gst_v4l2_allocator_import_userptr(). */
1019 GstV4l2MemoryGroup *
1020 gst_v4l2_allocator_alloc_userptr (GstV4l2Allocator * allocator)
1022 GstV4l2MemoryGroup *group;
1025 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, NULL);
1027 group = gst_v4l2_allocator_alloc (allocator);
1032 GST_LOG_OBJECT (allocator, "allocating empty USERPTR group");
1034 for (i = 0; i < group->n_mem; i++) {
1036 if (group->mem[i] == NULL) {
1037 group->mem[i] = (GstMemory *) _v4l2mem_new (0, GST_ALLOCATOR (allocator),
1038 NULL, 0, 0, 0, 0, i, NULL, -1, group);
1040 /* Take back the allocator reference */
1041 gst_object_ref (allocator);
1044 group->mems_allocated++;
1047 gst_v4l2_allocator_clear_userptr (allocator, group);
/* Imports @n_mem external DMABUF memories into a DMABUF-import group:
 * validates the count and memory type, dup()s each fd (the driver keeps
 * its own copy until the buffer is dequeued), copies sizes/offsets into
 * both the GstMemory and the v4l2 plane structures, and mirrors plane 0
 * into the non-multiplanar v4l2_buffer fields.
 * BUGFIX: the non-multiplanar path copied group->planes[0].m.userptr
 * into group->buffer.m.fd — for DMABUF the fd member must be copied
 * (matches the upstream GStreamer fix). */
1053 gst_v4l2_allocator_import_dmabuf (GstV4l2Allocator * allocator,
1054 GstV4l2MemoryGroup * group, gint n_mem, GstMemory ** dma_mem)
1059 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_DMABUF, FALSE);
1061 if (group->n_mem != n_mem)
1062 goto n_mem_missmatch;
1064 for (i = 0; i < group->n_mem; i++) {
1066 gsize size, offset, maxsize;
1068 if (!gst_is_dmabuf_memory (dma_mem[i]))
1071 size = gst_memory_get_sizes (dma_mem[i], &offset, &maxsize);
1073 if ((dmafd = dup (gst_dmabuf_memory_get_fd (dma_mem[i]))) < 0)
1076 GST_LOG_OBJECT (allocator, "imported DMABUF as fd %i plane %d", dmafd, i);
1078 mem = (GstV4l2Memory *) group->mem[i];
1081 mem->mem.maxsize = maxsize;
1082 mem->mem.offset = offset;
1083 mem->mem.size = size;
1086 /* Update v4l2 structure */
1087 group->planes[i].length = maxsize;
1088 group->planes[i].bytesused = size;
1089 group->planes[i].m.fd = dmafd;
1090 group->planes[i].data_offset = offset;
1093 /* Copy into buffer structure if not using planes */
1094 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1095 group->buffer.bytesused = group->planes[0].bytesused;
1096 group->buffer.length = group->planes[0].length;
1097 group->buffer.m.fd = group->planes[0].m.fd;
1099 group->buffer.length = group->n_mem;
/* error paths */
1106 GST_ERROR_OBJECT (allocator, "Got %i dmabuf but needed %i", n_mem,
1112 GST_ERROR_OBJECT (allocator, "Memory %i is not of DMABUF", i);
1117 GST_ERROR_OBJECT (allocator, "Failed to dup DMABUF descriptor: %s",
1118 g_strerror (errno));
/* Imports user pointers into a USERPTR group: one pointer per plane, with
 * per-plane sizes derived from the offsets into the @img_size image.
 * maxsize per plane comes from the negotiated format; the last plane's
 * size runs to the end of the image. Mirrors plane 0 into the
 * non-multiplanar v4l2_buffer fields. */
1124 gst_v4l2_allocator_import_userptr (GstV4l2Allocator * allocator,
1125 GstV4l2MemoryGroup * group, gsize img_size, int n_planes,
1126 gpointer * data, gsize * offset)
1131 g_return_val_if_fail (allocator->memory == V4L2_MEMORY_USERPTR, FALSE);
1133 /* TODO Support passing N plane from 1 memory to MPLANE v4l2 format */
1134 if (n_planes != group->n_mem)
1135 goto n_mem_missmatch;
1137 for (i = 0; i < group->n_mem; i++) {
1138 gsize size, maxsize;
1140 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1141 struct v4l2_pix_format_mplane *pix = &allocator->format.fmt.pix_mp;
1142 maxsize = pix->plane_fmt[i].sizeimage;
1144 maxsize = allocator->format.fmt.pix.sizeimage;
/* plane size: distance to next plane's offset, or to end of image */
1147 if ((i + 1) == n_planes) {
1148 size = img_size - offset[i];
1150 size = offset[i + 1] - offset[i];
1153 g_assert (size <= img_size);
1155 GST_LOG_OBJECT (allocator, "imported USERPTR %p plane %d size %"
1156 G_GSIZE_FORMAT, data[i], i, size);
1158 mem = (GstV4l2Memory *) group->mem[i];
1160 mem->mem.maxsize = maxsize;
1161 mem->mem.size = size;
1162 mem->data = data[i];
1164 group->planes[i].length = maxsize;
1165 group->planes[i].bytesused = size;
1166 group->planes[i].m.userptr = (unsigned long) data[i];
1167 group->planes[i].data_offset = 0;
1170 /* Copy into buffer structure if not using planes */
1171 if (!V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1172 group->buffer.bytesused = group->planes[0].bytesused;
1173 group->buffer.length = group->planes[0].length;
1174 group->buffer.m.userptr = group->planes[0].m.userptr;
1176 group->buffer.length = group->n_mem;
/* error path */
1183 GST_ERROR_OBJECT (allocator, "Got %i userptr plane while driver need %i",
1184 n_planes, group->n_mem);
/* Flush: for every group still marked queued in the driver, clear the
 * queued flags, reset the group (imports cleared, sizes restored) and
 * drop the extra refs taken at QBUF time so the memories return to the
 * free pool. */
1190 gst_v4l2_allocator_flush (GstV4l2Allocator * allocator)
1194 GST_OBJECT_LOCK (allocator);
1196 if (!g_atomic_int_get (&allocator->active))
1199 for (i = 0; i < allocator->count; i++) {
1200 GstV4l2MemoryGroup *group = allocator->groups[i];
1203 if (IS_QUEUED (group->buffer)) {
1204 UNSET_QUEUED (group->buffer);
1206 gst_v4l2_allocator_reset_group (allocator, group);
1208 for (n = 0; n < group->n_mem; n++)
1209 gst_memory_unref (group->mem[n]);
1214 GST_OBJECT_UNLOCK (allocator);
/* Queues @group to the driver with VIDIOC_QBUF. bytesused is refreshed
 * from the current GstMemory sizes first. Works around drivers that
 * report inconsistent QUEUED flags, and refs every plane memory so the
 * data stays alive (and effectively read-only) while the driver owns it;
 * the matching unrefs happen in _dqbuf()/_flush(). */
1218 gst_v4l2_allocator_qbuf (GstV4l2Allocator * allocator,
1219 GstV4l2MemoryGroup * group)
1221 gboolean ret = TRUE;
1224 g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);
1227 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1228 for (i = 0; i < group->n_mem; i++)
1229 group->planes[i].bytesused =
1230 gst_memory_get_sizes (group->mem[i], NULL, NULL);
1232 group->buffer.bytesused = gst_memory_get_sizes (group->mem[0], NULL, NULL);
1235 if (v4l2_ioctl (allocator->video_fd, VIDIOC_QBUF, &group->buffer) < 0) {
1236 GST_ERROR_OBJECT (allocator, "failed queing buffer %i: %s",
1237 group->buffer.index, g_strerror (errno));
1239 if (IS_QUEUED (group->buffer)) {
1240 GST_DEBUG_OBJECT (allocator,
1241 "driver pretends buffer is queued even if queue failed")
1242 UNSET_QUEUED (group->buffer);
1247 GST_LOG_OBJECT (allocator, "queued buffer %i (flags 0x%X)",
1248 group->buffer.index, group->buffer.flags);
1250 if (!IS_QUEUED (group->buffer)) {
1251 GST_DEBUG_OBJECT (allocator,
1252 "driver pretends buffer is not queued even if queue succeeded");
1253 SET_QUEUED (group->buffer);
1256 /* Ensure the memory will stay around and is RO */
1257 for (i = 0; i < group->n_mem; i++)
1258 gst_memory_ref (group->mem[i]);
/* Dequeues one buffer with VIDIOC_DQBUF, copies the driver-updated
 * v4l2_buffer (and planes) back into the matching group, works around
 * drivers leaving the QUEUED flag set, refreshes the GstMemory sizes
 * (reset for output, bytesused for capture) and drops the extra refs
 * taken at QBUF time. Returns the group, or NULL on error (labels not
 * visible in this chunk). */
1264 GstV4l2MemoryGroup *
1265 gst_v4l2_allocator_dqbuf (GstV4l2Allocator * allocator)
1267 struct v4l2_buffer buffer = { 0 };
1268 struct v4l2_plane planes[VIDEO_MAX_PLANES] = { {0} };
1271 GstV4l2MemoryGroup *group = NULL;
1273 g_return_val_if_fail (g_atomic_int_get (&allocator->active), FALSE);
1275 buffer.type = allocator->type;
1276 buffer.memory = allocator->memory;
1278 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1279 buffer.length = allocator->format.fmt.pix_mp.num_planes;
1280 buffer.m.planes = planes;
1283 if (v4l2_ioctl (allocator->video_fd, VIDIOC_DQBUF, &buffer) < 0)
1286 group = allocator->groups[buffer.index];
1287 group->buffer = buffer;
1289 GST_LOG_OBJECT (allocator, "dequeued buffer %i (flags 0x%X)", buffer.index,
1292 if (IS_QUEUED (group->buffer)) {
1293 GST_DEBUG_OBJECT (allocator,
1294 "driver pretends buffer is queued even if dequeue succeeded");
1295 UNSET_QUEUED (group->buffer);
/* re-point the plane array at the group's own storage and copy back */
1298 if (V4L2_TYPE_IS_MULTIPLANAR (allocator->type)) {
1299 group->buffer.m.planes = group->planes;
1300 memcpy (group->planes, buffer.m.planes, sizeof (planes));
1302 group->planes[0].bytesused = group->buffer.bytesused;
1303 group->planes[0].length = group->buffer.length;
1304 g_assert (sizeof (group->planes[0].m) == sizeof (group->buffer.m));
1305 memcpy (&group->planes[0].m, &group->buffer.m, sizeof (group->buffer.m));
1308 /* And update memory size */
1309 if (V4L2_TYPE_IS_OUTPUT (allocator->type)) {
1310 gst_v4l2_allocator_reset_size (allocator, group);
1312 /* for capture, simply read the size */
1313 for (i = 0; i < group->n_mem; i++) {
1314 gst_memory_resize (group->mem[i], 0, group->planes[i].bytesused);
1318 /* Release the memory, possibly making it RW again */
1319 for (i = 0; i < group->n_mem; i++)
1320 gst_memory_unref (group->mem[i]);
/* error paths for the various errno values DQBUF can return */
1325 GST_ERROR_OBJECT (allocator, "failed dequeuing a %s buffer: %s",
1326 memory_type_to_str (allocator->memory), g_strerror (errno));
1330 GST_WARNING_OBJECT (allocator,
1331 "Non-blocking I/O has been selected using O_NONBLOCK and"
1332 " no buffer was in the outgoing queue.");
1335 GST_ERROR_OBJECT (allocator,
1336 "The buffer type is not supported, or the index is out of bounds, "
1337 "or no buffers have been allocated yet, or the userptr "
1338 "or length are invalid.");
1341 GST_ERROR_OBJECT (allocator,
1342 "insufficient memory to enqueue a user pointer buffer");
1345 GST_INFO_OBJECT (allocator,
1346 "VIDIOC_DQBUF failed due to an internal error."
1347 " Can also indicate temporary problems like signal loss."
1348 " Note the driver might dequeue an (empty) buffer despite"
1349 " returning an error, or even stop capturing.");
1350 /* have we de-queued a buffer ? */
1351 if (!IS_QUEUED (buffer)) {
1352 GST_DEBUG_OBJECT (allocator, "reenqueing buffer");
1353 /* FIXME ... should we do something here? */
1357 GST_WARNING_OBJECT (allocator, "could not sync on a buffer on device");
1360 GST_WARNING_OBJECT (allocator,
1361 "Grabbing frame got interrupted unexpectedly. %d: %s.", errno,
1362 g_strerror (errno));
/* Resets a group to its pristine state according to the memory model:
 * imports are cleared for USERPTR/DMABUF (MMAP needs no clearing), then
 * the memory sizes are restored to the negotiated format sizes. */
1370 gst_v4l2_allocator_reset_group (GstV4l2Allocator * allocator,
1371 GstV4l2MemoryGroup * group)
1373 switch (allocator->memory) {
1374 case V4L2_MEMORY_USERPTR:
1375 gst_v4l2_allocator_clear_userptr (allocator, group);
1377 case V4L2_MEMORY_DMABUF:
1378 gst_v4l2_allocator_clear_dmabufin (allocator, group);
1380 case V4L2_MEMORY_MMAP:
1383 g_assert_not_reached ();
1387 gst_v4l2_allocator_reset_size (allocator, group);
/* Returns the number of driver buffers currently allocated (allocator
 * count), read under the object lock. */
1391 gst_v4l2_allocator_num_allocated (GstV4l2Allocator * allocator)
1393 gsize num_allocated;
1395 GST_OBJECT_LOCK (allocator);
1397 num_allocated = allocator->count;
1399 GST_OBJECT_UNLOCK (allocator);
1401 return num_allocated;