2 * Copyright (C) 2020 Igalia, S.L.
3 * Author: Víctor Jáquez <vjaquez@igalia.com>
5 * This library is free software; you can redistribute it and/or
6 * modify it under the terms of the GNU Library General Public
7 * License as published by the Free Software Foundation; either
8 * version 2 of the License, or (at your option) any later version.
10 * This library is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13 * Library General Public License for more details.
15 * You should have received a copy of the GNU Library General Public
16 * License along with this library; if not, write to the
17 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
18 * Boston, MA 02110-1301, USA.
25 #include "gstvaallocator.h"
27 #include <sys/types.h>
29 #include <va/va_drmcommon.h>
31 #include "gstvacaps.h"
32 #include "gstvavideoformat.h"
34 #define GST_CAT_DEFAULT gst_va_memory_debug
35 GST_DEBUG_CATEGORY_STATIC (gst_va_memory_debug);
/* One-shot registration of the "vamemory" GStreamer debug category.
 * Uses the g_once_init_enter/leave pair so concurrent first callers
 * race safely; compiled out when GStreamer debugging is disabled. */
38 _init_debug_category (void)
40 #ifndef GST_DISABLE_GST_DEBUG
41 static volatile gsize _init = 0;
43 if (g_once_init_enter (&_init)) {
44 GST_DEBUG_CATEGORY_INIT (gst_va_memory_debug, "vamemory", 0, "VA memory");
45 g_once_init_leave (&_init, 1);
/* Destroys @num_surfaces VA surfaces, serialized under the display lock.
 * On VA failure the error is logged; caller-visible result is a gboolean
 * (FALSE on precondition failure per g_return_val_if_fail). */
51 _destroy_surfaces (GstVaDisplay * display, VASurfaceID * surfaces,
54 VADisplay dpy = gst_va_display_get_va_dpy (display);
57 g_return_val_if_fail (num_surfaces > 0, FALSE);
/* all raw VA calls in this file are bracketed by the display lock */
59 gst_va_display_lock (display);
60 status = vaDestroySurfaces (dpy, surfaces, num_surfaces);
61 gst_va_display_unlock (display);
62 if (status != VA_STATUS_SUCCESS) {
63 GST_ERROR ("vaDestroySurfaces: %s", vaErrorStr (status));
/* Creates @num_surfaces VA surfaces of @rt_format/@fourcc at @width x
 * @height.  When @ext_buf is non-NULL the surfaces wrap caller-provided
 * DRM-PRIME buffers instead of VA-internal storage. */
72 _create_surfaces (GstVaDisplay * display, guint rt_format, guint fourcc,
73 guint width, guint height, gint usage_hint,
74 VASurfaceAttribExternalBuffers * ext_buf, VASurfaceID * surfaces,
77 VADisplay dpy = gst_va_display_get_va_dpy (display);
/* first two attributes are always set: usage hint + memory type;
 * the remaining slots are filled conditionally below */
79 VASurfaceAttrib attrs[5] = {
81 .type = VASurfaceAttribUsageHint,
82 .flags = VA_SURFACE_ATTRIB_SETTABLE,
83 .value.type = VAGenericValueTypeInteger,
84 .value.value.i = usage_hint,
87 .type = VASurfaceAttribMemoryType,
88 .flags = VA_SURFACE_ATTRIB_SETTABLE,
89 .value.type = VAGenericValueTypeInteger,
/* external buffers are imported as DRM-PRIME, otherwise plain VA memory */
90 .value.value.i = ext_buf ? VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME
91 : VA_SURFACE_ATTRIB_MEM_TYPE_VA,
98 g_return_val_if_fail (num_surfaces > 0, FALSE);
/* pin the exact pixel format (fourcc) when requested */
102 attrs[num_attrs++] = (VASurfaceAttrib) {
103 .type = VASurfaceAttribPixelFormat,
104 .flags = VA_SURFACE_ATTRIB_SETTABLE,
105 .value.type = VAGenericValueTypeInteger,
106 .value.value.i = fourcc,
/* attach the external buffer descriptor when importing dmabufs */
113 attrs[num_attrs++] = (VASurfaceAttrib) {
114 .type = VASurfaceAttribExternalBufferDescriptor,
115 .flags = VA_SURFACE_ATTRIB_SETTABLE,
116 .value.type = VAGenericValueTypePointer,
117 .value.value.p = ext_buf,
122 gst_va_display_lock (display);
123 status = vaCreateSurfaces (dpy, rt_format, width, height, surfaces,
124 num_surfaces, attrs, num_attrs);
125 gst_va_display_unlock (display);
126 if (status != VA_STATUS_SUCCESS) {
127 GST_ERROR ("vaCreateSurfaces: %s", vaErrorStr (status));
/* Exports @surface as a DRM-PRIME (v2) descriptor into @desc; @flags
 * select layer composition and read/write access (see the callers'
 * VA_EXPORT_SURFACE_* usage).  Logged error on failure. */
135 _export_surface_to_dmabuf (GstVaDisplay * display, VASurfaceID surface,
136 guint32 flags, VADRMPRIMESurfaceDescriptor * desc)
138 VADisplay dpy = gst_va_display_get_va_dpy (display);
141 gst_va_display_lock (display);
142 status = vaExportSurfaceHandle (dpy, surface,
143 VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2, flags, desc);
144 gst_va_display_unlock (display);
145 if (status != VA_STATUS_SUCCESS) {
146 GST_ERROR ("vaExportSurfaceHandle: %s", vaErrorStr (status));
/* Thin lock/call/unlock wrappers over the libva image and buffer API.
 * Each logs on failure; recoverable conditions use GST_WARNING,
 * hard errors use GST_ERROR. */

/* Destroys a VAImage by id. */
154 _destroy_image (GstVaDisplay * display, VAImageID image_id)
156 VADisplay dpy = gst_va_display_get_va_dpy (display);
159 gst_va_display_lock (display);
160 status = vaDestroyImage (dpy, image_id);
161 gst_va_display_unlock (display);
162 if (status != VA_STATUS_SUCCESS) {
163 GST_ERROR ("vaDestroyImage: %s", vaErrorStr (status));

/* Derives @image directly from @surface (zero-copy mapping path).
 * Only a warning on failure: callers fall back to vaCreateImage. */
170 _get_derive_image (GstVaDisplay * display, VASurfaceID surface, VAImage * image)
172 VADisplay dpy = gst_va_display_get_va_dpy (display);
175 gst_va_display_lock (display);
176 status = vaDeriveImage (dpy, surface, image);
177 gst_va_display_unlock (display);
178 if (status != VA_STATUS_SUCCESS) {
179 GST_WARNING ("vaDeriveImage: %s", vaErrorStr (status));

/* Creates a standalone VAImage for @format at @width x @height,
 * translating the GStreamer format to a VAImageFormat first. */
187 _create_image (GstVaDisplay * display, GstVideoFormat format, gint width,
188 gint height, VAImage * image)
190 VADisplay dpy = gst_va_display_get_va_dpy (display);
191 const VAImageFormat *va_format;
194 va_format = gst_va_image_format_from_video_format (format);
198 gst_va_display_lock (display);
200 vaCreateImage (dpy, (VAImageFormat *) va_format, width, height, image);
201 gst_va_display_unlock (display);
202 if (status != VA_STATUS_SUCCESS) {
203 GST_ERROR ("vaCreateImage: %s", vaErrorStr (status));

/* Copies the full surface content into @image (slow, non-derived path). */
210 _get_image (GstVaDisplay * display, VASurfaceID surface, VAImage * image)
212 VADisplay dpy = gst_va_display_get_va_dpy (display);
215 gst_va_display_lock (display);
216 status = vaGetImage (dpy, surface, 0, 0, image->width, image->height,
218 gst_va_display_unlock (display);
219 if (status != VA_STATUS_SUCCESS) {
220 GST_ERROR ("vaGetImage: %s", vaErrorStr (status));

/* Blocks until all pending operations on @surface are complete. */
228 _sync_surface (GstVaDisplay * display, VASurfaceID surface)
230 VADisplay dpy = gst_va_display_get_va_dpy (display);
233 gst_va_display_lock (display);
234 status = vaSyncSurface (dpy, surface);
235 gst_va_display_unlock (display);
236 if (status != VA_STATUS_SUCCESS) {
237 GST_WARNING ("vaSyncSurface: %s", vaErrorStr (status));

/* Maps @buffer into CPU-visible memory; *@data receives the pointer. */
244 _map_buffer (GstVaDisplay * display, VABufferID buffer, gpointer * data)
246 VADisplay dpy = gst_va_display_get_va_dpy (display);
249 gst_va_display_lock (display);
250 status = vaMapBuffer (dpy, buffer, data);
251 gst_va_display_unlock (display);
252 if (status != VA_STATUS_SUCCESS) {
253 GST_WARNING ("vaMapBuffer: %s", vaErrorStr (status));

/* Releases a mapping previously created by _map_buffer(). */
260 _unmap_buffer (GstVaDisplay * display, VABufferID buffer)
262 VADisplay dpy = gst_va_display_get_va_dpy (display);
265 gst_va_display_lock (display);
266 status = vaUnmapBuffer (dpy, buffer);
267 gst_va_display_unlock (display);
268 if (status != VA_STATUS_SUCCESS) {
269 GST_WARNING ("vaUnmapBuffer: %s", vaErrorStr (status));

/* Writes @image back to @surface (full-frame blit), syncing first so
 * no in-flight operation races with the upload. */
276 _put_image (GstVaDisplay * display, VASurfaceID surface, VAImage * image)
278 VADisplay dpy = gst_va_display_get_va_dpy (display);
281 if (!_sync_surface (display, surface))
284 gst_va_display_lock (display);
285 status = vaPutImage (dpy, surface, image->image_id, 0, 0, image->width,
286 image->height, 0, 0, image->width, image->height);
287 gst_va_display_unlock (display);
288 if (status != VA_STATUS_SUCCESS) {
289 GST_ERROR ("vaPutImage: %s", vaErrorStr (status));
295 /*=========================== Quarks for GstMemory ===========================*/
/* Quark used to attach a GstVaBufferSurface to a GstMemory as qdata. */
298 gst_va_buffer_surface_quark (void)
300 static gsize surface_quark = 0;
302 if (g_once_init_enter (&surface_quark)) {
303 GQuark quark = g_quark_from_string ("GstVaBufferSurface");
304 g_once_init_leave (&surface_quark, quark);
307 return surface_quark;

/* Quark keyed to the per-memory DRM format modifier (guint64 qdata). */
311 gst_va_drm_mod_quark (void)
313 static gsize drm_mod_quark = 0;
315 if (g_once_init_enter (&drm_mod_quark)) {
316 GQuark quark = g_quark_from_string ("DRMModifier");
317 g_once_init_leave (&drm_mod_quark, quark);
320 return drm_mod_quark;

/* Quark for an auxiliary surface attached to a buffer (e.g. decode-only
 * internal surfaces). */
324 gst_va_buffer_aux_surface_quark (void)
326 static gsize surface_quark = 0;
328 if (g_once_init_enter (&surface_quark)) {
329 GQuark quark = g_quark_from_string ("GstVaBufferAuxSurface");
330 g_once_init_leave (&surface_quark, quark);
333 return surface_quark;
336 /*========================= GstVaBufferSurface ===============================*/
/* Refcounted binding between one VASurfaceID and the GstMemory objects
 * (one per exported dmabuf) that back it.  ref_count tracks qdata
 * holders; ref_mems_count tracks outstanding memories in buffers. */
338 typedef struct _GstVaBufferSurface GstVaBufferSurface;
339 struct _GstVaBufferSurface
341 GstVaDisplay *display;
344 GstMemory *mems[GST_VIDEO_MAX_PLANES];
345 volatile gint ref_count;
346 volatile gint ref_mems_count;

/* GDestroyNotify for the qdata: destroys the VA surface and frees the
 * struct when the last reference is dropped.  Requires @display to have
 * been set (it is only set on the owning reference). */
350 gst_va_buffer_surface_unref (gpointer data)
352 GstVaBufferSurface *buf = data;
354 g_return_if_fail (buf && GST_IS_VA_DISPLAY (buf->display));
356 if (g_atomic_int_dec_and_test (&buf->ref_count)) {
357 GST_LOG_OBJECT (buf->display, "Destroying surface %#x", buf->surface);
358 _destroy_surfaces (buf->display, &buf->surface, 1);
359 gst_clear_object (&buf->display);
360 g_slice_free (GstVaBufferSurface, buf);

/* Allocates a GstVaBufferSurface with zeroed refcounts; the caller is
 * responsible for taking the first reference(s) and setting display. */
364 static GstVaBufferSurface *
365 gst_va_buffer_surface_new (VASurfaceID surface, GstVideoFormat format,
366 gint width, gint height)
368 GstVaBufferSurface *buf = g_slice_new (GstVaBufferSurface);
370 g_atomic_int_set (&buf->ref_count, 0);
371 g_atomic_int_set (&buf->ref_mems_count, 0);
372 buf->surface = surface;
379 /*=========================== GstVaMemoryPool ================================*/
381 /* queue for disposed surfaces */
/* Pool of released GstMemory objects awaiting reuse.  The atomic queue
 * allows lock-free push/pop; the mutex serializes compound operations
 * (flush, multi-plane pop) via the LOCK/UNLOCK macros below. */
382 typedef struct _GstVaMemoryPool GstVaMemoryPool;
383 struct _GstVaMemoryPool
385 GstAtomicQueue *queue;
391 #define GST_VA_MEMORY_POOL_CAST(obj) ((GstVaMemoryPool *)obj)
392 #define GST_VA_MEMORY_POOL_LOCK(obj) g_mutex_lock (&GST_VA_MEMORY_POOL_CAST(obj)->lock)
393 #define GST_VA_MEMORY_POOL_UNLOCK(obj) g_mutex_unlock (&GST_VA_MEMORY_POOL_CAST(obj)->lock)

/* Initializes queue, lock and the outstanding-surface counter. */
396 gst_va_memory_pool_init (GstVaMemoryPool * self)
398 self->queue = gst_atomic_queue_new (2);
400 g_mutex_init (&self->lock);
402 self->surface_count = 0;

/* Releases pool resources; queued memories must be flushed beforehand. */
406 gst_va_memory_pool_finalize (GstVaMemoryPool * self)
408 g_mutex_clear (&self->lock);
410 gst_atomic_queue_unref (self->queue);

/* Drains the queue, destroying each memory's VA surface once its
 * GstVaBufferSurface refcount reaches zero.  Caller must hold the pool
 * lock (see gst_va_memory_pool_flush for the locked variant). */
414 gst_va_memory_pool_flush_unlocked (GstVaMemoryPool * self,
415 GstVaDisplay * display)
418 GstVaBufferSurface *buf;
420 while ((mem = gst_atomic_queue_pop (self->queue))) {
421 /* destroy the surface */
422 buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
423 gst_va_buffer_surface_quark ());
425 if (g_atomic_int_dec_and_test (&buf->ref_count)) {
426 GST_LOG ("Destroying surface %#x", buf->surface);
427 _destroy_surfaces (display, &buf->surface, 1);
428 self->surface_count -= 1; /* GstVaDmabufAllocator */
429 g_slice_free (GstVaBufferSurface, buf);
432 self->surface_count -= 1; /* GstVaAllocator */
/* clear dispose so the final unref really frees the memory */
435 GST_MINI_OBJECT_CAST (mem)->dispose = NULL;
436 /* when a memory is pushed into the available queue its allocator is
437 * unreffed, so the allocator must be reffed here because the
438 * memory's finalize will unref it again */
439 gst_object_ref (mem->allocator);
440 gst_memory_unref (mem);

/* Locked wrapper around gst_va_memory_pool_flush_unlocked(). */
445 gst_va_memory_pool_flush (GstVaMemoryPool * self, GstVaDisplay * display)
447 GST_VA_MEMORY_POOL_LOCK (self);
448 gst_va_memory_pool_flush_unlocked (self, display);
449 GST_VA_MEMORY_POOL_UNLOCK (self);

/* Returns @mem (with a new ref) to the pool for reuse. */
453 gst_va_memory_pool_push (GstVaMemoryPool * self, GstMemory * mem)
455 gst_atomic_queue_push (self->queue, gst_memory_ref (mem));

/* Pops one memory from the pool, or NULL if empty. */
458 static inline GstMemory *
459 gst_va_memory_pool_pop (GstVaMemoryPool * self)
461 return gst_atomic_queue_pop (self->queue);

/* Peeks the head of the pool without removing it. */
464 static inline GstMemory *
465 gst_va_memory_pool_peek (GstVaMemoryPool * self)
467 return gst_atomic_queue_peek (self->queue);

/* Number of VA surfaces currently owned by this pool's allocator. */
471 gst_va_memory_pool_surface_count (GstVaMemoryPool * self)
473 return g_atomic_int_get (&self->surface_count);

/* Accounts a newly created surface to this pool. */
477 gst_va_memory_pool_surface_inc (GstVaMemoryPool * self)
479 g_atomic_int_inc (&self->surface_count);
482 /*=========================== GstVaDmabufAllocator ===========================*/
/* dmabuf-backed VA allocator: subclass of GstDmaBufAllocator that
 * chains into the parent's mem_map after syncing the VA surface. */
484 struct _GstVaDmabufAllocator
486 GstDmaBufAllocator parent;
488 GstVaDisplay *display;
/* saved parent mem_map vfunc; called at the end of our override */
490 GstMemoryMapFunction parent_map;
495 GstVaMemoryPool pool;
498 #define gst_va_dmabuf_allocator_parent_class dmabuf_parent_class
499 G_DEFINE_TYPE_WITH_CODE (GstVaDmabufAllocator, gst_va_dmabuf_allocator,
500 GST_TYPE_DMABUF_ALLOCATOR, _init_debug_category ());

/* mem_map override: wait for pending VA work on the surface, then
 * delegate the actual CPU mapping to the dmabuf parent allocator. */
503 gst_va_dmabuf_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
505 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (gmem->allocator);
506 VASurfaceID surface = gst_va_memory_get_surface (gmem);
508 _sync_surface (self->display, surface);
510 /* @TODO: if mapping with flag GST_MAP_VASURFACE return the
512 * if mapping and drm_modifers are not lineal, use vaDeriveImage */
513 #ifndef GST_DISABLE_GST_DEBUG
517 drm_mod = gst_mini_object_get_qdata (GST_MINI_OBJECT (gmem),
518 gst_va_drm_mod_quark ());
/* NOTE(review): "%#lx" assumes long is 64-bit; for a guint64 modifier
 * this is wrong on 32-bit targets — should use G_GINT64_MODIFIER. */
519 GST_TRACE_OBJECT (self, "DRM modifiers: %#lx", *drm_mod);
523 return self->parent_map (gmem, maxsize, flags);
/* GObject finalize: tears down the pool and drops the display ref. */
527 gst_va_dmabuf_allocator_finalize (GObject * object)
529 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);
531 gst_va_memory_pool_finalize (&self->pool);
532 gst_clear_object (&self->display);
534 G_OBJECT_CLASS (dmabuf_parent_class)->finalize (object);

/* GObject dispose: flushes pooled memories; warns if any surface is
 * still accounted for (would mean a leak by a holder elsewhere). */
538 gst_va_dmabuf_allocator_dispose (GObject * object)
540 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);
542 gst_va_memory_pool_flush_unlocked (&self->pool, self->display);
543 if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
544 GST_WARNING_OBJECT (self, "Surfaces leaked: %d",
545 gst_va_memory_pool_surface_count (&self->pool));
548 G_OBJECT_CLASS (dmabuf_parent_class)->dispose (object);

/* Wires dispose/finalize into the GObject vtable. */
552 gst_va_dmabuf_allocator_class_init (GstVaDmabufAllocatorClass * klass)
554 GObjectClass *object_class = G_OBJECT_CLASS (klass);
556 object_class->dispose = gst_va_dmabuf_allocator_dispose;
557 object_class->finalize = gst_va_dmabuf_allocator_finalize;

/* Instance init: set up the pool and interpose our mem_map, keeping
 * the parent's implementation for chaining. */
561 gst_va_dmabuf_allocator_init (GstVaDmabufAllocator * self)
563 gst_va_memory_pool_init (&self->pool);
564 self->parent_map = GST_ALLOCATOR (self)->mem_map;
565 GST_ALLOCATOR (self)->mem_map = gst_va_dmabuf_mem_map;

/* Public constructor: returns a floating-sunk allocator holding a ref
 * on @display.  Returns NULL on invalid display. */
569 gst_va_dmabuf_allocator_new (GstVaDisplay * display)
571 GstVaDmabufAllocator *self;
573 g_return_val_if_fail (GST_IS_VA_DISPLAY (display), NULL);
575 self = g_object_new (GST_TYPE_VA_DMABUF_ALLOCATOR, NULL);
576 self->display = gst_object_ref (display);
577 gst_object_ref_sink (self);
579 return GST_ALLOCATOR (self);

/* Size of the dmabuf behind @fd, obtained by seeking to its end.
 * NOTE(review): this moves the fd's file offset and does not restore
 * it — harmless here as the fd is only used for size/import. */
582 static inline goffset
583 _get_fd_size (gint fd)
585 return lseek (fd, 0, SEEK_END);
/* GstMiniObject dispose hook installed on pooled dmabuf memories.
 * Instead of freeing, pushes the whole per-surface memory group back
 * into the allocator's pool once the last sibling memory is released.
 * Returning TRUE lets the mini-object be freed; FALSE keeps it alive. */
589 gst_va_dmabuf_memory_release (GstMiniObject * mini_object)
591 GstMemory *mem = GST_MEMORY_CAST (mini_object);
592 GstVaBufferSurface *buf;
593 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (mem->allocator);
596 buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
597 gst_va_buffer_surface_quark ());
599 return TRUE; /* free this unknown buffer */
601 /* if this is the last reference to the GstVaBufferSurface, iterates
602 * its array of memories to push them into the queue with thread
604 GST_VA_MEMORY_POOL_LOCK (&self->pool);
605 if (g_atomic_int_dec_and_test (&buf->ref_mems_count)) {
606 for (i = 0; i < buf->n_mems; i++) {
607 GST_LOG_OBJECT (self, "releasing %p: dmabuf %d, va surface %#x",
608 buf->mems[i], gst_dmabuf_memory_get_fd (buf->mems[i]), buf->surface);
609 gst_va_memory_pool_push (&self->pool, buf->mems[i]);
612 GST_VA_MEMORY_POOL_UNLOCK (&self->pool);
614 /* note: if ref_mem_count doesn't reach zero, that memory will
615 * "float" until it's pushed back into the pool by the last va
616 * buffer surface ref */
618 /* Keep last in case we are holding on the last allocator ref */
619 gst_object_unref (mem->allocator);
621 /* don't call mini_object's free */
625 /* Creates an exported VASurface and adds it as @buffer's memories
628 * If @info is not NULL, a dummy (non-pooled) buffer is created to
629 * update offsets and strides, and it has to be unrefed immediately.
632 gst_va_dmabuf_allocator_setup_buffer_full (GstAllocator * allocator,
633 GstBuffer * buffer, GstVideoInfo * info)
635 GstVaBufferSurface *buf;
636 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
637 GstVideoFormat format;
638 VADRMPRIMESurfaceDescriptor desc = { 0, };
640 guint32 i, fourcc, rt_format, export_flags;
641 GDestroyNotify buffer_destroy = NULL;
643 g_return_val_if_fail (GST_IS_VA_DMABUF_ALLOCATOR (allocator), FALSE);
/* resolve the configured video format to VA fourcc + chroma format */
645 format = GST_VIDEO_INFO_FORMAT (&self->info);
646 fourcc = gst_va_fourcc_from_video_format (format);
647 rt_format = gst_va_chroma_from_video_format (format);
648 if (fourcc == 0 || rt_format == 0) {
649 GST_ERROR_OBJECT (allocator, "Unsupported format: %s",
650 gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->info)));
/* create one VA-owned surface to be exported as dmabuf(s) */
654 if (!_create_surfaces (self->display, rt_format, fourcc,
655 GST_VIDEO_INFO_WIDTH (&self->info),
656 GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
660 /* workaround for missing layered dmabuf formats in i965 */
661 if (gst_va_display_is_implementation (self->display,
662 GST_VA_IMPLEMENTATION_INTEL_I965)
663 && (fourcc == VA_FOURCC_YUY2 || fourcc == VA_FOURCC_UYVY)) {
664 /* These are not representable as separate planes */
665 export_flags = VA_EXPORT_SURFACE_COMPOSED_LAYERS;
667 /* Each layer will contain exactly one plane. For example, an NV12
668 * surface will be exported as two layers */
669 export_flags = VA_EXPORT_SURFACE_SEPARATE_LAYERS;
672 export_flags |= VA_EXPORT_SURFACE_READ_WRITE;
674 if (!_export_surface_to_dmabuf (self->display, surface, export_flags, &desc))
677 g_assert (GST_VIDEO_INFO_N_PLANES (&self->info) == desc.num_layers);
/* the driver may round the format; reject silent mismatches */
679 if (fourcc != desc.fourcc) {
680 GST_ERROR ("Unsupported fourcc: %" GST_FOURCC_FORMAT,
681 GST_FOURCC_ARGS (desc.fourcc));
685 buf = gst_va_buffer_surface_new (surface, format, desc.width, desc.height);
686 if (G_UNLIKELY (info)) {
688 GST_VIDEO_INFO_SIZE (info) = 0;
691 buf->n_mems = desc.num_objects;
/* wrap each exported DRM object as a GstDmaBufAllocator memory */
693 for (i = 0; i < desc.num_objects; i++) {
694 gint fd = desc.objects[i].fd;
695 gsize size = desc.objects[i].size > 0 ?
696 desc.objects[i].size : _get_fd_size (fd);
697 GstMemory *mem = gst_dmabuf_allocator_alloc (allocator, fd, size);
698 guint64 *drm_mod = g_new (guint64, 1);
700 gst_buffer_append_memory (buffer, mem);
/* pooled path: install release hook so memories return to the pool */
703 if (G_LIKELY (!info)) {
704 GST_MINI_OBJECT (mem)->dispose = gst_va_dmabuf_memory_release;
705 g_atomic_int_add (&buf->ref_mems_count, 1);
707 /* if no @info, surface will be destroyed as soon as buffer is
708 * destroyed (e.g. gst_va_dmabuf_allocator_try()) */
709 buf->display = gst_object_ref (self->display);
710 buffer_destroy = gst_va_buffer_surface_unref;
713 g_atomic_int_add (&buf->ref_count, 1);
714 gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
715 gst_va_buffer_surface_quark (), buf, buffer_destroy);
/* remember the DRM format modifier per memory, for later mapping */
717 *drm_mod = desc.objects[i].drm_format_modifier;
718 gst_mini_object_set_qdata (GST_MINI_OBJECT (mem), gst_va_drm_mod_quark (),
721 if (G_UNLIKELY (info))
722 GST_VIDEO_INFO_SIZE (info) += size;
724 GST_LOG_OBJECT (self, "buffer %p: new dmabuf %d / surface %#x [%dx%d] "
725 "size %" G_GSIZE_FORMAT, buffer, fd, surface,
726 GST_VIDEO_INFO_WIDTH (&self->info), GST_VIDEO_INFO_HEIGHT (&self->info),
727 GST_VIDEO_INFO_SIZE (&self->info));
/* dummy-buffer path: report the driver's real offsets/strides back */
730 if (G_UNLIKELY (info)) {
731 for (i = 0; i < desc.num_layers; i++) {
732 g_assert (desc.layers[i].num_planes == 1);
733 GST_VIDEO_INFO_PLANE_OFFSET (info, i) = desc.layers[i].offset[0];
734 GST_VIDEO_INFO_PLANE_STRIDE (info, i) = desc.layers[i].pitch[0];
737 gst_va_memory_pool_surface_inc (&self->pool);
/* error path: drop the surface created above */
744 _destroy_surfaces (self->display, &surface, 1);

/* Public wrapper: pooled setup without video-info feedback. */
750 gst_va_dmabuf_allocator_setup_buffer (GstAllocator * allocator,
753 return gst_va_dmabuf_allocator_setup_buffer_full (allocator, buffer, NULL);
/* Pops one complete surface (all its plane memories) from the pool and
 * appends the memories to @buffer in their original order.  Returns the
 * VASurfaceID or VA_INVALID_ID on any inconsistency.  Caller holds the
 * pool lock (see the locked wrapper below). */
757 gst_va_dmabuf_allocator_prepare_buffer_unlocked (GstVaDmabufAllocator * self,
760 GstMemory *mems[GST_VIDEO_MAX_PLANES] = { 0, };
761 GstVaBufferSurface *buf;
764 mems[0] = gst_va_memory_pool_pop (&self->pool);
766 return VA_INVALID_ID;
768 buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mems[0]),
769 gst_va_buffer_surface_quark ());
771 return VA_INVALID_ID;
773 if (buf->surface == VA_INVALID_ID)
774 return VA_INVALID_ID;
/* collect the remaining memories belonging to the same surface */
776 for (idx = 1; idx < buf->n_mems; idx++) {
777 /* grab next memory from queue */
780 GstVaBufferSurface *pbuf;
782 mem = gst_va_memory_pool_peek (&self->pool);
784 return VA_INVALID_ID;
786 pbuf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
787 gst_va_buffer_surface_quark ());
789 return VA_INVALID_ID;
791 if (pbuf->surface != buf->surface) {
792 GST_WARNING_OBJECT (self,
793 "expecting memory with surface %#x but got %#x: "
794 "possible memory interweaving", buf->surface, pbuf->surface);
795 return VA_INVALID_ID;
799 mems[idx] = gst_va_memory_pool_pop (&self->pool);
802 /* append memories */
803 for (i = 0; i < buf->n_mems; i++) {
804 gboolean found = FALSE;
806 /* find next memory to append */
807 for (j = 0; j < idx; j++) {
808 if (buf->mems[i] == mems[j]) {
814 /* if not found, free all the popped memories and bail */
817 buf->display = gst_object_ref (self->display);
818 for (j = 0; j < idx; j++) {
819 gst_object_ref (buf->mems[j]->allocator);
820 GST_MINI_OBJECT (mems[j])->dispose = NULL;
821 gst_memory_unref (mems[j]);
823 return VA_INVALID_ID;
/* re-arm the memory: one more pooled user, one more allocator ref */
826 g_atomic_int_add (&buf->ref_mems_count, 1);
827 gst_object_ref (buf->mems[i]->allocator);
828 gst_buffer_append_memory (buffer, buf->mems[i]);
/* NOTE(review): "bufer" typo in the log message below */
830 GST_LOG ("bufer %p: memory %p - dmabuf %d / surface %#x", buffer,
831 buf->mems[i], gst_dmabuf_memory_get_fd (buf->mems[i]),
832 gst_va_memory_get_surface (buf->mems[i]));

/* Locked public entry: TRUE when a pooled surface was attached. */
839 gst_va_dmabuf_allocator_prepare_buffer (GstAllocator * allocator,
842 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
845 GST_VA_MEMORY_POOL_LOCK (&self->pool);
846 surface = gst_va_dmabuf_allocator_prepare_buffer_unlocked (self, buffer);
847 GST_VA_MEMORY_POOL_UNLOCK (&self->pool);
849 return (surface != VA_INVALID_ID);

/* Releases all pooled memories and their surfaces. */
853 gst_va_dmabuf_allocator_flush (GstAllocator * allocator)
855 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
857 gst_va_memory_pool_flush (&self->pool, self->display);

/* Probes the configured format by doing one throwaway allocation on a
 * scratch buffer; @info receives driver-updated layout on success. */
861 gst_va_dmabuf_allocator_try (GstAllocator * allocator)
864 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
865 GstVideoInfo info = self->info;
868 buffer = gst_buffer_new ();
869 ret = gst_va_dmabuf_allocator_setup_buffer_full (allocator, buffer, &info);
870 gst_buffer_unref (buffer);
/* Configures the allocator's video info + usage hint.  If surfaces are
 * already pooled, the new config must match the current one (then the
 * caller's @info is refreshed with the driver's offsets/strides);
 * otherwise the config is validated via a probe allocation. */
879 gst_va_dmabuf_allocator_set_format (GstAllocator * allocator,
880 GstVideoInfo * info, guint usage_hint)
882 GstVaDmabufAllocator *self;
885 g_return_val_if_fail (GST_IS_VA_DMABUF_ALLOCATOR (allocator), FALSE);
886 g_return_val_if_fail (info, FALSE);
888 self = GST_VA_DMABUF_ALLOCATOR (allocator);
890 if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
891 if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_INFO_FORMAT (&self->info)
892 && GST_VIDEO_INFO_WIDTH (info) == GST_VIDEO_INFO_WIDTH (&self->info)
893 && GST_VIDEO_INFO_HEIGHT (info) == GST_VIDEO_INFO_HEIGHT (&self->info)
894 && usage_hint == self->usage_hint) {
895 *info = self->info; /* update callee info (offset & stride) */
901 self->usage_hint = usage_hint;
904 ret = gst_va_dmabuf_allocator_try (allocator);

/* Reports the currently configured info/usage hint; fails while the
 * format is still GST_VIDEO_FORMAT_UNKNOWN. */
913 gst_va_dmabuf_allocator_get_format (GstAllocator * allocator,
914 GstVideoInfo * info, guint * usage_hint)
916 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
918 if (GST_VIDEO_INFO_FORMAT (&self->info) == GST_VIDEO_FORMAT_UNKNOWN)
924 *usage_hint = self->usage_hint;
929 /* XXX: use a surface pool to control the created surfaces */
/* Imports caller-provided dmabufs (@fds/@mem, @n_planes of them) into a
 * single VA surface via VASurfaceAttribExternalBuffers, then attaches
 * the resulting GstVaBufferSurface as qdata to every memory so the
 * surface is destroyed when all memories go away. */
931 gst_va_dmabuf_memories_setup (GstVaDisplay * display, GstVideoInfo * info,
932 guint n_planes, GstMemory * mem[GST_VIDEO_MAX_PLANES],
933 uintptr_t * fds, gsize offset[GST_VIDEO_MAX_PLANES], guint usage_hint)
935 GstVideoFormat format;
936 GstVaBufferSurface *buf;
938 VASurfaceAttribExternalBuffers ext_buf = {
939 .width = GST_VIDEO_INFO_WIDTH (info),
940 .height = GST_VIDEO_INFO_HEIGHT (info),
941 .data_size = GST_VIDEO_INFO_SIZE (info),
942 .num_planes = GST_VIDEO_INFO_N_PLANES (info),
944 .num_buffers = GST_VIDEO_INFO_N_PLANES (info),
948 guint32 fourcc, rt_format;
952 g_return_val_if_fail (GST_IS_VA_DISPLAY (display), FALSE);
953 g_return_val_if_fail (n_planes <= GST_VIDEO_MAX_PLANES, FALSE);
955 format = GST_VIDEO_INFO_FORMAT (info);
956 if (format == GST_VIDEO_FORMAT_UNKNOWN)
959 rt_format = gst_va_chroma_from_video_format (format);
963 fourcc = gst_va_fourcc_from_video_format (format);
967 ext_buf.pixel_format = fourcc;
/* per-plane layout of the external buffers */
969 for (i = 0; i < n_planes; i++) {
970 ext_buf.pitches[i] = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
971 ext_buf.offsets[i] = offset[i];
974 ret = _create_surfaces (display, rt_format, ext_buf.pixel_format,
975 ext_buf.width, ext_buf.height, usage_hint, &ext_buf, &surface, 1);
979 GST_LOG_OBJECT (display, "Created surface %#x [%dx%d]", surface,
980 ext_buf.width, ext_buf.height);
/* NOTE(review): rt_format (a VA chroma value) is passed where
 * gst_va_buffer_surface_new() takes a GstVideoFormat — the format
 * field appears unused on this path, but worth confirming */
982 buf = gst_va_buffer_surface_new (surface, rt_format, ext_buf.width,
984 buf->display = gst_object_ref (display);
985 buf->n_mems = n_planes;
986 memcpy (buf->mems, mem, sizeof (buf->mems));
/* one GstVaBufferSurface ref per plane memory */
988 for (i = 0; i < n_planes; i++) {
989 g_atomic_int_add (&buf->ref_count, 1);
990 gst_mini_object_set_qdata (GST_MINI_OBJECT (mem[i]),
991 gst_va_buffer_surface_quark (), buf, gst_va_buffer_surface_unref);
992 GST_INFO_OBJECT (display, "setting surface %#x to dmabuf fd %d",
993 buf->surface, gst_dmabuf_memory_get_fd (mem[i]));
999 /*===================== GstVaAllocator / GstVaMemory =========================*/
/* Allocator for VA-memory-backed GstMemory (non-dmabuf path).  Mapping
 * goes through vaDeriveImage when the surface and image formats match
 * (use_derived), otherwise through vaCreateImage + vaGetImage. */
1001 struct _GstVaAllocator
1003 GstAllocator parent;
1005 GstVaDisplay *display;
/* whether mapping can use the zero-copy vaDeriveImage path */
1007 gboolean use_derived;
1008 GArray *surface_formats;
1010 GstVideoFormat surface_format;
1011 GstVideoFormat img_format;
1018 GstVaMemoryPool pool;

/* GstMemory subclass carrying the VA surface plus per-mapping state. */
1021 typedef struct _GstVaMemory GstVaMemory;
1026 VASurfaceID surface;
1027 GstVideoFormat surface_format;
/* CPU pointer while mapped; NULL when unmapped */
1029 gpointer mapped_data;
1031 GstMapFlags prev_mapflags;
1032 volatile gint map_count;
1034 gboolean is_derived;

1039 G_DEFINE_TYPE_WITH_CODE (GstVaAllocator, gst_va_allocator, GST_TYPE_ALLOCATOR,
1040 _init_debug_category ());

1042 static gboolean _va_unmap (GstVaMemory * mem);
/* GObject finalize: releases pool, surface format list and display. */
1045 gst_va_allocator_finalize (GObject * object)
1047 GstVaAllocator *self = GST_VA_ALLOCATOR (object);
1049 gst_va_memory_pool_finalize (&self->pool);
1050 g_clear_pointer (&self->surface_formats, g_array_unref);
1051 gst_clear_object (&self->display);
1053 G_OBJECT_CLASS (gst_va_allocator_parent_class)->finalize (object);

/* GObject dispose: flushes pooled memories and warns on leaks. */
1057 gst_va_allocator_dispose (GObject * object)
1059 GstVaAllocator *self = GST_VA_ALLOCATOR (object);
1061 gst_va_memory_pool_flush_unlocked (&self->pool, self->display);
1062 if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
1063 GST_WARNING_OBJECT (self, "Surfaces leaked: %d",
1064 gst_va_memory_pool_surface_count (&self->pool));
1067 G_OBJECT_CLASS (gst_va_allocator_parent_class)->dispose (object);

/* GstAllocator free vfunc: destroys the VA surface (only for the real
 * parent memory, not for shared sub-memories) and frees the struct.
 * Warns if the memory is being freed while still mapped. */
1071 _va_free (GstAllocator * allocator, GstMemory * mem)
1073 GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
1074 GstVaMemory *va_mem = (GstVaMemory *) mem;
1076 if (va_mem->mapped_data) {
1077 g_warning (G_STRLOC ":%s: Freeing memory %p still mapped", G_STRFUNC,
/* shared memories (mem->parent != NULL) do not own the surface */
1082 if (va_mem->surface != VA_INVALID_ID && mem->parent == NULL) {
1083 GST_LOG_OBJECT (self, "Destroying surface %#x", va_mem->surface);
1084 _destroy_surfaces (self->display, &va_mem->surface, 1);
1087 g_mutex_clear (&va_mem->lock);
1089 g_slice_free (GstVaMemory, va_mem);

/* Wires GObject and GstAllocator vfuncs. */
1093 gst_va_allocator_class_init (GstVaAllocatorClass * klass)
1095 GstAllocatorClass *allocator_class = GST_ALLOCATOR_CLASS (klass);
1096 GObjectClass *object_class = G_OBJECT_CLASS (klass);
1098 object_class->dispose = gst_va_allocator_dispose;
1099 object_class->finalize = gst_va_allocator_finalize;
1100 allocator_class->free = _va_free;
/* Resets per-mapping state: invalid VAImage, clean flags, no mapping. */
1104 _clean_mem (GstVaMemory * mem)
1106 memset (&mem->image, 0, sizeof (mem->image));
1107 mem->image.image_id = VA_INVALID_ID;
1108 mem->image.buf = VA_INVALID_ID;
1110 mem->is_derived = TRUE;
1111 mem->is_dirty = FALSE;
1112 mem->prev_mapflags = 0;
1113 mem->mapped_data = NULL;

/* Fully (re)initializes a GstVaMemory: mapping state, lock, and the
 * underlying GstMemory fields (no parent, zero align/offset). */
1117 _reset_mem (GstVaMemory * mem, GstAllocator * allocator, gsize size)
1120 g_atomic_int_set (&mem->map_count, 0);
1121 g_mutex_init (&mem->lock);
1123 gst_memory_init (GST_MEMORY_CAST (mem), 0, allocator, NULL, size,
1124 0 /* align */ , 0 /* offset */ , size);

/* Makes sure @image is valid for @surface: syncs the surface, then
 * either derives the image from it (@derived) or creates a standalone
 * image matching @info.  No-op if @image is already set. */
1127 static inline gboolean
1128 _ensure_image (GstVaDisplay * display, VASurfaceID surface,
1129 GstVideoInfo * info, VAImage * image, gboolean derived)
1131 gboolean ret = TRUE;
1133 if (image->image_id != VA_INVALID_ID)
1136 if (!_sync_surface (display, surface))
1140 ret = _get_derive_image (display, surface, image);
1142 ret = _create_image (display, GST_VIDEO_INFO_FORMAT (info),
1143 GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info), image);
/* Probes the driver's actual image layout for the configured format by
 * creating a throwaway surface + image, then copies the reported
 * per-plane offsets/strides and total size into the allocator's info
 * and records whether the derived-image path is usable. */
1149 static inline gboolean
1150 _update_image_info (GstVaAllocator * va_allocator)
1152 VASurfaceID surface;
1153 VAImage image = {.image_id = VA_INVALID_ID, };
1157 /* Create a test surface first */
1158 if (!_create_surfaces (va_allocator->display, va_allocator->rt_format,
1159 va_allocator->fourcc, GST_VIDEO_INFO_WIDTH (&va_allocator->info),
1160 GST_VIDEO_INFO_HEIGHT (&va_allocator->info), va_allocator->usage_hint,
1161 NULL, &surface, 1)) {
1162 GST_ERROR_OBJECT (va_allocator, "Failed to create a test surface");
1166 GST_DEBUG_OBJECT (va_allocator, "Created surface %#x [%dx%d]", surface,
1167 GST_VIDEO_INFO_WIDTH (&va_allocator->info),
1168 GST_VIDEO_INFO_HEIGHT (&va_allocator->info));
1170 /* Try derived first, but different formats can never derive */
1171 if (va_allocator->surface_format == va_allocator->img_format) {
1173 if (_get_derive_image (va_allocator->display, surface, &image))
1177 /* Then we try to create a image. */
1179 if (!_create_image (va_allocator->display, va_allocator->img_format,
1180 GST_VIDEO_INFO_WIDTH (&va_allocator->info),
1181 GST_VIDEO_INFO_HEIGHT (&va_allocator->info), &image)) {
1182 _destroy_surfaces (va_allocator->display, &surface, 1);
1187 va_allocator->use_derived = derived;
/* propagate the driver's real layout into our cached video info */
1189 for (i = 0; i < image.num_planes; i++) {
1190 GST_VIDEO_INFO_PLANE_OFFSET (&va_allocator->info, i) = image.offsets[i];
1191 GST_VIDEO_INFO_PLANE_STRIDE (&va_allocator->info, i) = image.pitches[i];
1194 GST_VIDEO_INFO_SIZE (&va_allocator->info) = image.data_size;
/* drop the probe image and surface */
1196 _destroy_image (va_allocator->display, image.image_id);
1197 _destroy_surfaces (va_allocator->display, &surface, 1);
/* Core mapping logic (caller holds mem->lock).  Re-entrant maps with
 * the same flags reuse the existing pointer; GST_MAP_VA returns the
 * surface id itself; otherwise a VAImage is ensured (derived or
 * created+vaGetImage) and its buffer mapped for CPU access. */
1203 _va_map_unlocked (GstVaMemory * mem, GstMapFlags flags)
1205 GstAllocator *allocator = GST_MEMORY_CAST (mem)->allocator;
1206 GstVaAllocator *va_allocator;
1207 GstVaDisplay *display;
1209 g_return_val_if_fail (mem->surface != VA_INVALID_ID, NULL);
1210 g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), NULL);
/* nested map: flags must match the first mapping */
1212 if (g_atomic_int_get (&mem->map_count) > 0) {
1213 if (mem->prev_mapflags != flags || !mem->mapped_data)
1219 va_allocator = GST_VA_ALLOCATOR (allocator);
1220 display = va_allocator->display;
/* write maps need a writeback (vaPutImage) on unmap */
1222 if (flags & GST_MAP_WRITE) {
1223 mem->is_dirty = TRUE;
1224 } else { /* GST_MAP_READ only */
1225 mem->is_dirty = FALSE;
/* VA mapping: hand back the surface id, no CPU copy involved */
1228 if (flags & GST_MAP_VA) {
1229 mem->mapped_data = &mem->surface;
1233 if (!_ensure_image (display, mem->surface, &va_allocator->info, &mem->image,
1234 va_allocator->use_derived))
1237 mem->is_derived = va_allocator->use_derived;
/* non-derived path needs an explicit surface -> image copy */
1239 if (!mem->is_derived) {
1240 if (!_get_image (display, mem->surface, &mem->image))
1244 if (!_map_buffer (display, mem->image.buf, &mem->mapped_data))
1249 mem->prev_mapflags = flags;
1250 g_atomic_int_add (&mem->map_count, 1);
1251 return mem->mapped_data;
/* error path: tear down the image created above */
1256 _destroy_image (display, mem->image.image_id);

/* GstMemoryMapFunction: serializes _va_map_unlocked via mem->lock. */
1263 _va_map (GstVaMemory * mem, gsize maxsize, GstMapFlags flags)
1267 g_mutex_lock (&mem->lock);
1268 data = _va_map_unlocked (mem, flags);
1269 g_mutex_unlock (&mem->lock);

/* Core unmapping logic (caller holds mem->lock).  Only the last unmap
 * does real work; dirty non-derived images are written back with
 * vaPutImage before the buffer is unmapped and the image destroyed. */
1275 _va_unmap_unlocked (GstVaMemory * mem)
1277 GstAllocator *allocator = GST_MEMORY_CAST (mem)->allocator;
1278 GstVaDisplay *display;
1279 gboolean ret = TRUE;
1281 if (!g_atomic_int_dec_and_test (&mem->map_count))
/* GST_MAP_VA mappings have nothing to release */
1284 if (mem->prev_mapflags & GST_MAP_VA)
1287 display = GST_VA_ALLOCATOR (allocator)->display;
1289 if (mem->image.image_id != VA_INVALID_ID) {
1290 if (mem->is_dirty && !mem->is_derived) {
1291 ret = _put_image (display, mem->surface, &mem->image);
1292 mem->is_dirty = FALSE;
1294 /* XXX(victor): if is derived and is dirty, create another surface
1295 * and replace it in mem */
1298 ret &= _unmap_buffer (display, mem->image.buf);
1299 ret &= _destroy_image (display, mem->image.image_id);

/* GstMemoryUnmapFunction: serializes _va_unmap_unlocked via mem->lock. */
1308 _va_unmap (GstVaMemory * mem)
1312 g_mutex_lock (&mem->lock);
1313 ret = _va_unmap_unlocked (mem);
1314 g_mutex_unlock (&mem->lock);
/* GstMemoryShareFunction: creates a read-only sub-memory that points at
 * the real parent's VA surface; the sub-memory never owns the surface
 * (see _va_free's mem->parent check). */
1320 _va_share (GstMemory * mem, gssize offset, gssize size)
1322 GstVaMemory *vamem = (GstVaMemory *) mem;
1325 GST_DEBUG ("%p: share %" G_GSSIZE_FORMAT ", %" G_GSIZE_FORMAT, mem, offset,
1328 /* find real parent */
1329 if ((parent = vamem->mem.parent) == NULL)
1330 parent = (GstMemory *) vamem;
/* size < 0 means "to the end" per GstMemory share semantics */
1333 size = mem->maxsize - offset;
1335 sub = g_slice_new (GstVaMemory);
1336 /* the shared memory is always readonly */
1337 gst_memory_init (GST_MEMORY_CAST (sub), GST_MINI_OBJECT_FLAGS (parent) |
1338 GST_MINI_OBJECT_FLAG_LOCK_READONLY, vamem->mem.allocator, parent,
1339 vamem->mem.maxsize, vamem->mem.align, vamem->mem.offset + offset, size);
/* the sub-memory maps through the same surface */
1341 sub->surface = vamem->surface;
1342 sub->surface_format = vamem->surface_format;
1346 g_atomic_int_set (&sub->map_count, 0);
1347 g_mutex_init (&sub->lock);
1349 return GST_MEMORY_CAST (sub);
1353 gst_va_allocator_init (GstVaAllocator * self)
1355 GstAllocator *allocator = GST_ALLOCATOR (self);
1357 allocator->mem_type = GST_ALLOCATOR_VASURFACE;
1358 allocator->mem_map = (GstMemoryMapFunction) _va_map;
1359 allocator->mem_unmap = (GstMemoryUnmapFunction) _va_unmap;
1360 allocator->mem_share = _va_share;
1362 self->use_derived = TRUE;
1364 gst_va_memory_pool_init (&self->pool);
1366 GST_OBJECT_FLAG_SET (self, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
1370 gst_va_memory_release (GstMiniObject * mini_object)
1372 GstMemory *mem = GST_MEMORY_CAST (mini_object);
1373 GstVaAllocator *self = GST_VA_ALLOCATOR (mem->allocator);
1375 GST_LOG ("releasing %p: surface %#x", mem, gst_va_memory_get_surface (mem));
1377 gst_va_memory_pool_push (&self->pool, mem);
1379 /* Keep last in case we are holding on the last allocator ref */
1380 gst_object_unref (mem->allocator);
1382 /* don't call mini_object's free */
1387 gst_va_allocator_alloc (GstAllocator * allocator)
1389 GstVaAllocator *self;
1391 VASurfaceID surface;
1393 g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), NULL);
1395 self = GST_VA_ALLOCATOR (allocator);
1397 if (self->rt_format == 0) {
1398 GST_ERROR_OBJECT (self, "Unknown fourcc or chroma format");
1402 if (!_create_surfaces (self->display, self->rt_format, self->fourcc,
1403 GST_VIDEO_INFO_WIDTH (&self->info),
1404 GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
1408 mem = g_slice_new (GstVaMemory);
1410 mem->surface = surface;
1411 mem->surface_format = self->surface_format;
1413 _reset_mem (mem, allocator, GST_VIDEO_INFO_SIZE (&self->info));
1415 GST_MINI_OBJECT (mem)->dispose = gst_va_memory_release;
1416 gst_va_memory_pool_surface_inc (&self->pool);
1418 GST_LOG_OBJECT (self, "Created surface %#x [%dx%d]", mem->surface,
1419 GST_VIDEO_INFO_WIDTH (&self->info), GST_VIDEO_INFO_HEIGHT (&self->info));
1421 return GST_MEMORY_CAST (mem);
1425 gst_va_allocator_new (GstVaDisplay * display, GArray * surface_formats)
1427 GstVaAllocator *self;
1429 g_return_val_if_fail (GST_IS_VA_DISPLAY (display), NULL);
1431 self = g_object_new (GST_TYPE_VA_ALLOCATOR, NULL);
1432 self->display = gst_object_ref (display);
1433 self->surface_formats = surface_formats;
1434 gst_object_ref_sink (self);
1436 return GST_ALLOCATOR (self);
1440 gst_va_allocator_setup_buffer (GstAllocator * allocator, GstBuffer * buffer)
1442 GstMemory *mem = gst_va_allocator_alloc (allocator);
1446 gst_buffer_append_memory (buffer, mem);
1451 gst_va_allocator_prepare_buffer_unlocked (GstVaAllocator * self,
1455 VASurfaceID surface;
1457 mem = gst_va_memory_pool_pop (&self->pool);
1459 return VA_INVALID_ID;
1461 gst_object_ref (mem->allocator);
1462 surface = gst_va_memory_get_surface (mem);
1463 gst_buffer_append_memory (buffer, mem);
1465 GST_LOG ("buffer %p: memory %p - surface %#x", buffer, mem, surface);
1471 gst_va_allocator_prepare_buffer (GstAllocator * allocator, GstBuffer * buffer)
1473 GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
1474 VASurfaceID surface;
1476 GST_VA_MEMORY_POOL_LOCK (&self->pool);
1477 surface = gst_va_allocator_prepare_buffer_unlocked (self, buffer);
1478 GST_VA_MEMORY_POOL_UNLOCK (&self->pool);
1480 return (surface != VA_INVALID_ID);
1484 gst_va_allocator_flush (GstAllocator * allocator)
1486 GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
1488 gst_va_memory_pool_flush (&self->pool, self->display);
1492 gst_va_allocator_try (GstAllocator * allocator)
1494 GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
1497 self->rt_format = 0;
1498 self->use_derived = FALSE;
1499 self->img_format = GST_VIDEO_INFO_FORMAT (&self->info);
1501 self->surface_format =
1502 gst_va_video_surface_format_from_image_format (self->img_format,
1503 self->surface_formats);
1504 if (self->surface_format == GST_VIDEO_FORMAT_UNKNOWN) {
1505 /* try a surface without fourcc but rt_format only */
1507 self->rt_format = gst_va_chroma_from_video_format (self->img_format);
1509 self->fourcc = gst_va_fourcc_from_video_format (self->surface_format);
1510 self->rt_format = gst_va_chroma_from_video_format (self->surface_format);
1513 if (self->rt_format == 0) {
1514 GST_ERROR_OBJECT (allocator, "Unsupported image format: %s",
1515 gst_video_format_to_string (self->img_format));
1519 if (!_update_image_info (self)) {
1520 GST_ERROR_OBJECT (allocator, "Failed to update allocator info");
1524 GST_INFO_OBJECT (self,
1525 "va allocator info, surface format: %s, image format: %s, "
1526 "use derived: %s, rt format: 0x%x, fourcc: %" GST_FOURCC_FORMAT,
1527 (self->surface_format == GST_VIDEO_FORMAT_UNKNOWN) ? "unknown"
1528 : gst_video_format_to_string (self->surface_format),
1529 gst_video_format_to_string (self->img_format),
1530 self->use_derived ? "true" : "false", self->rt_format,
1531 GST_FOURCC_ARGS (self->fourcc));
1536 gst_va_allocator_set_format (GstAllocator * allocator, GstVideoInfo * info,
1539 GstVaAllocator *self;
1542 g_return_val_if_fail (GST_IS_VA_ALLOCATOR (allocator), FALSE);
1543 g_return_val_if_fail (info, FALSE);
1545 self = GST_VA_ALLOCATOR (allocator);
1547 if (gst_va_memory_pool_surface_count (&self->pool) != 0) {
1548 if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_INFO_FORMAT (&self->info)
1549 && GST_VIDEO_INFO_WIDTH (info) == GST_VIDEO_INFO_WIDTH (&self->info)
1550 && GST_VIDEO_INFO_HEIGHT (info) == GST_VIDEO_INFO_HEIGHT (&self->info)
1551 && usage_hint == self->usage_hint) {
1552 *info = self->info; /* update callee info (offset & stride) */
1558 self->usage_hint = usage_hint;
1561 ret = gst_va_allocator_try (allocator);
1569 gst_va_allocator_get_format (GstAllocator * allocator, GstVideoInfo * info,
1572 GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
1574 if (GST_VIDEO_INFO_FORMAT (&self->info) == GST_VIDEO_FORMAT_UNKNOWN)
1580 *usage_hint = self->usage_hint;
1585 /*============ Utilities =====================================================*/
1588 gst_va_memory_get_surface (GstMemory * mem)
1590 VASurfaceID surface = VA_INVALID_ID;
1592 if (!mem->allocator)
1593 return VA_INVALID_ID;
1595 if (GST_IS_DMABUF_ALLOCATOR (mem->allocator)) {
1596 GstVaBufferSurface *buf;
1598 buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
1599 gst_va_buffer_surface_quark ());
1601 surface = buf->surface;
1602 } else if (GST_IS_VA_ALLOCATOR (mem->allocator)) {
1603 GstVaMemory *va_mem = (GstVaMemory *) mem;
1604 surface = va_mem->surface;
1611 gst_va_buffer_get_surface (GstBuffer * buffer)
1615 mem = gst_buffer_peek_memory (buffer, 0);
1617 return VA_INVALID_ID;
1619 return gst_va_memory_get_surface (mem);
1623 gst_va_buffer_create_aux_surface (GstBuffer * buffer)
1626 VASurfaceID surface = VA_INVALID_ID;
1627 GstVaDisplay *display = NULL;
1628 GstVideoFormat format;
1630 GstVaBufferSurface *surface_buffer;
1632 mem = gst_buffer_peek_memory (buffer, 0);
1636 /* Already created it. */
1637 surface_buffer = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
1638 gst_va_buffer_aux_surface_quark ());
1642 if (!mem->allocator)
1645 if (GST_IS_VA_DMABUF_ALLOCATOR (mem->allocator)) {
1646 GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (mem->allocator);
1647 guint32 fourcc, rt_format;
1649 format = GST_VIDEO_INFO_FORMAT (&self->info);
1650 fourcc = gst_va_fourcc_from_video_format (format);
1651 rt_format = gst_va_chroma_from_video_format (format);
1652 if (fourcc == 0 || rt_format == 0) {
1653 GST_ERROR_OBJECT (self, "Unsupported format: %s",
1654 gst_video_format_to_string (GST_VIDEO_INFO_FORMAT (&self->info)));
1658 display = self->display;
1659 width = GST_VIDEO_INFO_WIDTH (&self->info);
1660 height = GST_VIDEO_INFO_HEIGHT (&self->info);
1661 if (!_create_surfaces (self->display, rt_format, fourcc,
1662 GST_VIDEO_INFO_WIDTH (&self->info),
1663 GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
1666 } else if (GST_IS_VA_ALLOCATOR (mem->allocator)) {
1667 GstVaAllocator *self = GST_VA_ALLOCATOR (mem->allocator);
1669 if (self->rt_format == 0) {
1670 GST_ERROR_OBJECT (self, "Unknown fourcc or chroma format");
1674 display = self->display;
1675 width = GST_VIDEO_INFO_WIDTH (&self->info);
1676 height = GST_VIDEO_INFO_HEIGHT (&self->info);
1677 format = GST_VIDEO_INFO_FORMAT (&self->info);
1678 if (!_create_surfaces (self->display, self->rt_format, self->fourcc,
1679 GST_VIDEO_INFO_WIDTH (&self->info),
1680 GST_VIDEO_INFO_HEIGHT (&self->info), self->usage_hint, NULL,
1684 g_assert_not_reached ();
1687 if (!display || surface == VA_INVALID_ID)
1690 surface_buffer = gst_va_buffer_surface_new (surface, format, width, height);
1691 surface_buffer->display = gst_object_ref (display);
1692 g_atomic_int_add (&surface_buffer->ref_count, 1);
1694 gst_mini_object_set_qdata (GST_MINI_OBJECT (mem),
1695 gst_va_buffer_aux_surface_quark (), surface_buffer,
1696 gst_va_buffer_surface_unref);
1702 gst_va_buffer_get_aux_surface (GstBuffer * buffer)
1704 GstVaBufferSurface *surface_buffer;
1707 mem = gst_buffer_peek_memory (buffer, 0);
1709 return VA_INVALID_ID;
1711 surface_buffer = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
1712 gst_va_buffer_aux_surface_quark ());
1713 if (!surface_buffer)
1714 return VA_INVALID_ID;
1716 /* No one increments it, and its lifetime is the same with the
1718 g_assert (g_atomic_int_get (&surface_buffer->ref_count) == 1);
1720 return surface_buffer->surface;