2 * Copyright (C) 2011 Wim Taymans <wim.taymans@gmail.be>
4 * gstmemory.c: memory block handling
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 * Boston, MA 02111-1307, USA.
24 * @short_description: refcounted wrapper for memory blocks
25 * @see_also: #GstBuffer
27 * GstMemory is a lightweight refcounted object that wraps a region of memory.
28 * They are typically used to manage the data of a #GstBuffer.
30 * A GstMemory object has an allocated region of memory of maxsize. The maximum
31 * size does not change during the lifetime of the memory object. The memory
32 * also has an offset and size property that specifies the valid range of memory
33 * in the allocated region.
35 * Memory is usually created by allocators with a gst_allocator_alloc()
36 * method call. When NULL is used as the allocator, the default allocator will
39 * New allocators can be registered with gst_allocator_register().
40 * Allocators are identified by name and can be retrieved with
41 * gst_allocator_find(). gst_allocator_set_default() can be used to change the
44 * New memory can be created with gst_memory_new_wrapped() that wraps the memory
45 * allocated elsewhere.
47 * Refcounting of the memory block is performed with gst_memory_ref() and
50 * The size of the memory can be retrieved and changed with
51 * gst_memory_get_sizes() and gst_memory_resize() respectively.
53 * Getting access to the data of the memory is performed with gst_memory_map().
54 * The call will return a pointer to offset bytes into the region of memory.
55 * After the memory access is completed, gst_memory_unmap() should be called.
57 * Memory can be copied with gst_memory_copy(), which will return a writable
58 * copy. gst_memory_share() will create a new memory block that shares the
59 * memory with an existing memory block at a custom offset and with a custom
62 * Memory can be efficiently merged when gst_memory_is_span() returns TRUE.
64 * Last reviewed on 2012-03-28 (0.11.3)
71 #include "gst_private.h"
72 #include "gstmemory.h"
74 GST_DEFINE_MINI_OBJECT_TYPE (GstMemory, gst_memory);
76 GST_DEFINE_MINI_OBJECT_TYPE (GstAllocator, gst_allocator);
78 G_DEFINE_BOXED_TYPE (GstAllocationParams, gst_allocation_params,
79 (GBoxedCopyFunc) gst_allocation_params_copy,
80 (GBoxedFreeFunc) gst_allocation_params_free);
82 #if defined(MEMORY_ALIGNMENT_MALLOC)
83 size_t gst_memory_alignment = 7;
84 #elif defined(MEMORY_ALIGNMENT_PAGESIZE)
85 /* we fill this in in the _init method */
86 size_t gst_memory_alignment = 0;
87 #elif defined(MEMORY_ALIGNMENT)
88 size_t gst_memory_alignment = MEMORY_ALIGNMENT - 1;
90 #error "No memory alignment configured"
91 size_t gst_memory_alignment = 0;
96 GstMiniObject mini_object;
101 GDestroyNotify notify;
104 /* default memory implementation */
111 GDestroyNotify notify;
114 /* the default allocator */
115 static GstAllocator *_default_allocator;
117 /* our predefined allocators */
118 static GstAllocator *_default_mem_impl;
/* mem->state layout: low bits hold the GstLockFlags access mode,
 * bits below SHARE_ONE count outstanding locks, bits from SHARE_ONE
 * up count exclusive (shared) references. */
#define SHARE_ONE (1 << 16)
#define LOCK_ONE (GST_LOCK_FLAG_LAST)
#define FLAG_MASK (GST_LOCK_FLAG_LAST - 1)
#define LOCK_MASK ((SHARE_ONE - 1) - FLAG_MASK)
#define LOCK_FLAG_MASK (SHARE_ONE - 1)
127 _gst_memory_copy (GstMemory * mem)
129 return gst_memory_copy (mem, 0, -1);
133 _gst_memory_free (GstMemory * mem)
135 /* there should be no outstanding mappings */
136 g_return_if_fail ((g_atomic_int_get (&mem->state) & LOCK_MASK) < 4);
137 mem->allocator->info.mem_free (mem);
140 /* initialize the fields */
142 _default_mem_init (GstMemoryDefault * mem, GstMemoryFlags flags,
143 GstMemory * parent, gsize slice_size, gpointer data,
144 gsize maxsize, gsize offset, gsize size, gsize align,
145 gpointer user_data, GDestroyNotify notify)
147 gst_mini_object_init (GST_MINI_OBJECT_CAST (mem), GST_TYPE_MEMORY,
148 (GstMiniObjectCopyFunction) _gst_memory_copy, NULL,
149 (GstMiniObjectFreeFunction) _gst_memory_free);
151 mem->mem.mini_object.flags = flags;
153 mem->mem.allocator = _default_mem_impl;
154 mem->mem.parent = parent ? gst_memory_ref (parent) : NULL;
155 mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? GST_LOCK_FLAG_READ : 0);
156 mem->mem.state |= (flags & GST_MEMORY_FLAG_NO_SHARE ? SHARE_ONE : 0);
157 mem->mem.maxsize = maxsize;
158 mem->mem.align = align;
159 mem->mem.offset = offset;
160 mem->mem.size = size;
161 mem->slice_size = slice_size;
163 mem->user_data = user_data;
164 mem->notify = notify;
166 GST_CAT_DEBUG (GST_CAT_MEMORY, "new memory %p, maxsize:%" G_GSIZE_FORMAT
167 " offset:%" G_GSIZE_FORMAT " size:%" G_GSIZE_FORMAT, mem, maxsize,
171 /* create a new memory block that manages the given memory */
172 static GstMemoryDefault *
173 _default_mem_new (GstMemoryFlags flags, GstMemory * parent, gpointer data,
174 gsize maxsize, gsize offset, gsize size, gsize align, gpointer user_data,
175 GDestroyNotify notify)
177 GstMemoryDefault *mem;
180 slice_size = sizeof (GstMemoryDefault);
182 mem = g_slice_alloc (slice_size);
183 _default_mem_init (mem, flags, parent, slice_size,
184 data, maxsize, offset, size, align, user_data, notify);
189 /* allocate the memory and structure in one block */
190 static GstMemoryDefault *
191 _default_mem_new_block (GstMemoryFlags flags, gsize maxsize, gsize align,
192 gsize offset, gsize size)
194 GstMemoryDefault *mem;
195 gsize aoffset, slice_size, padding;
198 /* ensure configured alignment */
199 align |= gst_memory_alignment;
200 /* allocate more to compensate for alignment */
202 /* alloc header and data in one block */
203 slice_size = sizeof (GstMemoryDefault) + maxsize;
205 mem = g_slice_alloc (slice_size);
209 data = (guint8 *) mem + sizeof (GstMemoryDefault);
212 if ((aoffset = ((guintptr) data & align))) {
213 aoffset = (align + 1) - aoffset;
218 if (offset && (flags & GST_MEMORY_FLAG_ZERO_PREFIXED))
219 memset (data, 0, offset);
221 padding = maxsize - (offset + size);
222 if (padding && (flags & GST_MEMORY_FLAG_ZERO_PADDED))
223 memset (data + offset + size, 0, padding);
225 _default_mem_init (mem, flags, NULL, slice_size, data, maxsize,
226 offset, size, align, NULL, NULL);
232 _default_alloc_alloc (GstAllocator * allocator, gsize size,
233 GstAllocationParams * params, gpointer user_data)
235 gsize maxsize = size + params->prefix + params->padding;
237 return (GstMemory *) _default_mem_new_block (params->flags,
238 maxsize, params->align, params->prefix, size);
242 _default_mem_map (GstMemoryDefault * mem, gsize maxsize, GstMapFlags flags)
248 _default_mem_unmap (GstMemoryDefault * mem)
254 _default_mem_free (GstMemoryDefault * mem)
256 GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);
259 gst_memory_unref (mem->mem.parent);
262 mem->notify (mem->user_data);
264 g_slice_free1 (mem->slice_size, mem);
267 static GstMemoryDefault *
268 _default_mem_copy (GstMemoryDefault * mem, gssize offset, gsize size)
270 GstMemoryDefault *copy;
273 size = mem->mem.size > offset ? mem->mem.size - offset : 0;
276 _default_mem_new_block (0, mem->mem.maxsize, 0, mem->mem.offset + offset,
278 GST_CAT_DEBUG (GST_CAT_PERFORMANCE,
279 "memcpy %" G_GSIZE_FORMAT " memory %p -> %p", mem->mem.maxsize, mem,
281 memcpy (copy->data, mem->data, mem->mem.maxsize);
286 static GstMemoryDefault *
287 _default_mem_share (GstMemoryDefault * mem, gssize offset, gsize size)
289 GstMemoryDefault *sub;
292 /* find the real parent */
293 if ((parent = mem->mem.parent) == NULL)
294 parent = (GstMemory *) mem;
297 size = mem->mem.size - offset;
300 _default_mem_new (GST_MINI_OBJECT_FLAGS (parent), parent, mem->data,
301 mem->mem.maxsize, mem->mem.offset + offset, size, mem->mem.align, NULL,
308 _default_mem_is_span (GstMemoryDefault * mem1, GstMemoryDefault * mem2,
313 GstMemoryDefault *parent;
315 parent = (GstMemoryDefault *) mem1->mem.parent;
317 *offset = mem1->mem.offset - parent->mem.offset;
320 /* and memory is contiguous */
321 return mem1->data + mem1->mem.offset + mem1->mem.size ==
322 mem2->data + mem2->mem.offset;
326 _fallback_mem_copy (GstMemory * mem, gssize offset, gssize size)
329 GstMapInfo sinfo, dinfo;
330 GstAllocationParams params = { 0, 0, 0, mem->align, };
332 if (!gst_memory_map (mem, &sinfo, GST_MAP_READ))
336 size = sinfo.size > offset ? sinfo.size - offset : 0;
338 /* use the same allocator as the memory we copy */
339 copy = gst_allocator_alloc (mem->allocator, size, ¶ms);
340 if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
341 GST_CAT_WARNING (GST_CAT_MEMORY, "could not write map memory %p", copy);
342 gst_memory_unmap (mem, &sinfo);
346 GST_CAT_DEBUG (GST_CAT_PERFORMANCE,
347 "memcpy %" G_GSSIZE_FORMAT " memory %p -> %p", size, mem, copy);
348 memcpy (dinfo.data, sinfo.data + offset, size);
349 gst_memory_unmap (copy, &dinfo);
350 gst_memory_unmap (mem, &sinfo);
356 _fallback_mem_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
362 static GHashTable *allocators;
365 _priv_sysmem_notify (gpointer user_data)
367 g_warning ("The default memory allocator was freed!");
371 _priv_gst_memory_initialize (void)
373 static const GstMemoryInfo _mem_info = {
374 GST_ALLOCATOR_SYSMEM,
375 (GstAllocatorAllocFunction) _default_alloc_alloc,
376 (GstMemoryMapFunction) _default_mem_map,
377 (GstMemoryUnmapFunction) _default_mem_unmap,
378 (GstMemoryFreeFunction) _default_mem_free,
379 (GstMemoryCopyFunction) _default_mem_copy,
380 (GstMemoryShareFunction) _default_mem_share,
381 (GstMemoryIsSpanFunction) _default_mem_is_span,
384 g_rw_lock_init (&lock);
385 allocators = g_hash_table_new (g_str_hash, g_str_equal);
387 #ifdef HAVE_GETPAGESIZE
388 #ifdef MEMORY_ALIGNMENT_PAGESIZE
389 gst_memory_alignment = getpagesize () - 1;
393 GST_CAT_DEBUG (GST_CAT_MEMORY, "memory alignment: %" G_GSIZE_FORMAT,
394 gst_memory_alignment);
396 _default_mem_impl = gst_allocator_new (&_mem_info, NULL, _priv_sysmem_notify);
398 _default_allocator = gst_allocator_ref (_default_mem_impl);
399 gst_allocator_register (GST_ALLOCATOR_SYSMEM,
400 gst_allocator_ref (_default_mem_impl));
404 * gst_memory_new_wrapped:
405 * @flags: #GstMemoryFlags
406 * @data: data to wrap
407 * @maxsize: allocated size of @data
408 * @offset: offset in @data
409 * @size: size of valid data
410 * @user_data: user_data
411 * @notify: called with @user_data when the memory is freed
413 * Allocate a new memory block that wraps the given @data.
415 * The prefix/padding must be filled with 0 if @flags contains
416 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
418 * Returns: a new #GstMemory.
421 gst_memory_new_wrapped (GstMemoryFlags flags, gpointer data,
422 gsize maxsize, gsize offset, gsize size, gpointer user_data,
423 GDestroyNotify notify)
425 GstMemoryDefault *mem;
427 g_return_val_if_fail (data != NULL, NULL);
428 g_return_val_if_fail (offset + size <= maxsize, NULL);
431 _default_mem_new (flags, NULL, data, maxsize, offset, size, 0, user_data,
434 return (GstMemory *) mem;
438 * gst_memory_is_exclusive:
441 * Check if the current ref to @mem is exclusive, this means that no other
442 * references exist other than @mem.
445 gst_memory_is_exclusive (GstMemory * mem)
447 g_return_val_if_fail (mem != NULL, FALSE);
449 return GST_MINI_OBJECT_REFCOUNT_VALUE (mem) == 1;
453 * gst_memory_get_sizes:
455 * @offset: pointer to offset
456 * @maxsize: pointer to maxsize
458 * Get the current @size, @offset and @maxsize of @mem.
460 * Returns: the current sizes of @mem
463 gst_memory_get_sizes (GstMemory * mem, gsize * offset, gsize * maxsize)
465 g_return_val_if_fail (mem != NULL, 0);
468 *offset = mem->offset;
470 *maxsize = mem->maxsize;
478 * @offset: a new offset
481 * Resize the memory region. @mem should be writable and offset + size should be
482 * less than the maxsize of @mem.
484 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED will be
485 * cleared when offset or padding is increased respectively.
488 gst_memory_resize (GstMemory * mem, gssize offset, gsize size)
490 g_return_if_fail (mem != NULL);
491 g_return_if_fail (offset >= 0 || mem->offset >= -offset);
492 g_return_if_fail (size + mem->offset + offset <= mem->maxsize);
494 /* if we increase the prefix, we can't guarantee it is still 0 filled */
495 if ((offset > 0) && GST_MEMORY_IS_ZERO_PREFIXED (mem))
496 GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PREFIXED);
498 /* if we increase the padding, we can't guarantee it is still 0 filled */
499 if ((offset + size < mem->size) && GST_MEMORY_IS_ZERO_PADDED (mem))
500 GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PADDED);
502 mem->offset += offset;
509 * @flags: #GstLockFlags
511 * Lock the memory with the specified access mode in @flags.
513 * Returns: %TRUE if the memory could be locked.
516 gst_memory_lock (GstMemory * mem, GstLockFlags flags)
518 gint access_mode, state, newstate;
520 access_mode = flags & FLAG_MASK;
523 newstate = state = g_atomic_int_get (&mem->state);
525 GST_CAT_TRACE (GST_CAT_MEMORY, "lock %p: state %08x, access_mode %d",
526 mem, state, access_mode);
528 if (access_mode & GST_LOCK_FLAG_EXCLUSIVE) {
530 newstate += SHARE_ONE;
531 access_mode &= ~GST_LOCK_FLAG_EXCLUSIVE;
535 if ((state & LOCK_FLAG_MASK) == 0) {
536 /* shared counter > 1 and write access */
537 if (state > SHARE_ONE && access_mode & GST_LOCK_FLAG_WRITE)
539 /* nothing mapped, set access_mode */
540 newstate |= access_mode;
542 /* access_mode must match */
543 if ((state & access_mode) != access_mode)
546 /* increase refcount */
547 newstate += LOCK_ONE;
549 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
555 GST_CAT_DEBUG (GST_CAT_MEMORY, "lock failed %p: state %08x, access_mode %d",
556 mem, state, access_mode);
564 * @flags: #GstLockFlags
566 * Unlock the memory with the specified access mode in @flags.
569 gst_memory_unlock (GstMemory * mem, GstLockFlags flags)
571 gint access_mode, state, newstate;
573 access_mode = flags & FLAG_MASK;
576 newstate = state = g_atomic_int_get (&mem->state);
578 GST_CAT_TRACE (GST_CAT_MEMORY, "unlock %p: state %08x, access_mode %d",
579 mem, state, access_mode);
581 if (access_mode & GST_LOCK_FLAG_EXCLUSIVE) {
583 g_return_if_fail (state >= SHARE_ONE);
584 newstate = state - SHARE_ONE;
585 access_mode &= ~GST_LOCK_FLAG_EXCLUSIVE;
589 g_return_if_fail ((state & access_mode) == access_mode);
590 /* decrease the refcount */
591 newstate -= LOCK_ONE;
592 /* last refcount, unset access_mode */
593 if ((newstate & LOCK_FLAG_MASK) == access_mode)
594 newstate &= ~LOCK_FLAG_MASK;
596 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
600 * gst_memory_make_mapped:
601 * @mem: (transfer full): a #GstMemory
602 * @info: (out): pointer for info
603 * @flags: mapping flags
605 * Create a #GstMemory object that is mapped with @flags. If @mem is mappable
606 * with @flags, this function returns the mapped @mem directly. Otherwise a
607 * mapped copy of @mem is returned.
609 * This function takes ownership of old @mem and returns a reference to a new
612 * Returns: (transfer full): a #GstMemory object mapped with @flags or NULL when
613 * a mapping is not possible.
616 gst_memory_make_mapped (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
620 if (gst_memory_map (mem, info, flags)) {
623 result = gst_memory_copy (mem, 0, -1);
624 gst_memory_unref (mem);
629 if (!gst_memory_map (result, info, flags))
637 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
642 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
644 gst_memory_unref (result);
652 * @info: (out): pointer for info
653 * @flags: mapping flags
655 * Fill @info with the pointer and sizes of the memory in @mem that can be
656 * accessed according to @flags.
658 * This function can return %FALSE for various reasons:
659 * - the memory backed by @mem is not accessible with the given @flags.
660 * - the memory was already mapped with a different mapping.
662 * @info and its contents remain valid for as long as @mem is valid and
663 * until gst_memory_unmap() is called.
665 * For each gst_memory_map() call, a corresponding gst_memory_unmap() call
668 * Returns: %TRUE if the map operation was successful.
671 gst_memory_map (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
673 g_return_val_if_fail (mem != NULL, FALSE);
674 g_return_val_if_fail (info != NULL, FALSE);
676 if (!gst_memory_lock (mem, flags))
679 info->data = mem->allocator->info.mem_map (mem, mem->maxsize, flags);
681 if (G_UNLIKELY (info->data == NULL))
686 info->size = mem->size;
687 info->maxsize = mem->maxsize - mem->offset;
688 info->data = info->data + mem->offset;
695 GST_CAT_DEBUG (GST_CAT_MEMORY, "mem %p: lock %d failed", mem, flags);
700 /* something went wrong, restore the orginal state again */
701 GST_CAT_ERROR (GST_CAT_MEMORY, "mem %p: map failed", mem);
702 gst_memory_unlock (mem, flags);
710 * @info: a #GstMapInfo
712 * Release the memory obtained with gst_memory_map()
715 gst_memory_unmap (GstMemory * mem, GstMapInfo * info)
717 g_return_if_fail (mem != NULL);
718 g_return_if_fail (info != NULL);
719 g_return_if_fail (info->memory == mem);
720 /* there must be a ref */
721 g_return_if_fail (g_atomic_int_get (&mem->state) >= 4);
723 mem->allocator->info.mem_unmap (mem);
724 gst_memory_unlock (mem, info->flags);
730 * @offset: an offset to copy
731 * @size: size to copy or -1 to copy all bytes from offset
733 * Return a copy of @size bytes from @mem starting from @offset. This copy is
734 * guaranteed to be writable. @size can be set to -1 to return a copy all bytes
737 * Returns: a new #GstMemory.
740 gst_memory_copy (GstMemory * mem, gssize offset, gssize size)
744 g_return_val_if_fail (mem != NULL, NULL);
746 copy = mem->allocator->info.mem_copy (mem, offset, size);
754 * @offset: an offset to share
755 * @size: size to share or -1 to share bytes from offset
757 * Return a shared copy of @size bytes from @mem starting from @offset. No
758 * memory copy is performed and the memory region is simply shared. The result
759 * is guaranteed to be not-writable. @size can be set to -1 to return a share
760 * all bytes from @offset.
762 * Returns: a new #GstMemory.
765 gst_memory_share (GstMemory * mem, gssize offset, gssize size)
769 g_return_val_if_fail (mem != NULL, NULL);
770 g_return_val_if_fail (!GST_MEMORY_FLAG_IS_SET (mem, GST_MEMORY_FLAG_NO_SHARE),
773 shared = mem->allocator->info.mem_share (mem, offset, size);
779 * gst_memory_is_span:
780 * @mem1: a #GstMemory
781 * @mem2: a #GstMemory
782 * @offset: a pointer to a result offset
784 * Check if @mem1 and mem2 share the memory with a common parent memory object
785 * and that the memory is contiguous.
787 * If this is the case, the memory of @mem1 and @mem2 can be merged
788 * efficiently by performing gst_memory_share() on the parent object from
789 * the returned @offset.
791 * Returns: %TRUE if the memory is contiguous and of a common parent.
794 gst_memory_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
796 g_return_val_if_fail (mem1 != NULL, FALSE);
797 g_return_val_if_fail (mem2 != NULL, FALSE);
799 /* need to have the same allocators */
800 if (mem1->allocator != mem2->allocator)
803 /* need to have the same parent */
804 if (mem1->parent == NULL || mem1->parent != mem2->parent)
807 /* and memory is contiguous */
808 if (!mem1->allocator->info.mem_is_span (mem1, mem2, offset))
815 _gst_allocator_free (GstAllocator * allocator)
817 if (allocator->notify)
818 allocator->notify (allocator->user_data);
820 g_slice_free1 (sizeof (GstAllocator), allocator);
823 static GstAllocator *
824 _gst_allocator_copy (GstAllocator * allocator)
826 return gst_allocator_ref (allocator);
831 * @info: a #GstMemoryInfo
832 * @user_data: user data
833 * @notify: a #GDestroyNotify for @user_data
835 * Create a new memory allocator with @info and @user_data.
837 * All functions in @info are mandatory exept the copy and is_span
838 * functions, which will have a default implementation when left NULL.
840 * The @user_data will be passed to all calls of the alloc function. @notify
841 * will be called with @user_data when the allocator is freed.
843 * Returns: a new #GstAllocator.
846 gst_allocator_new (const GstMemoryInfo * info, gpointer user_data,
847 GDestroyNotify notify)
849 GstAllocator *allocator;
851 g_return_val_if_fail (info != NULL, NULL);
852 g_return_val_if_fail (info->alloc != NULL, NULL);
853 g_return_val_if_fail (info->mem_map != NULL, NULL);
854 g_return_val_if_fail (info->mem_unmap != NULL, NULL);
855 g_return_val_if_fail (info->mem_free != NULL, NULL);
856 g_return_val_if_fail (info->mem_share != NULL, NULL);
858 allocator = g_slice_new0 (GstAllocator);
860 gst_mini_object_init (GST_MINI_OBJECT_CAST (allocator), GST_TYPE_ALLOCATOR,
861 (GstMiniObjectCopyFunction) _gst_allocator_copy, NULL,
862 (GstMiniObjectFreeFunction) _gst_allocator_free);
864 allocator->info = *info;
865 allocator->user_data = user_data;
866 allocator->notify = notify;
868 #define INSTALL_FALLBACK(_t) \
869 if (allocator->info._t == NULL) allocator->info._t = _fallback_ ##_t;
870 INSTALL_FALLBACK (mem_copy);
871 INSTALL_FALLBACK (mem_is_span);
872 #undef INSTALL_FALLBACK
874 GST_CAT_DEBUG (GST_CAT_MEMORY, "new allocator %p", allocator);
880 * gst_allocator_get_memory_type:
881 * @allocator: a #GstAllocator
883 * Get the memory type allocated by this allocator
885 * Returns: the memory type provided by @allocator
888 gst_allocator_get_memory_type (GstAllocator * allocator)
890 g_return_val_if_fail (allocator != NULL, NULL);
892 return allocator->info.mem_type;
896 * gst_allocator_register:
897 * @name: the name of the allocator
898 * @allocator: (transfer full): #GstAllocator
900 * Registers the memory @allocator with @name. This function takes ownership of
904 gst_allocator_register (const gchar * name, GstAllocator * allocator)
906 g_return_if_fail (name != NULL);
907 g_return_if_fail (allocator != NULL);
909 GST_CAT_DEBUG (GST_CAT_MEMORY, "registering allocator %p with name \"%s\"",
912 g_rw_lock_writer_lock (&lock);
913 g_hash_table_insert (allocators, (gpointer) name, (gpointer) allocator);
914 g_rw_lock_writer_unlock (&lock);
918 * gst_allocator_find:
919 * @name: the name of the allocator
921 * Find a previously registered allocator with @name. When @name is NULL, the
922 * default allocator will be returned.
924 * Returns: (transfer full): a #GstAllocator or NULL when the allocator with @name was not
925 * registered. Use gst_allocator_unref() to release the allocator after usage.
928 gst_allocator_find (const gchar * name)
930 GstAllocator *allocator;
932 g_rw_lock_reader_lock (&lock);
934 allocator = g_hash_table_lookup (allocators, (gconstpointer) name);
936 allocator = _default_allocator;
939 gst_allocator_ref (allocator);
940 g_rw_lock_reader_unlock (&lock);
946 * gst_allocator_set_default:
947 * @allocator: (transfer full): a #GstAllocator
949 * Set the default allocator. This function takes ownership of @allocator.
952 gst_allocator_set_default (GstAllocator * allocator)
955 g_return_if_fail (allocator != NULL);
957 g_rw_lock_writer_lock (&lock);
958 old = _default_allocator;
959 _default_allocator = allocator;
960 g_rw_lock_writer_unlock (&lock);
963 gst_allocator_unref (old);
967 * gst_allocation_params_init:
968 * @params: a #GstAllocationParams
970 * Initialize @params to its default values
973 gst_allocation_params_init (GstAllocationParams * params)
975 g_return_if_fail (params != NULL);
977 memset (params, 0, sizeof (GstAllocationParams));
981 * gst_allocation_params_copy:
982 * @params: (transfer none): a #GstAllocationParams
984 * Create a copy of @params.
986 * Free-function: gst_allocation_params_free
988 * Returns: (transfer full): a new ##GstAllocationParams, free with
989 * gst_allocation_params_free().
991 GstAllocationParams *
992 gst_allocation_params_copy (const GstAllocationParams * params)
994 GstAllocationParams *result = NULL;
998 (GstAllocationParams *) g_slice_copy (sizeof (GstAllocationParams),
1005 * gst_allocation_params_free:
1006 * @params: (in) (transfer full): a #GstAllocationParams
1011 gst_allocation_params_free (GstAllocationParams * params)
1013 g_slice_free (GstAllocationParams, params);
1017 * gst_allocator_alloc:
1018 * @allocator: (transfer none) (allow-none): a #GstAllocator to use
1019 * @size: size of the visible memory area
1020 * @params: (transfer none) (allow-none): optional parameters
1022 * Use @allocator to allocate a new memory block with memory that is at least
1025 * The optional @params can specify the prefix and padding for the memory. If
1026 * NULL is passed, no flags, no extra prefix/padding and a default alignment is
1029 * The prefix/padding will be filled with 0 if flags contains
1030 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
1032 * When @allocator is NULL, the default allocator will be used.
1034 * The alignment in @params is given as a bitmask so that @align + 1 equals
1035 * the amount of bytes to align to. For example, to align to 8 bytes,
1036 * use an alignment of 7.
1038 * Returns: (transfer full): a new #GstMemory.
1041 gst_allocator_alloc (GstAllocator * allocator, gsize size,
1042 GstAllocationParams * params)
1045 static GstAllocationParams defparams = { 0, 0, 0, 0, };
1048 g_return_val_if_fail (((params->align + 1) & params->align) == 0, NULL);
1050 params = &defparams;
1053 if (allocator == NULL)
1054 allocator = _default_allocator;
1056 mem = allocator->info.alloc (allocator, size, params, allocator->user_data);