2 * Copyright (C) 2011 Wim Taymans <wim.taymans@gmail.be>
4 * gstmemory.c: memory block handling
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 * Boston, MA 02111-1307, USA.
24 * @short_description: refcounted wrapper for memory blocks
25 * @see_also: #GstBuffer
27 * GstMemory is a lightweight refcounted object that wraps a region of memory.
28 * They are typically used to manage the data of a #GstBuffer.
30 * A GstMemory object has an allocated region of memory of maxsize. The maximum
31 * size does not change during the lifetime of the memory object. The memory
32 * also has an offset and size property that specifies the valid range of memory
33 * in the allocated region.
35 * Memory is usually created by allocators with a gst_allocator_alloc()
36 * method call. When NULL is used as the allocator, the default allocator will
39 * New allocators can be registered with gst_allocator_register().
40 * Allocators are identified by name and can be retrieved with
41 * gst_allocator_find().
43 * New memory can be created with gst_memory_new_wrapped() that wraps the memory
44 * allocated elsewhere.
46 * Refcounting of the memory block is performed with gst_memory_ref() and
49 * The size of the memory can be retrieved and changed with
50 * gst_memory_get_sizes() and gst_memory_resize() respectively.
52 * Getting access to the data of the memory is performed with gst_memory_map().
53 * The call will return a pointer to offset bytes into the region of memory.
54 * After the memory access is completed, gst_memory_unmap() should be called.
56 * Memory can be copied with gst_memory_copy(), which will return a writable
57 * copy. gst_memory_share() will create a new memory block that shares the
58 * memory with an existing memory block at a custom offset and with a custom
61 * Memory can be efficiently merged when gst_memory_is_span() returns TRUE.
63 * Last reviewed on 2011-06-08 (0.11.0)
70 #include "gst_private.h"
71 #include "gstmemory.h"
73 #ifndef GST_DISABLE_TRACE
75 static GstAllocTrace *_gst_memory_trace;
76 static GstAllocTrace *_gst_allocator_trace;
79 G_DEFINE_BOXED_TYPE (GstMemory, gst_memory, (GBoxedCopyFunc) gst_memory_ref,
80 (GBoxedFreeFunc) gst_memory_unref);
82 G_DEFINE_BOXED_TYPE (GstAllocator, gst_allocator,
83 (GBoxedCopyFunc) gst_allocator_ref, (GBoxedFreeFunc) gst_allocator_unref);
86 * gst_memory_alignment:
88 * The default memory alignment in bytes - 1
89 * an alignment of 7 would be the same as what malloc() guarantees.
91 #if defined(MEMORY_ALIGNMENT_MALLOC)
92 size_t gst_memory_alignment = 7;
93 #elif defined(MEMORY_ALIGNMENT_PAGESIZE)
94 /* we fill this in in the _init method */
95 size_t gst_memory_alignment = 0;
96 #elif defined(MEMORY_ALIGNMENT)
97 size_t gst_memory_alignment = MEMORY_ALIGNMENT - 1;
99 #error "No memory alignment configured"
100 size_t gst_memory_alignment = 0;
110 GDestroyNotify notify;
113 /* default memory implementation */
122 /* the default allocator */
123 static GstAllocator *_default_allocator;
125 /* our predefined allocators */
126 static GstAllocator *_default_mem_impl;
128 /* initialize the fields */
130 _default_mem_init (GstMemoryDefault * mem, GstMemoryFlags flags,
131 GstMemory * parent, gsize slice_size, gpointer data,
132 GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
134 mem->mem.allocator = _default_mem_impl;
135 mem->mem.flags = flags;
136 mem->mem.refcount = 1;
137 mem->mem.parent = parent ? gst_memory_ref (parent) : NULL;
138 mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? 0x5 : 0);
139 mem->mem.maxsize = maxsize;
140 mem->mem.offset = offset;
141 mem->mem.size = size;
142 mem->slice_size = slice_size;
144 mem->free_func = free_func;
146 GST_CAT_DEBUG (GST_CAT_MEMORY, "new memory %p", mem);
149 /* create a new memory block that manages the given memory */
150 static GstMemoryDefault *
151 _default_mem_new (GstMemoryFlags flags, GstMemory * parent, gpointer data,
152 GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
154 GstMemoryDefault *mem;
157 slice_size = sizeof (GstMemoryDefault);
159 mem = g_slice_alloc (slice_size);
160 _default_mem_init (mem, flags, parent, slice_size,
161 data, free_func, maxsize, offset, size);
166 /* allocate the memory and structure in one block */
167 static GstMemoryDefault *
168 _default_mem_new_block (gsize maxsize, gsize align, gsize offset, gsize size)
170 GstMemoryDefault *mem;
171 gsize aoffset, slice_size;
174 /* ensure configured alignment */
175 align |= gst_memory_alignment;
176 /* allocate more to compensate for alignment */
178 /* alloc header and data in one block */
179 slice_size = sizeof (GstMemoryDefault) + maxsize;
181 mem = g_slice_alloc (slice_size);
185 data = (guint8 *) mem + sizeof (GstMemoryDefault);
187 if ((aoffset = ((guintptr) data & align)))
188 aoffset = (align + 1) - aoffset;
190 _default_mem_init (mem, 0, NULL, slice_size, data, NULL, maxsize,
191 aoffset + offset, size);
197 _default_alloc_alloc (GstAllocator * allocator, gsize maxsize, gsize align,
200 return (GstMemory *) _default_mem_new_block (maxsize, align, 0, maxsize);
204 _default_mem_map (GstMemoryDefault * mem, GstMapFlags flags)
210 _default_mem_unmap (GstMemoryDefault * mem)
216 _default_mem_free (GstMemoryDefault * mem)
218 GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);
221 gst_memory_unref (mem->mem.parent);
224 mem->free_func (mem->data);
226 g_slice_free1 (mem->slice_size, mem);
229 static GstMemoryDefault *
230 _default_mem_copy (GstMemoryDefault * mem, gssize offset, gsize size)
232 GstMemoryDefault *copy;
235 size = mem->mem.size > offset ? mem->mem.size - offset : 0;
238 _default_mem_new_block (mem->mem.maxsize, 0, mem->mem.offset + offset,
240 memcpy (copy->data, mem->data, mem->mem.maxsize);
241 GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
246 static GstMemoryDefault *
247 _default_mem_share (GstMemoryDefault * mem, gssize offset, gsize size)
249 GstMemoryDefault *sub;
252 /* find the real parent */
253 if ((parent = mem->mem.parent) == NULL)
254 parent = (GstMemory *) mem;
257 size = mem->mem.size - offset;
260 _default_mem_new (parent->flags, parent, mem->data, NULL,
261 mem->mem.maxsize, mem->mem.offset + offset, size);
267 _default_mem_is_span (GstMemoryDefault * mem1, GstMemoryDefault * mem2,
272 GstMemoryDefault *parent;
274 parent = (GstMemoryDefault *) mem1->mem.parent;
276 *offset = mem1->mem.offset - parent->mem.offset;
279 /* and memory is contiguous */
280 return mem1->data + mem1->mem.offset + mem1->mem.size ==
281 mem2->data + mem2->mem.offset;
285 _fallback_mem_copy (GstMemory * mem, gssize offset, gssize size)
288 GstMapInfo sinfo, dinfo;
290 if (!gst_memory_map (mem, &sinfo, GST_MAP_READ))
294 size = sinfo.size > offset ? sinfo.size - offset : 0;
296 /* use the same allocator as the memory we copy */
297 copy = gst_allocator_alloc (mem->allocator, size, mem->align);
298 if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
299 GST_CAT_WARNING (GST_CAT_MEMORY, "could not write map memory %p", copy);
300 gst_memory_unmap (mem, &sinfo);
304 memcpy (dinfo.data, sinfo.data + offset, size);
305 GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
306 gst_memory_unmap (copy, &dinfo);
307 gst_memory_unmap (mem, &sinfo);
313 _fallback_mem_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
319 static GHashTable *allocators;
322 _priv_sysmem_notify (gpointer user_data)
324 g_warning ("The default memory allocator was freed!");
328 _priv_gst_memory_initialize (void)
330 static const GstMemoryInfo _mem_info = {
331 GST_ALLOCATOR_SYSMEM,
332 (GstAllocatorAllocFunction) _default_alloc_alloc,
333 (GstMemoryMapFunction) _default_mem_map,
334 (GstMemoryUnmapFunction) _default_mem_unmap,
335 (GstMemoryFreeFunction) _default_mem_free,
336 (GstMemoryCopyFunction) _default_mem_copy,
337 (GstMemoryShareFunction) _default_mem_share,
338 (GstMemoryIsSpanFunction) _default_mem_is_span,
341 #ifndef GST_DISABLE_TRACE
342 _gst_memory_trace = _gst_alloc_trace_register ("GstMemory", -1);
343 _gst_allocator_trace = _gst_alloc_trace_register ("GstAllocator", -1);
346 g_rw_lock_init (&lock);
347 allocators = g_hash_table_new (g_str_hash, g_str_equal);
349 #ifdef HAVE_GETPAGESIZE
350 #ifdef MEMORY_ALIGNMENT_PAGESIZE
351 gst_memory_alignment = getpagesize () - 1;
355 GST_CAT_DEBUG (GST_CAT_MEMORY, "memory alignment: %" G_GSIZE_FORMAT,
356 gst_memory_alignment);
358 _default_mem_impl = gst_allocator_new (&_mem_info, NULL, _priv_sysmem_notify);
360 _default_allocator = gst_allocator_ref (_default_mem_impl);
361 gst_allocator_register (GST_ALLOCATOR_SYSMEM,
362 gst_allocator_ref (_default_mem_impl));
366 * gst_memory_new_wrapped:
367 * @flags: #GstMemoryFlags
368 * @data: data to wrap
369 * @free_func: function to free @data
370 * @maxsize: allocated size of @data
371 * @offset: offset in @data
372 * @size: size of valid data
374 * Allocate a new memory block that wraps the given @data.
376 * Returns: a new #GstMemory.
379 gst_memory_new_wrapped (GstMemoryFlags flags, gpointer data,
380 GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
382 GstMemoryDefault *mem;
384 g_return_val_if_fail (data != NULL, NULL);
385 g_return_val_if_fail (offset + size <= maxsize, NULL);
387 mem = _default_mem_new (flags, NULL, data, free_func, maxsize, offset, size);
389 #ifndef GST_DISABLE_TRACE
390 _gst_alloc_trace_new (_gst_memory_trace, mem);
393 return (GstMemory *) mem;
400 * Increases the refcount of @mem.
402 * Returns: @mem with increased refcount
405 gst_memory_ref (GstMemory * mem)
407 g_return_val_if_fail (mem != NULL, NULL);
409 GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
412 g_atomic_int_inc (&mem->refcount);
421 * Decreases the refcount of @mem. When the refcount reaches 0, the free
422 * function of @mem will be called.
425 gst_memory_unref (GstMemory * mem)
427 g_return_if_fail (mem != NULL);
428 g_return_if_fail (mem->allocator != NULL);
430 GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
433 if (g_atomic_int_dec_and_test (&mem->refcount)) {
434 #ifndef GST_DISABLE_TRACE
435 _gst_alloc_trace_free (_gst_memory_trace, mem);
437 mem->allocator->info.mem_free (mem);
442 * gst_memory_get_sizes:
444 * @offset: pointer to offset
445 * @maxsize: pointer to maxsize
447 * Get the current @size, @offset and @maxsize of @mem.
449 * Returns: the current sizes of @mem
452 gst_memory_get_sizes (GstMemory * mem, gsize * offset, gsize * maxsize)
454 g_return_val_if_fail (mem != NULL, 0);
457 *offset = mem->offset;
459 *maxsize = mem->maxsize;
467 * @offset: a new offset
470 * Resize the memory region. @mem should be writable and offset + size should be
471 * less than the maxsize of @mem.
474 gst_memory_resize (GstMemory * mem, gssize offset, gsize size)
476 g_return_if_fail (mem != NULL);
477 g_return_if_fail (gst_memory_is_writable (mem));
478 g_return_if_fail (offset >= 0 || mem->offset >= -offset);
479 g_return_if_fail (size + mem->offset + offset <= mem->maxsize);
481 mem->offset += offset;
486 * gst_memory_is_writable:
489 * Check if @mem is writable.
491 * Returns: %TRUE is @mem is writable.
494 gst_memory_is_writable (GstMemory * mem)
496 g_return_val_if_fail (mem != NULL, FALSE);
498 return (mem->refcount == 1) &&
499 ((mem->parent == NULL) || (mem->parent->refcount == 1)) &&
500 ((mem->flags & GST_MEMORY_FLAG_READONLY) == 0);
504 gst_memory_lock (GstMemory * mem, GstMapFlags flags)
506 gint access_mode, state, newstate;
508 access_mode = flags & 3;
511 state = g_atomic_int_get (&mem->state);
513 /* nothing mapped, set access_mode and refcount */
514 newstate = 4 | access_mode;
516 /* access_mode must match */
517 if ((state & access_mode) != access_mode)
519 /* increase refcount */
520 newstate = state + 4;
522 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
528 GST_CAT_DEBUG (GST_CAT_MEMORY, "lock failed %p: state %d, access_mode %d",
529 mem, state, access_mode);
535 gst_memory_unlock (GstMemory * mem)
537 gint state, newstate;
540 state = g_atomic_int_get (&mem->state);
541 /* decrease the refcount */
542 newstate = state - 4;
543 /* last refcount, unset access_mode */
546 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
551 * gst_memory_make_mapped:
552 * @mem: (transfer full): a #GstMemory
553 * @info: (out): pointer for info
554 * @flags: mapping flags
556 * Create a #GstMemory object that is mapped with @flags. If @mem is mappable
557 * with @flags, this function returns the mapped @mem directly. Otherwise a
558 * mapped copy of @mem is returned.
560 * This function takes ownership of old @mem and returns a reference to a new
563 * Returns: (transfer full): a #GstMemory object mapped with @flags or NULL when
564 * a mapping is not possible.
567 gst_memory_make_mapped (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
571 if (gst_memory_map (mem, info, flags)) {
574 result = gst_memory_copy (mem, 0, -1);
575 gst_memory_unref (mem);
580 if (!gst_memory_map (result, info, flags))
588 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
593 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
595 gst_memory_unref (result);
603 * @info: (out): pointer for info
604 * @flags: mapping flags
606 * Fill @info with the pointer and sizes of the memory in @mem that can be
607 * accessed according to @flags.
609 * This function can return %FALSE for various reasons:
610 * - the memory backed by @mem is not accessible with the given @flags.
611 * - the memory was already mapped with a different mapping.
613 * @info and its contents remains valid for as long as @mem is alive and until
614 * gst_memory_unmap() is called.
616 * For each gst_memory_map() call, a corresponding gst_memory_unmap() call
619 * Returns: %TRUE if the map operation was successful.
622 gst_memory_map (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
624 g_return_val_if_fail (mem != NULL, FALSE);
625 g_return_val_if_fail (info != NULL, FALSE);
627 if (!gst_memory_lock (mem, flags))
630 info->data = mem->allocator->info.mem_map (mem, mem->maxsize, flags);
632 if (G_UNLIKELY (info->data == NULL))
637 info->size = mem->size;
638 info->maxsize = mem->maxsize - mem->offset;
639 info->data = info->data + mem->offset;
646 GST_CAT_DEBUG (GST_CAT_MEMORY, "mem %p: lock %d failed", mem, flags);
651 /* something went wrong, restore the orginal state again */
652 GST_CAT_ERROR (GST_CAT_MEMORY, "mem %p: map failed", mem);
653 gst_memory_unlock (mem);
661 * @info: a #GstMapInfo
663 * Release the memory obtained with gst_memory_map()
666 gst_memory_unmap (GstMemory * mem, GstMapInfo * info)
668 g_return_if_fail (mem != NULL);
669 g_return_if_fail (info != NULL);
670 g_return_if_fail (info->memory == mem);
671 /* there must be a ref */
672 g_return_if_fail (g_atomic_int_get (&mem->state) >= 4);
674 mem->allocator->info.mem_unmap (mem);
675 gst_memory_unlock (mem);
681 * @offset: an offset to copy
682 * @size: size to copy or -1 to copy all bytes from offset
684 * Return a copy of @size bytes from @mem starting from @offset. This copy is
685 * guaranteed to be writable. @size can be set to -1 to return a copy all bytes
688 * Returns: a new #GstMemory.
691 gst_memory_copy (GstMemory * mem, gssize offset, gssize size)
695 g_return_val_if_fail (mem != NULL, NULL);
697 copy = mem->allocator->info.mem_copy (mem, offset, size);
699 #ifndef GST_DISABLE_TRACE
700 _gst_alloc_trace_new (_gst_memory_trace, copy);
709 * @offset: an offset to share
710 * @size: size to share or -1 to share bytes from offset
712 * Return a shared copy of @size bytes from @mem starting from @offset. No
713 * memory copy is performed and the memory region is simply shared. The result
714 * is guaranteed to be not-writable. @size can be set to -1 to return a share
715 * all bytes from @offset.
717 * Returns: a new #GstMemory.
720 gst_memory_share (GstMemory * mem, gssize offset, gssize size)
724 g_return_val_if_fail (mem != NULL, NULL);
726 shared = mem->allocator->info.mem_share (mem, offset, size);
728 #ifndef GST_DISABLE_TRACE
729 _gst_alloc_trace_new (_gst_memory_trace, shared);
736 * gst_memory_is_span:
737 * @mem1: a #GstMemory
738 * @mem2: a #GstMemory
739 * @offset: a pointer to a result offset
741 * Check if @mem1 and mem2 share the memory with a common parent memory object
742 * and that the memory is contiguous.
744 * If this is the case, the memory of @mem1 and @mem2 can be merged
745 * efficiently by performing gst_memory_share() on the parent object from
746 * the returned @offset.
748 * Returns: %TRUE if the memory is contiguous and of a common parent.
751 gst_memory_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
753 g_return_val_if_fail (mem1 != NULL, FALSE);
754 g_return_val_if_fail (mem2 != NULL, FALSE);
756 /* need to have the same allocators */
757 if (mem1->allocator != mem2->allocator)
760 /* need to have the same parent */
761 if (mem1->parent == NULL || mem1->parent != mem2->parent)
764 /* and memory is contiguous */
765 if (!mem1->allocator->info.mem_is_span (mem1, mem2, offset))
772 * gst_allocator_new:
773 * @info: a #GstMemoryInfo
774 * @user_data: user data
775 * @notify: a #GDestroyNotify for @user_data
777 * Create a new memory allocator with @info and @user_data.
779 * All functions in @info are mandatory except the copy and is_span
780 * functions, which will have a default implementation when left NULL.
782 * The @user_data will be passed to all calls of the alloc function and the
785 * Returns: a new #GstAllocator.
788 gst_allocator_new (const GstMemoryInfo * info, gpointer user_data,
789 GDestroyNotify notify)
791 GstAllocator *allocator;
793 #define INSTALL_FALLBACK(_t) \
794 if (allocator->info._t == NULL) allocator->info._t = _fallback_ ##_t;
796 g_return_val_if_fail (info != NULL, NULL);
797 g_return_val_if_fail (info->alloc != NULL, NULL);
798 g_return_val_if_fail (info->mem_map != NULL, NULL);
799 g_return_val_if_fail (info->mem_unmap != NULL, NULL);
800 g_return_val_if_fail (info->mem_free != NULL, NULL);
801 g_return_val_if_fail (info->mem_share != NULL, NULL);
803 allocator = g_slice_new (GstAllocator);
804 allocator->refcount = 1;
805 allocator->info = *info;
806 allocator->user_data = user_data;
807 allocator->notify = notify;
808 INSTALL_FALLBACK (mem_copy);
809 INSTALL_FALLBACK (mem_is_span);
810 #undef INSTALL_FALLBACK
812 GST_CAT_DEBUG (GST_CAT_MEMORY, "new allocator %p", allocator);
814 #ifndef GST_DISABLE_TRACE
815 _gst_alloc_trace_new (_gst_allocator_trace, allocator);
822 * gst_allocator_get_memory_type:
823 * @allocator: a #GstAllocator
825 * Get the memory type allocated by this allocator
827 * Returns: the memory type of @allocator
830 gst_allocator_get_memory_type (GstAllocator * allocator)
832 g_return_val_if_fail (allocator != NULL, NULL);
834 return allocator->info.mem_type;
839 * @allocator: a #GstAllocator
841 * Increases the refcount of @allocator.
843 * Returns: @allocator with increased refcount
846 gst_allocator_ref (GstAllocator * allocator)
848 g_return_val_if_fail (allocator != NULL, NULL);
850 GST_CAT_TRACE (GST_CAT_MEMORY, "alocator %p, %d->%d", allocator,
851 allocator->refcount, allocator->refcount + 1);
853 g_atomic_int_inc (&allocator->refcount);
859 * gst_allocator_unref:
860 * @allocator: a #GstAllocator
862 * Decreases the refcount of @allocator. When the refcount reaches 0, the free
863 * function of @allocator will be called.
866 gst_allocator_unref (GstAllocator * allocator)
868 g_return_if_fail (allocator != NULL);
870 GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
871 allocator->refcount, allocator->refcount - 1);
873 if (g_atomic_int_dec_and_test (&allocator->refcount)) {
874 if (allocator->notify)
875 allocator->notify (allocator->user_data);
876 #ifndef GST_DISABLE_TRACE
877 _gst_alloc_trace_free (_gst_allocator_trace, allocator);
879 g_slice_free1 (sizeof (GstAllocator), allocator);
884 * gst_allocator_register:
885 * @name: the name of the allocator
886 * @allocator: (transfer full): #GstAllocator
888 * Registers the memory @allocator with @name. This function takes ownership of
892 gst_allocator_register (const gchar * name, GstAllocator * allocator)
894 g_return_if_fail (name != NULL);
895 g_return_if_fail (allocator != NULL);
897 GST_CAT_DEBUG (GST_CAT_MEMORY, "registering allocator %p with name \"%s\"",
900 g_rw_lock_writer_lock (&lock);
901 g_hash_table_insert (allocators, (gpointer) name, (gpointer) allocator);
902 g_rw_lock_writer_unlock (&lock);
906 * gst_allocator_find:
907 * @name: the name of the allocator
909 * Find a previously registered allocator with @name. When @name is NULL, the
910 * default allocator will be returned.
912 * Returns: (transfer full): a #GstAllocator or NULL when the allocator with @name was not
913 * registered. Use gst_allocator_unref() to release the allocator after usage.
916 gst_allocator_find (const gchar * name)
918 GstAllocator *allocator;
920 g_rw_lock_reader_lock (&lock);
922 allocator = g_hash_table_lookup (allocators, (gconstpointer) name);
924 allocator = _default_allocator;
927 gst_allocator_ref (allocator);
928 g_rw_lock_reader_unlock (&lock);
934 * gst_allocator_set_default:
935 * @allocator: (transfer full): a #GstAllocator
937 * Set the default allocator. This function takes ownership of @allocator.
940 gst_allocator_set_default (GstAllocator * allocator)
943 g_return_if_fail (allocator != NULL);
945 g_rw_lock_writer_lock (&lock);
946 old = _default_allocator;
947 _default_allocator = allocator;
948 g_rw_lock_writer_unlock (&lock);
951 gst_allocator_unref (old);
955 * gst_allocator_alloc:
956 * @allocator: (transfer none) (allow-none): a #GstAllocator to use
957 * @maxsize: allocated size of @data
958 * @align: alignment for the data
960 * Use @allocator to allocate a new memory block with memory that is at least
961 * @maxsize big and has the given alignment.
963 * When @allocator is NULL, the default allocator will be used.
965 * @align is given as a bitmask so that @align + 1 equals the amount of bytes to
966 * align to. For example, to align to 8 bytes, use an alignment of 7.
968 * Returns: (transfer full): a new #GstMemory.
971 gst_allocator_alloc (GstAllocator * allocator, gsize maxsize, gsize align)
975 g_return_val_if_fail (((align + 1) & align) == 0, NULL);
977 if (allocator == NULL)
978 allocator = _default_allocator;
980 mem = allocator->info.alloc (allocator, maxsize, align, allocator->user_data);
981 #ifndef GST_DISABLE_TRACE
982 _gst_alloc_trace_new (_gst_memory_trace, mem);