/* GStreamer
 * Copyright (C) 2011 Wim Taymans <wim.taymans@gmail.be>
 *
 * gstmemory.c: memory block handling
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */
/**
 * SECTION:gstmemory
 * @short_description: refcounted wrapper for memory blocks
 * @see_also: #GstBuffer
 *
 * GstMemory is a lightweight refcounted object that wraps a region of memory.
 * Memory objects are typically used to manage the data of a #GstBuffer.
 *
 * A GstMemory object has an allocated region of memory of maxsize. The maximum
 * size does not change during the lifetime of the memory object. The memory
 * also has offset and size properties that specify the valid range of memory
 * in the allocated region.
 *
 * Memory is usually created by allocators with a gst_allocator_alloc()
 * method call. When NULL is used as the allocator, the default allocator will
 * be used.
 *
 * New allocators can be registered with gst_allocator_register().
 * Allocators are identified by name and can be retrieved with
 * gst_allocator_find(). gst_allocator_set_default() can be used to change the
 * default allocator.
 *
 * New memory can be created with gst_memory_new_wrapped() that wraps the memory
 * allocated elsewhere.
 *
 * Refcounting of the memory block is performed with gst_memory_ref() and
 * gst_memory_unref().
 *
 * The size of the memory can be retrieved and changed with
 * gst_memory_get_sizes() and gst_memory_resize() respectively.
 *
 * Getting access to the data of the memory is performed with gst_memory_map().
 * The call will return a pointer to offset bytes into the region of memory.
 * After the memory access is completed, gst_memory_unmap() should be called.
 *
 * Memory can be copied with gst_memory_copy(), which will return a writable
 * copy. gst_memory_share() will create a new memory block that shares the
 * memory with an existing memory block at a custom offset and with a custom
 * size.
 *
 * Memory can be efficiently merged when gst_memory_is_span() returns TRUE.
 *
 * Last reviewed on 2012-03-28 (0.11.3)
 */
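/* Illustrative sketch (not part of the original file): a typical
 * allocate/map/use/unmap cycle with the default allocator. Error handling
 * is reduced to the map check for brevity.
 *
 *   GstMapInfo info;
 *   GstMemory *mem;
 *
 *   mem = gst_allocator_alloc (NULL, 1024, NULL);
 *   if (gst_memory_map (mem, &info, GST_MAP_WRITE)) {
 *     memset (info.data, 0, info.size);
 *     gst_memory_unmap (mem, &info);
 *   }
 *   gst_memory_unref (mem);
 */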
#include "gst_private.h"
#include "gstmemory.h"

#ifndef GST_DISABLE_TRACE
static GstAllocTrace *_gst_memory_trace;
static GstAllocTrace *_gst_allocator_trace;
#endif

G_DEFINE_BOXED_TYPE (GstMemory, gst_memory, (GBoxedCopyFunc) gst_memory_ref,
    (GBoxedFreeFunc) gst_memory_unref);

G_DEFINE_BOXED_TYPE (GstAllocator, gst_allocator,
    (GBoxedCopyFunc) gst_allocator_ref, (GBoxedFreeFunc) gst_allocator_unref);

G_DEFINE_BOXED_TYPE (GstAllocationParams, gst_allocation_params,
    (GBoxedCopyFunc) gst_allocation_params_copy,
    (GBoxedFreeFunc) gst_allocation_params_free);
#if defined(MEMORY_ALIGNMENT_MALLOC)
size_t gst_memory_alignment = 7;
#elif defined(MEMORY_ALIGNMENT_PAGESIZE)
/* we fill this in in the _init method */
size_t gst_memory_alignment = 0;
#elif defined(MEMORY_ALIGNMENT)
size_t gst_memory_alignment = MEMORY_ALIGNMENT - 1;
#else
#error "No memory alignment configured"
size_t gst_memory_alignment = 0;
#endif
struct _GstAllocator
{
  gint refcount;

  GstMemoryInfo info;

  gpointer user_data;
  GDestroyNotify notify;
};

/* default memory implementation */
typedef struct
{
  GstMemory mem;
  gsize slice_size;
  guint8 *data;
  gpointer user_data;
  GDestroyNotify notify;
} GstMemoryDefault;

/* the default allocator */
static GstAllocator *_default_allocator;

/* our predefined allocators */
static GstAllocator *_default_mem_impl;
/* initialize the fields */
static void
_default_mem_init (GstMemoryDefault * mem, GstMemoryFlags flags,
    GstMemory * parent, gsize slice_size, gpointer data,
    gsize maxsize, gsize offset, gsize size, gsize align,
    gpointer user_data, GDestroyNotify notify)
{
  mem->mem.allocator = _default_mem_impl;
  mem->mem.flags = flags;
  mem->mem.refcount = 1;
  mem->mem.parent = parent ? gst_memory_ref (parent) : NULL;
  mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? 0x1 : 0);
  mem->mem.maxsize = maxsize;
  mem->mem.align = align;
  mem->mem.offset = offset;
  mem->mem.size = size;
  mem->slice_size = slice_size;
  mem->data = data;
  mem->user_data = user_data;
  mem->notify = notify;

  GST_CAT_DEBUG (GST_CAT_MEMORY, "new memory %p, maxsize:%" G_GSIZE_FORMAT
      " offset:%" G_GSIZE_FORMAT " size:%" G_GSIZE_FORMAT, mem, maxsize,
      offset, size);
}
154 /* create a new memory block that manages the given memory */
155 static GstMemoryDefault *
156 _default_mem_new (GstMemoryFlags flags, GstMemory * parent, gpointer data,
157 gsize maxsize, gsize offset, gsize size, gsize align, gpointer user_data,
158 GDestroyNotify notify)
160 GstMemoryDefault *mem;
163 slice_size = sizeof (GstMemoryDefault);
165 mem = g_slice_alloc (slice_size);
166 _default_mem_init (mem, flags, parent, slice_size,
167 data, maxsize, offset, size, align, user_data, notify);
172 /* allocate the memory and structure in one block */
173 static GstMemoryDefault *
174 _default_mem_new_block (GstMemoryFlags flags, gsize maxsize, gsize align,
175 gsize offset, gsize size)
177 GstMemoryDefault *mem;
178 gsize aoffset, slice_size, padding;
181 /* ensure configured alignment */
182 align |= gst_memory_alignment;
183 /* allocate more to compensate for alignment */
185 /* alloc header and data in one block */
186 slice_size = sizeof (GstMemoryDefault) + maxsize;
188 mem = g_slice_alloc (slice_size);
192 data = (guint8 *) mem + sizeof (GstMemoryDefault);
195 if ((aoffset = ((guintptr) data & align))) {
196 aoffset = (align + 1) - aoffset;
201 if (offset && (flags & GST_MEMORY_FLAG_ZERO_PREFIXED))
202 memset (data, 0, offset);
204 padding = maxsize - (offset + size);
205 if (padding && (flags & GST_MEMORY_FLAG_ZERO_PADDED))
206 memset (data + offset + size, 0, padding);
208 _default_mem_init (mem, flags, NULL, slice_size, data, maxsize,
209 offset, size, align, NULL, NULL);
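/* Worked example of the alignment math above (illustrative, assuming
 * gst_memory_alignment == 7, i.e. 8-byte alignment):
 *
 *   data ends in 0x...5 (not aligned), align = 7
 *   aoffset = (guintptr) data & align      ->  5
 *   aoffset = (align + 1) - aoffset        ->  3
 *   data += 3, maxsize -= 3                ->  data is now 8-byte aligned
 *
 * This is why maxsize is over-allocated by align bytes before the slice is
 * requested.
 */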
static GstMemory *
_default_alloc_alloc (GstAllocator * allocator, gsize size,
    GstAllocationParams * params, gpointer user_data)
{
  gsize maxsize = size + params->prefix + params->padding;

  return (GstMemory *) _default_mem_new_block (params->flags,
      maxsize, params->align, params->prefix, size);
}
static gpointer
_default_mem_map (GstMemoryDefault * mem, GstMapFlags flags)
{
  return mem->data;
}

static gboolean
_default_mem_unmap (GstMemoryDefault * mem)
{
  return TRUE;
}
static void
_default_mem_free (GstMemoryDefault * mem)
{
  GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);

  if (mem->mem.parent)
    gst_memory_unref (mem->mem.parent);

  if (mem->notify)
    mem->notify (mem->user_data);

  g_slice_free1 (mem->slice_size, mem);
}
250 static GstMemoryDefault *
251 _default_mem_copy (GstMemoryDefault * mem, gssize offset, gsize size)
253 GstMemoryDefault *copy;
256 size = mem->mem.size > offset ? mem->mem.size - offset : 0;
259 _default_mem_new_block (0, mem->mem.maxsize, 0, mem->mem.offset + offset,
261 memcpy (copy->data, mem->data, mem->mem.maxsize);
262 GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
267 static GstMemoryDefault *
268 _default_mem_share (GstMemoryDefault * mem, gssize offset, gsize size)
270 GstMemoryDefault *sub;
273 /* find the real parent */
274 if ((parent = mem->mem.parent) == NULL)
275 parent = (GstMemory *) mem;
278 size = mem->mem.size - offset;
281 _default_mem_new (parent->flags, parent, mem->data,
282 mem->mem.maxsize, mem->mem.offset + offset, size, mem->mem.align, NULL,
static gboolean
_default_mem_is_span (GstMemoryDefault * mem1, GstMemoryDefault * mem2,
    gsize * offset)
{
  if (offset) {
    GstMemoryDefault *parent;

    parent = (GstMemoryDefault *) mem1->mem.parent;

    *offset = mem1->mem.offset - parent->mem.offset;
  }

  /* and memory is contiguous */
  return mem1->data + mem1->mem.offset + mem1->mem.size ==
      mem2->data + mem2->mem.offset;
}
static GstMemory *
_fallback_mem_copy (GstMemory * mem, gssize offset, gssize size)
{
  GstMemory *copy;
  GstMapInfo sinfo, dinfo;
  GstAllocationParams params = { 0, 0, 0, mem->align, };

  if (!gst_memory_map (mem, &sinfo, GST_MAP_READ))
    return NULL;

  if (size == -1)
    size = sinfo.size > offset ? sinfo.size - offset : 0;

  /* use the same allocator as the memory we copy */
  copy = gst_allocator_alloc (mem->allocator, size, &params);
  if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
    GST_CAT_WARNING (GST_CAT_MEMORY, "could not write map memory %p", copy);
    gst_memory_unmap (mem, &sinfo);
    return NULL;
  }

  memcpy (dinfo.data, sinfo.data + offset, size);
  GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
  gst_memory_unmap (copy, &dinfo);
  gst_memory_unmap (mem, &sinfo);

  return copy;
}
static gboolean
_fallback_mem_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
{
  return FALSE;
}

static GRWLock lock;
static GHashTable *allocators;

static void
_priv_sysmem_notify (gpointer user_data)
{
  g_warning ("The default memory allocator was freed!");
}
void
_priv_gst_memory_initialize (void)
{
  static const GstMemoryInfo _mem_info = {
    GST_ALLOCATOR_SYSMEM,
    (GstAllocatorAllocFunction) _default_alloc_alloc,
    (GstMemoryMapFunction) _default_mem_map,
    (GstMemoryUnmapFunction) _default_mem_unmap,
    (GstMemoryFreeFunction) _default_mem_free,
    (GstMemoryCopyFunction) _default_mem_copy,
    (GstMemoryShareFunction) _default_mem_share,
    (GstMemoryIsSpanFunction) _default_mem_is_span,
  };

#ifndef GST_DISABLE_TRACE
  _gst_memory_trace = _gst_alloc_trace_register ("GstMemory", -1);
  _gst_allocator_trace = _gst_alloc_trace_register ("GstAllocator", -1);
#endif

  g_rw_lock_init (&lock);
  allocators = g_hash_table_new (g_str_hash, g_str_equal);

#ifdef HAVE_GETPAGESIZE
#ifdef MEMORY_ALIGNMENT_PAGESIZE
  gst_memory_alignment = getpagesize () - 1;
#endif
#endif

  GST_CAT_DEBUG (GST_CAT_MEMORY, "memory alignment: %" G_GSIZE_FORMAT,
      gst_memory_alignment);

  _default_mem_impl = gst_allocator_new (&_mem_info, NULL, _priv_sysmem_notify);

  _default_allocator = gst_allocator_ref (_default_mem_impl);
  gst_allocator_register (GST_ALLOCATOR_SYSMEM,
      gst_allocator_ref (_default_mem_impl));
}
389 * gst_memory_new_wrapped:
390 * @flags: #GstMemoryFlags
391 * @data: data to wrap
392 * @maxsize: allocated size of @data
393 * @offset: offset in @data
394 * @size: size of valid data
395 * @user_data: user_data
396 * @notify: called with @user_data when the memory is freed
398 * Allocate a new memory block that wraps the given @data.
400 * The prefix/padding must be filled with 0 if @flags contains
401 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
403 * Returns: a new #GstMemory.
406 gst_memory_new_wrapped (GstMemoryFlags flags, gpointer data,
407 gsize maxsize, gsize offset, gsize size, gpointer user_data,
408 GDestroyNotify notify)
410 GstMemoryDefault *mem;
412 g_return_val_if_fail (data != NULL, NULL);
413 g_return_val_if_fail (offset + size <= maxsize, NULL);
416 _default_mem_new (flags, NULL, data, maxsize, offset, size, 0, user_data,
419 #ifndef GST_DISABLE_TRACE
420 _gst_alloc_trace_new (_gst_memory_trace, mem);
423 return (GstMemory *) mem;
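/* Illustrative sketch: wrapping a heap buffer so that it is released
 * together with the last ref to the returned memory. The buffer is passed
 * both as @data and as @user_data, and g_free() is used as the notify.
 *
 *   gsize size = 1024;
 *   guint8 *data = g_malloc (size);
 *   GstMemory *mem;
 *
 *   mem = gst_memory_new_wrapped (0, data, size, 0, size, data, g_free);
 *   ...
 *   gst_memory_unref (mem);
 */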
/**
 * gst_memory_ref:
 * @mem: a #GstMemory
 *
 * Increases the refcount of @mem.
 *
 * Returns: @mem with increased refcount
 */
GstMemory *
gst_memory_ref (GstMemory * mem)
{
  g_return_val_if_fail (mem != NULL, NULL);

  GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
      mem->refcount + 1);

  g_atomic_int_inc (&mem->refcount);

  return mem;
}
/**
 * gst_memory_unref:
 * @mem: a #GstMemory
 *
 * Decreases the refcount of @mem. When the refcount reaches 0, the free
 * function of @mem will be called.
 */
void
gst_memory_unref (GstMemory * mem)
{
  g_return_if_fail (mem != NULL);
  g_return_if_fail (mem->allocator != NULL);

  GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
      mem->refcount - 1);

  if (g_atomic_int_dec_and_test (&mem->refcount)) {
    /* there should be no outstanding mappings */
    g_return_if_fail (g_atomic_int_get (&mem->state) < 4);
#ifndef GST_DISABLE_TRACE
    _gst_alloc_trace_free (_gst_memory_trace, mem);
#endif
    mem->allocator->info.mem_free (mem);
  }
}
/**
 * gst_memory_is_exclusive:
 * @mem: a #GstMemory
 *
 * Check if the current ref to @mem is exclusive, i.e. no other references
 * to @mem exist.
 *
 * Returns: %TRUE if the current ref to @mem is the only one
 */
gboolean
gst_memory_is_exclusive (GstMemory * mem)
{
  g_return_val_if_fail (mem != NULL, FALSE);

  return (g_atomic_int_get (&mem->refcount) == 1);
}
/**
 * gst_memory_get_sizes:
 * @mem: a #GstMemory
 * @offset: pointer to offset
 * @maxsize: pointer to maxsize
 *
 * Get the current @size, @offset and @maxsize of @mem.
 *
 * Returns: the current sizes of @mem
 */
gsize
gst_memory_get_sizes (GstMemory * mem, gsize * offset, gsize * maxsize)
{
  g_return_val_if_fail (mem != NULL, 0);

  if (offset)
    *offset = mem->offset;
  if (maxsize)
    *maxsize = mem->maxsize;

  return mem->size;
}
/**
 * gst_memory_resize:
 * @mem: a #GstMemory
 * @offset: a new offset
 * @size: a new size
 *
 * Resize the memory region. @mem should be writable and offset + size should be
 * less than the maxsize of @mem.
 *
 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED will be
 * cleared when offset or padding is increased respectively.
 */
void
gst_memory_resize (GstMemory * mem, gssize offset, gsize size)
{
  g_return_if_fail (mem != NULL);
  g_return_if_fail (offset >= 0 || mem->offset >= -offset);
  g_return_if_fail (size + mem->offset + offset <= mem->maxsize);

  /* if we increase the prefix, we can't guarantee it is still 0 filled */
  if ((offset > 0) && GST_MEMORY_IS_ZERO_PREFIXED (mem))
    GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PREFIXED);

  /* if we increase the padding, we can't guarantee it is still 0 filled */
  if ((offset + size < mem->size) && GST_MEMORY_IS_ZERO_PADDED (mem))
    GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PADDED);

  mem->offset += offset;
  mem->size = size;
}
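/* Note on the mem->state encoding used by the lock helpers below (inferred
 * from the code, not an original comment): the two lowest bits record the
 * access mode of the active mappings (GST_MAP_READ/GST_MAP_WRITE) and every
 * outstanding mapping adds 4. A state of 0 means "not mapped"; a new mapping
 * is refused when it requests access bits that the already recorded mode
 * does not contain.
 */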
static gboolean
gst_memory_lock (GstMemory * mem, GstMapFlags flags)
{
  gint access_mode, state, newstate;

  access_mode = flags & 3;

  do {
    state = g_atomic_int_get (&mem->state);
    if (state == 0) {
      /* nothing mapped, set access_mode and refcount */
      newstate = 4 | access_mode;
    } else {
      /* access_mode must match */
      if ((state & access_mode) != access_mode)
        goto lock_failed;
      /* increase refcount */
      newstate = state + 4;
    }
  } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));

  return TRUE;

lock_failed:
  {
    GST_CAT_DEBUG (GST_CAT_MEMORY, "lock failed %p: state %d, access_mode %d",
        mem, state, access_mode);
    return FALSE;
  }
}
static void
gst_memory_unlock (GstMemory * mem)
{
  gint state, newstate;

  do {
    state = g_atomic_int_get (&mem->state);
    /* decrease the refcount */
    newstate = state - 4;
    /* last refcount, unset access_mode */
    if (newstate < 4)
      newstate = 0;
  } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
}
590 * gst_memory_make_mapped:
591 * @mem: (transfer full): a #GstMemory
592 * @info: (out): pointer for info
593 * @flags: mapping flags
595 * Create a #GstMemory object that is mapped with @flags. If @mem is mappable
596 * with @flags, this function returns the mapped @mem directly. Otherwise a
597 * mapped copy of @mem is returned.
599 * This function takes ownership of old @mem and returns a reference to a new
602 * Returns: (transfer full): a #GstMemory object mapped with @flags or NULL when
603 * a mapping is not possible.
606 gst_memory_make_mapped (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
610 if (gst_memory_map (mem, info, flags)) {
613 result = gst_memory_copy (mem, 0, -1);
614 gst_memory_unref (mem);
619 if (!gst_memory_map (result, info, flags))
627 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
632 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
634 gst_memory_unref (result);
/**
 * gst_memory_map:
 * @mem: a #GstMemory
 * @info: (out): pointer for info
 * @flags: mapping flags
 *
 * Fill @info with the pointer and sizes of the memory in @mem that can be
 * accessed according to @flags.
 *
 * This function can return %FALSE for various reasons:
 * - the memory backed by @mem is not accessible with the given @flags.
 * - the memory was already mapped with a different mapping.
 *
 * @info and its contents remain valid for as long as @mem is valid and
 * until gst_memory_unmap() is called.
 *
 * For each gst_memory_map() call, a corresponding gst_memory_unmap() call
 * should be done.
 *
 * Returns: %TRUE if the map operation was successful.
 */
gboolean
gst_memory_map (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
{
  g_return_val_if_fail (mem != NULL, FALSE);
  g_return_val_if_fail (info != NULL, FALSE);

  if (!gst_memory_lock (mem, flags))
    goto lock_failed;

  info->data = mem->allocator->info.mem_map (mem, mem->maxsize, flags);

  if (G_UNLIKELY (info->data == NULL))
    goto error;

  info->memory = mem;
  info->flags = flags;
  info->size = mem->size;
  info->maxsize = mem->maxsize - mem->offset;
  info->data = info->data + mem->offset;

  return TRUE;

  /* ERRORS */
lock_failed:
  {
    GST_CAT_DEBUG (GST_CAT_MEMORY, "mem %p: lock %d failed", mem, flags);
    return FALSE;
  }
error:
  {
    /* something went wrong, restore the original state again */
    GST_CAT_ERROR (GST_CAT_MEMORY, "mem %p: map failed", mem);
    gst_memory_unlock (mem);
    return FALSE;
  }
}
/**
 * gst_memory_unmap:
 * @mem: a #GstMemory
 * @info: a #GstMapInfo
 *
 * Release the memory obtained with gst_memory_map().
 */
void
gst_memory_unmap (GstMemory * mem, GstMapInfo * info)
{
  g_return_if_fail (mem != NULL);
  g_return_if_fail (info != NULL);
  g_return_if_fail (info->memory == mem);
  /* there must be a ref */
  g_return_if_fail (g_atomic_int_get (&mem->state) >= 4);

  mem->allocator->info.mem_unmap (mem);
  gst_memory_unlock (mem);
}
/**
 * gst_memory_copy:
 * @mem: a #GstMemory
 * @offset: an offset to copy
 * @size: size to copy or -1 to copy all bytes from offset
 *
 * Return a copy of @size bytes from @mem starting from @offset. This copy is
 * guaranteed to be writable. @size can be set to -1 to return a copy of all
 * bytes from @offset.
 *
 * Returns: a new #GstMemory.
 */
GstMemory *
gst_memory_copy (GstMemory * mem, gssize offset, gssize size)
{
  GstMemory *copy;

  g_return_val_if_fail (mem != NULL, NULL);

  copy = mem->allocator->info.mem_copy (mem, offset, size);

#ifndef GST_DISABLE_TRACE
  _gst_alloc_trace_new (_gst_memory_trace, copy);
#endif

  return copy;
}
/**
 * gst_memory_share:
 * @mem: a #GstMemory
 * @offset: an offset to share
 * @size: size to share or -1 to share all bytes from offset
 *
 * Return a shared copy of @size bytes from @mem starting from @offset. No
 * memory copy is performed and the memory region is simply shared. The result
 * is guaranteed to be not-writable. @size can be set to -1 to share all bytes
 * from @offset.
 *
 * Returns: a new #GstMemory.
 */
GstMemory *
gst_memory_share (GstMemory * mem, gssize offset, gssize size)
{
  GstMemory *shared;

  g_return_val_if_fail (mem != NULL, NULL);
  g_return_val_if_fail (!GST_MEMORY_FLAG_IS_SET (mem, GST_MEMORY_FLAG_NO_SHARE),
      NULL);

  shared = mem->allocator->info.mem_share (mem, offset, size);

#ifndef GST_DISABLE_TRACE
  _gst_alloc_trace_new (_gst_memory_trace, shared);
#endif

  return shared;
}
/**
 * gst_memory_is_span:
 * @mem1: a #GstMemory
 * @mem2: a #GstMemory
 * @offset: a pointer to a result offset
 *
 * Check if @mem1 and @mem2 share the memory with a common parent memory object
 * and that the memory is contiguous.
 *
 * If this is the case, the memory of @mem1 and @mem2 can be merged
 * efficiently by performing gst_memory_share() on the parent object from
 * the returned @offset.
 *
 * Returns: %TRUE if the memory is contiguous and of a common parent.
 */
gboolean
gst_memory_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
{
  g_return_val_if_fail (mem1 != NULL, FALSE);
  g_return_val_if_fail (mem2 != NULL, FALSE);

  /* need to have the same allocators */
  if (mem1->allocator != mem2->allocator)
    return FALSE;

  /* need to have the same parent */
  if (mem1->parent == NULL || mem1->parent != mem2->parent)
    return FALSE;

  /* and memory is contiguous */
  if (!mem1->allocator->info.mem_is_span (mem1, mem2, offset))
    return FALSE;

  return TRUE;
}
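/* Illustrative sketch: merging two contiguous blocks by sharing from their
 * common parent, as described in the documentation above. The parent and
 * size fields are public in GstMemory.
 *
 *   gsize offset;
 *
 *   if (gst_memory_is_span (mem1, mem2, &offset)) {
 *     GstMemory *span;
 *
 *     span = gst_memory_share (mem1->parent, offset,
 *         mem1->size + mem2->size);
 *     ...
 *     gst_memory_unref (span);
 *   }
 */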
/**
 * gst_allocator_new:
 * @info: a #GstMemoryInfo
 * @user_data: user data
 * @notify: a #GDestroyNotify for @user_data
 *
 * Create a new memory allocator with @info and @user_data.
 *
 * All functions in @info are mandatory except the copy and is_span
 * functions, which will have a default implementation when left NULL.
 *
 * The @user_data will be passed to all calls of the alloc function. @notify
 * will be called with @user_data when the allocator is freed.
 *
 * Returns: a new #GstAllocator.
 */
GstAllocator *
gst_allocator_new (const GstMemoryInfo * info, gpointer user_data,
    GDestroyNotify notify)
{
  GstAllocator *allocator;

#define INSTALL_FALLBACK(_t) \
  if (allocator->info._t == NULL) allocator->info._t = _fallback_ ##_t;

  g_return_val_if_fail (info != NULL, NULL);
  g_return_val_if_fail (info->alloc != NULL, NULL);
  g_return_val_if_fail (info->mem_map != NULL, NULL);
  g_return_val_if_fail (info->mem_unmap != NULL, NULL);
  g_return_val_if_fail (info->mem_free != NULL, NULL);
  g_return_val_if_fail (info->mem_share != NULL, NULL);

  allocator = g_slice_new (GstAllocator);
  allocator->refcount = 1;
  allocator->info = *info;
  allocator->user_data = user_data;
  allocator->notify = notify;
  INSTALL_FALLBACK (mem_copy);
  INSTALL_FALLBACK (mem_is_span);
#undef INSTALL_FALLBACK

  GST_CAT_DEBUG (GST_CAT_MEMORY, "new allocator %p", allocator);

#ifndef GST_DISABLE_TRACE
  _gst_alloc_trace_new (_gst_allocator_trace, allocator);
#endif

  return allocator;
}
863 * gst_allocator_get_memory_type:
864 * @allocator: a #GstAllocator
866 * Get the memory type allocated by this allocator
868 * Returns: the memory type provided by @allocator
871 gst_allocator_get_memory_type (GstAllocator * allocator)
873 g_return_val_if_fail (allocator != NULL, NULL);
875 return allocator->info.mem_type;
/**
 * gst_allocator_ref:
 * @allocator: a #GstAllocator
 *
 * Increases the refcount of @allocator.
 *
 * Returns: @allocator with increased refcount
 */
GstAllocator *
gst_allocator_ref (GstAllocator * allocator)
{
  g_return_val_if_fail (allocator != NULL, NULL);

  GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
      allocator->refcount, allocator->refcount + 1);

  g_atomic_int_inc (&allocator->refcount);

  return allocator;
}
900 * gst_allocator_unref:
901 * @allocator: a #GstAllocator
903 * Decreases the refcount of @allocator. When the refcount reaches 0, the notify
904 * function of @allocator will be called and the allocator will be freed.
907 gst_allocator_unref (GstAllocator * allocator)
909 g_return_if_fail (allocator != NULL);
911 GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
912 allocator->refcount, allocator->refcount - 1);
914 if (g_atomic_int_dec_and_test (&allocator->refcount)) {
915 if (allocator->notify)
916 allocator->notify (allocator->user_data);
917 #ifndef GST_DISABLE_TRACE
918 _gst_alloc_trace_free (_gst_allocator_trace, allocator);
920 g_slice_free1 (sizeof (GstAllocator), allocator);
925 * gst_allocator_register:
926 * @name: the name of the allocator
927 * @allocator: (transfer full): #GstAllocator
929 * Registers the memory @allocator with @name. This function takes ownership of
933 gst_allocator_register (const gchar * name, GstAllocator * allocator)
935 g_return_if_fail (name != NULL);
936 g_return_if_fail (allocator != NULL);
938 GST_CAT_DEBUG (GST_CAT_MEMORY, "registering allocator %p with name \"%s\"",
941 g_rw_lock_writer_lock (&lock);
942 g_hash_table_insert (allocators, (gpointer) name, (gpointer) allocator);
943 g_rw_lock_writer_unlock (&lock);
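/* Illustrative sketch (hypothetical my_* callbacks, not part of this file):
 * registering a custom allocator, mirroring what _priv_gst_memory_initialize()
 * does for the sysmem allocator. mem_copy and mem_is_span may be left NULL;
 * gst_allocator_new() installs the fallbacks for them, while the other
 * functions are mandatory.
 *
 *   static const GstMemoryInfo my_info = {
 *     "MyMemory",
 *     (GstAllocatorAllocFunction) my_alloc,
 *     (GstMemoryMapFunction) my_map,
 *     (GstMemoryUnmapFunction) my_unmap,
 *     (GstMemoryFreeFunction) my_free,
 *     (GstMemoryCopyFunction) NULL,
 *     (GstMemoryShareFunction) my_share,
 *     (GstMemoryIsSpanFunction) NULL,
 *   };
 *
 *   gst_allocator_register ("MyMemory",
 *       gst_allocator_new (&my_info, NULL, NULL));
 */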
947 * gst_allocator_find:
948 * @name: the name of the allocator
950 * Find a previously registered allocator with @name. When @name is NULL, the
951 * default allocator will be returned.
953 * Returns: (transfer full): a #GstAllocator or NULL when the allocator with @name was not
954 * registered. Use gst_allocator_unref() to release the allocator after usage.
957 gst_allocator_find (const gchar * name)
959 GstAllocator *allocator;
961 g_rw_lock_reader_lock (&lock);
963 allocator = g_hash_table_lookup (allocators, (gconstpointer) name);
965 allocator = _default_allocator;
968 gst_allocator_ref (allocator);
969 g_rw_lock_reader_unlock (&lock);
975 * gst_allocator_set_default:
976 * @allocator: (transfer full): a #GstAllocator
978 * Set the default allocator. This function takes ownership of @allocator.
981 gst_allocator_set_default (GstAllocator * allocator)
984 g_return_if_fail (allocator != NULL);
986 g_rw_lock_writer_lock (&lock);
987 old = _default_allocator;
988 _default_allocator = allocator;
989 g_rw_lock_writer_unlock (&lock);
992 gst_allocator_unref (old);
996 * gst_allocation_params_init:
997 * @params: a #GstAllocationParams
999 * Initialize @params to its default values
1002 gst_allocation_params_init (GstAllocationParams * params)
1004 g_return_if_fail (params != NULL);
1006 memset (params, 0, sizeof (GstAllocationParams));
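/* Illustrative sketch: requesting 16-byte aligned memory with a 16-byte
 * prefix, using the bitmask alignment convention documented at
 * gst_allocator_alloc().
 *
 *   GstAllocationParams params;
 *   GstMemory *mem;
 *
 *   gst_allocation_params_init (&params);
 *   params.align = 15;
 *   params.prefix = 16;
 *   mem = gst_allocator_alloc (NULL, 4096, &params);
 */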
1010 * gst_allocation_params_copy:
1011 * @params: (transfer none): a #GstAllocationParams
1013 * Create a copy of @params.
1015 * Free-function: gst_allocation_params_free
1017 * Returns: (transfer full): a new ##GstAllocationParams, free with
1018 * gst_allocation_params_free().
1020 GstAllocationParams *
1021 gst_allocation_params_copy (const GstAllocationParams * params)
1023 GstAllocationParams *result = NULL;
1027 (GstAllocationParams *) g_slice_copy (sizeof (GstAllocationParams),
1034 * gst_allocation_params_free:
1035 * @params: (in) (transfer full): a #GstAllocationParams
1040 gst_allocation_params_free (GstAllocationParams * params)
1042 g_slice_free (GstAllocationParams, params);
1046 * gst_allocator_alloc:
1047 * @allocator: (transfer none) (allow-none): a #GstAllocator to use
1048 * @size: size of the visible memory area
1049 * @params: (transfer none) (allow-none): optional parameters
1051 * Use @allocator to allocate a new memory block with memory that is at least
1054 * The optional @params can specify the prefix and padding for the memory. If
1055 * NULL is passed, no flags, no extra prefix/padding and a default alignment is
1058 * The prefix/padding will be filled with 0 if flags contains
1059 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
1061 * When @allocator is NULL, the default allocator will be used.
1063 * The alignment in @params is given as a bitmask so that @align + 1 equals
1064 * the amount of bytes to align to. For example, to align to 8 bytes,
1065 * use an alignment of 7.
1067 * Returns: (transfer full): a new #GstMemory.
1070 gst_allocator_alloc (GstAllocator * allocator, gsize size,
1071 GstAllocationParams * params)
1074 static GstAllocationParams defparams = { 0, 0, 0, 0, };
1077 g_return_val_if_fail (((params->align + 1) & params->align) == 0, NULL);
1079 params = &defparams;
1082 if (allocator == NULL)
1083 allocator = _default_allocator;
1085 mem = allocator->info.alloc (allocator, size, params, allocator->user_data);
1087 #ifndef GST_DISABLE_TRACE
1088 _gst_alloc_trace_new (_gst_memory_trace, mem);