2 * Copyright (C) 2011 Wim Taymans <wim.taymans@gmail.be>
4 * gstmemory.c: memory block handling
6 * This library is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU Library General Public
8 * License as published by the Free Software Foundation; either
9 * version 2 of the License, or (at your option) any later version.
11 * This library is distributed in the hope that it will be useful,
12 * but WITHOUT ANY WARRANTY; without even the implied warranty of
13 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 * Library General Public License for more details.
16 * You should have received a copy of the GNU Library General Public
17 * License along with this library; if not, write to the
18 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
19 * Boston, MA 02111-1307, USA.
24 * @short_description: refcounted wrapper for memory blocks
25 * @see_also: #GstBuffer
27 * GstMemory is a lightweight refcounted object that wraps a region of memory.
28 * They are typically used to manage the data of a #GstBuffer.
30 * A GstMemory object has an allocated region of memory of maxsize. The maximum
31 * size does not change during the lifetime of the memory object. The memory
32 * also has an offset and size property that specifies the valid range of memory
33 * in the allocated region.
35 * Memory is usually created by allocators with a gst_allocator_alloc()
36 * method call. When NULL is used as the allocator, the default allocator will
 * be used.
39 * New allocators can be registered with gst_allocator_register().
40 * Allocators are identified by name and can be retrieved with
41 * gst_allocator_find().
43 * New memory can be created with gst_memory_new_wrapped() that wraps the memory
44 * allocated elsewhere.
46 * Refcounting of the memory block is performed with gst_memory_ref() and
49 * The size of the memory can be retrieved and changed with
50 * gst_memory_get_sizes() and gst_memory_resize() respectively.
52 * Getting access to the data of the memory is performed with gst_memory_map().
53 * The call will return a pointer to offset bytes into the region of memory.
54 * After the memory access is completed, gst_memory_unmap() should be called.
56 * Memory can be copied with gst_memory_copy(), which will return a writable
57 * copy. gst_memory_share() will create a new memory block that shares the
58 * memory with an existing memory block at a custom offset and with a custom
61 * Memory can be efficiently merged when gst_memory_is_span() returns TRUE.
63 * Last reviewed on 2011-06-08 (0.11.0)
70 #include "gst_private.h"
71 #include "gstmemory.h"
73 #ifndef GST_DISABLE_TRACE
75 static GstAllocTrace *_gst_memory_trace;
76 static GstAllocTrace *_gst_allocator_trace;
79 G_DEFINE_BOXED_TYPE (GstMemory, gst_memory, (GBoxedCopyFunc) gst_memory_ref,
80 (GBoxedFreeFunc) gst_memory_unref);
82 G_DEFINE_BOXED_TYPE (GstAllocator, gst_allocator,
83 (GBoxedCopyFunc) gst_allocator_ref, (GBoxedFreeFunc) gst_allocator_unref);
85 G_DEFINE_BOXED_TYPE (GstAllocationParams, gst_allocation_params,
86 (GBoxedCopyFunc) gst_allocation_params_copy,
87 (GBoxedFreeFunc) gst_allocation_params_free);
90 * gst_memory_alignment:
92 * The default memory alignment in bytes - 1
93 * an alignment of 7 would be the same as what malloc() guarantees.
95 #if defined(MEMORY_ALIGNMENT_MALLOC)
96 size_t gst_memory_alignment = 7;
97 #elif defined(MEMORY_ALIGNMENT_PAGESIZE)
98 /* we fill this in in the _init method */
99 size_t gst_memory_alignment = 0;
100 #elif defined(MEMORY_ALIGNMENT)
101 size_t gst_memory_alignment = MEMORY_ALIGNMENT - 1;
103 #error "No memory alignment configured"
104 size_t gst_memory_alignment = 0;
114 GDestroyNotify notify;
117 /* default memory implementation */
124 GDestroyNotify notify;
127 /* the default allocator */
128 static GstAllocator *_default_allocator;
130 /* our predefined allocators */
131 static GstAllocator *_default_mem_impl;
133 /* initialize the fields */
135 _default_mem_init (GstMemoryDefault * mem, GstMemoryFlags flags,
136 GstMemory * parent, gsize slice_size, gpointer data,
137 gsize maxsize, gsize offset, gsize size, gsize align,
138 gpointer user_data, GDestroyNotify notify)
140 mem->mem.allocator = _default_mem_impl;
141 mem->mem.flags = flags;
142 mem->mem.refcount = 1;
143 mem->mem.parent = parent ? gst_memory_ref (parent) : NULL;
144 mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? 0x1 : 0);
145 mem->mem.maxsize = maxsize;
146 mem->mem.align = align;
147 mem->mem.offset = offset;
148 mem->mem.size = size;
149 mem->slice_size = slice_size;
151 mem->user_data = user_data;
152 mem->notify = notify;
154 GST_CAT_DEBUG (GST_CAT_MEMORY, "new memory %p, maxsize:%" G_GSIZE_FORMAT
155 " offset:%" G_GSIZE_FORMAT " size:%" G_GSIZE_FORMAT, mem, maxsize,
159 /* create a new memory block that manages the given memory */
160 static GstMemoryDefault *
161 _default_mem_new (GstMemoryFlags flags, GstMemory * parent, gpointer data,
162 gsize maxsize, gsize offset, gsize size, gsize align, gpointer user_data,
163 GDestroyNotify notify)
165 GstMemoryDefault *mem;
168 slice_size = sizeof (GstMemoryDefault);
170 mem = g_slice_alloc (slice_size);
171 _default_mem_init (mem, flags, parent, slice_size,
172 data, maxsize, offset, size, align, user_data, notify);
177 /* allocate the memory and structure in one block */
178 static GstMemoryDefault *
179 _default_mem_new_block (GstMemoryFlags flags, gsize maxsize, gsize align,
180 gsize offset, gsize size)
182 GstMemoryDefault *mem;
183 gsize aoffset, slice_size, padding;
186 /* ensure configured alignment */
187 align |= gst_memory_alignment;
188 /* allocate more to compensate for alignment */
190 /* alloc header and data in one block */
191 slice_size = sizeof (GstMemoryDefault) + maxsize;
193 mem = g_slice_alloc (slice_size);
197 data = (guint8 *) mem + sizeof (GstMemoryDefault);
200 if ((aoffset = ((guintptr) data & align))) {
201 aoffset = (align + 1) - aoffset;
206 if (offset && (flags & GST_MEMORY_FLAG_ZERO_PREFIXED))
207 memset (data, 0, offset);
209 padding = maxsize - (offset + size);
210 if (padding && (flags & GST_MEMORY_FLAG_ZERO_PADDED))
211 memset (data + offset + size, 0, padding);
213 _default_mem_init (mem, flags, NULL, slice_size, data, maxsize,
214 offset, size, align, NULL, NULL);
220 _default_alloc_alloc (GstAllocator * allocator, gsize size,
221 GstAllocationParams * params, gpointer user_data)
223 gsize maxsize = size + params->prefix + params->padding;
225 return (GstMemory *) _default_mem_new_block (params->flags,
226 maxsize, params->align, params->prefix, size);
230 _default_mem_map (GstMemoryDefault * mem, GstMapFlags flags)
236 _default_mem_unmap (GstMemoryDefault * mem)
242 _default_mem_free (GstMemoryDefault * mem)
244 GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);
247 gst_memory_unref (mem->mem.parent);
250 mem->notify (mem->user_data);
252 g_slice_free1 (mem->slice_size, mem);
255 static GstMemoryDefault *
256 _default_mem_copy (GstMemoryDefault * mem, gssize offset, gsize size)
258 GstMemoryDefault *copy;
261 size = mem->mem.size > offset ? mem->mem.size - offset : 0;
264 _default_mem_new_block (0, mem->mem.maxsize, 0, mem->mem.offset + offset,
266 memcpy (copy->data, mem->data, mem->mem.maxsize);
267 GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
272 static GstMemoryDefault *
273 _default_mem_share (GstMemoryDefault * mem, gssize offset, gsize size)
275 GstMemoryDefault *sub;
278 /* find the real parent */
279 if ((parent = mem->mem.parent) == NULL)
280 parent = (GstMemory *) mem;
283 size = mem->mem.size - offset;
286 _default_mem_new (parent->flags, parent, mem->data,
287 mem->mem.maxsize, mem->mem.offset + offset, size, mem->mem.align, NULL,
294 _default_mem_is_span (GstMemoryDefault * mem1, GstMemoryDefault * mem2,
299 GstMemoryDefault *parent;
301 parent = (GstMemoryDefault *) mem1->mem.parent;
303 *offset = mem1->mem.offset - parent->mem.offset;
306 /* and memory is contiguous */
307 return mem1->data + mem1->mem.offset + mem1->mem.size ==
308 mem2->data + mem2->mem.offset;
312 _fallback_mem_copy (GstMemory * mem, gssize offset, gssize size)
315 GstMapInfo sinfo, dinfo;
316 GstAllocationParams params = { 0, 0, 0, mem->align, };
318 if (!gst_memory_map (mem, &sinfo, GST_MAP_READ))
322 size = sinfo.size > offset ? sinfo.size - offset : 0;
324 /* use the same allocator as the memory we copy */
325 copy = gst_allocator_alloc (mem->allocator, size, ¶ms);
326 if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
327 GST_CAT_WARNING (GST_CAT_MEMORY, "could not write map memory %p", copy);
328 gst_memory_unmap (mem, &sinfo);
332 memcpy (dinfo.data, sinfo.data + offset, size);
333 GST_CAT_DEBUG (GST_CAT_PERFORMANCE, "copy memory %p -> %p", mem, copy);
334 gst_memory_unmap (copy, &dinfo);
335 gst_memory_unmap (mem, &sinfo);
341 _fallback_mem_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
347 static GHashTable *allocators;
350 _priv_sysmem_notify (gpointer user_data)
352 g_warning ("The default memory allocator was freed!");
356 _priv_gst_memory_initialize (void)
358 static const GstMemoryInfo _mem_info = {
359 GST_ALLOCATOR_SYSMEM,
360 (GstAllocatorAllocFunction) _default_alloc_alloc,
361 (GstMemoryMapFunction) _default_mem_map,
362 (GstMemoryUnmapFunction) _default_mem_unmap,
363 (GstMemoryFreeFunction) _default_mem_free,
364 (GstMemoryCopyFunction) _default_mem_copy,
365 (GstMemoryShareFunction) _default_mem_share,
366 (GstMemoryIsSpanFunction) _default_mem_is_span,
369 #ifndef GST_DISABLE_TRACE
370 _gst_memory_trace = _gst_alloc_trace_register ("GstMemory", -1);
371 _gst_allocator_trace = _gst_alloc_trace_register ("GstAllocator", -1);
374 g_rw_lock_init (&lock);
375 allocators = g_hash_table_new (g_str_hash, g_str_equal);
377 #ifdef HAVE_GETPAGESIZE
378 #ifdef MEMORY_ALIGNMENT_PAGESIZE
379 gst_memory_alignment = getpagesize () - 1;
383 GST_CAT_DEBUG (GST_CAT_MEMORY, "memory alignment: %" G_GSIZE_FORMAT,
384 gst_memory_alignment);
386 _default_mem_impl = gst_allocator_new (&_mem_info, NULL, _priv_sysmem_notify);
388 _default_allocator = gst_allocator_ref (_default_mem_impl);
389 gst_allocator_register (GST_ALLOCATOR_SYSMEM,
390 gst_allocator_ref (_default_mem_impl));
394 * gst_memory_new_wrapped:
395 * @flags: #GstMemoryFlags
396 * @data: data to wrap
397 * @maxsize: allocated size of @data
398 * @offset: offset in @data
399 * @size: size of valid data
400 * @user_data: user_data
401 * @notify: called with @user_data when the memory is freed
403 * Allocate a new memory block that wraps the given @data.
405 * The prefix/padding must be filled with 0 if @flags contains
406 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
408 * Returns: a new #GstMemory.
411 gst_memory_new_wrapped (GstMemoryFlags flags, gpointer data,
412 gsize maxsize, gsize offset, gsize size, gpointer user_data,
413 GDestroyNotify notify)
415 GstMemoryDefault *mem;
417 g_return_val_if_fail (data != NULL, NULL);
418 g_return_val_if_fail (offset + size <= maxsize, NULL);
421 _default_mem_new (flags, NULL, data, maxsize, offset, size, 0, user_data,
424 #ifndef GST_DISABLE_TRACE
425 _gst_alloc_trace_new (_gst_memory_trace, mem);
428 return (GstMemory *) mem;
435 * Increases the refcount of @mem.
437 * Returns: @mem with increased refcount
440 gst_memory_ref (GstMemory * mem)
442 g_return_val_if_fail (mem != NULL, NULL);
444 GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
447 g_atomic_int_inc (&mem->refcount);
456 * Decreases the refcount of @mem. When the refcount reaches 0, the free
457 * function of @mem will be called.
460 gst_memory_unref (GstMemory * mem)
462 g_return_if_fail (mem != NULL);
463 g_return_if_fail (mem->allocator != NULL);
465 GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
468 if (g_atomic_int_dec_and_test (&mem->refcount)) {
469 /* there should be no outstanding mappings */
470 g_return_if_fail (g_atomic_int_get (&mem->state) < 4);
471 #ifndef GST_DISABLE_TRACE
472 _gst_alloc_trace_free (_gst_memory_trace, mem);
474 mem->allocator->info.mem_free (mem);
479 * gst_memory_is_exclusive:
482 * Check if the current ref to @mem is exclusive, this means that no other
483 * references exist other than @mem.
486 gst_memory_is_exclusive (GstMemory * mem)
488 g_return_val_if_fail (mem != NULL, FALSE);
490 return (g_atomic_int_get (&mem->refcount) == 1);
494 * gst_memory_get_sizes:
496 * @offset: pointer to offset
497 * @maxsize: pointer to maxsize
499 * Get the current @size, @offset and @maxsize of @mem.
501 * Returns: the current sizes of @mem
504 gst_memory_get_sizes (GstMemory * mem, gsize * offset, gsize * maxsize)
506 g_return_val_if_fail (mem != NULL, 0);
509 *offset = mem->offset;
511 *maxsize = mem->maxsize;
519 * @offset: a new offset
522 * Resize the memory region. @mem should be writable and offset + size should be
523 * less than the maxsize of @mem.
525 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED will be
526 * cleared when offset or padding is increased respectively.
529 gst_memory_resize (GstMemory * mem, gssize offset, gsize size)
531 g_return_if_fail (mem != NULL);
532 g_return_if_fail (offset >= 0 || mem->offset >= -offset);
533 g_return_if_fail (size + mem->offset + offset <= mem->maxsize);
535 /* if we increase the prefix, we can't guarantee it is still 0 filled */
536 if ((offset > 0) && GST_MEMORY_IS_ZERO_PREFIXED (mem))
537 GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PREFIXED);
539 /* if we increase the padding, we can't guarantee it is still 0 filled */
540 if ((offset + size < mem->size) && GST_MEMORY_IS_ZERO_PADDED (mem))
541 GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PADDED);
543 mem->offset += offset;
548 gst_memory_lock (GstMemory * mem, GstMapFlags flags)
550 gint access_mode, state, newstate;
552 access_mode = flags & 3;
555 state = g_atomic_int_get (&mem->state);
557 /* nothing mapped, set access_mode and refcount */
558 newstate = 4 | access_mode;
560 /* access_mode must match */
561 if ((state & access_mode) != access_mode)
563 /* increase refcount */
564 newstate = state + 4;
566 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
572 GST_CAT_DEBUG (GST_CAT_MEMORY, "lock failed %p: state %d, access_mode %d",
573 mem, state, access_mode);
579 gst_memory_unlock (GstMemory * mem)
581 gint state, newstate;
584 state = g_atomic_int_get (&mem->state);
585 /* decrease the refcount */
586 newstate = state - 4;
587 /* last refcount, unset access_mode */
590 } while (!g_atomic_int_compare_and_exchange (&mem->state, state, newstate));
595 * gst_memory_make_mapped:
596 * @mem: (transfer full): a #GstMemory
597 * @info: (out): pointer for info
598 * @flags: mapping flags
600 * Create a #GstMemory object that is mapped with @flags. If @mem is mappable
601 * with @flags, this function returns the mapped @mem directly. Otherwise a
602 * mapped copy of @mem is returned.
604 * This function takes ownership of old @mem and returns a reference to a new
607 * Returns: (transfer full): a #GstMemory object mapped with @flags or NULL when
608 * a mapping is not possible.
611 gst_memory_make_mapped (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
615 if (gst_memory_map (mem, info, flags)) {
618 result = gst_memory_copy (mem, 0, -1);
619 gst_memory_unref (mem);
624 if (!gst_memory_map (result, info, flags))
632 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
637 GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
639 gst_memory_unref (result);
647 * @info: (out): pointer for info
648 * @flags: mapping flags
650 * Fill @info with the pointer and sizes of the memory in @mem that can be
651 * accessed according to @flags.
653 * This function can return %FALSE for various reasons:
654 * - the memory backed by @mem is not accessible with the given @flags.
655 * - the memory was already mapped with a different mapping.
657 * @info and its contents remain valid for as long as @mem is valid and
658 * until gst_memory_unmap() is called.
660 * For each gst_memory_map() call, a corresponding gst_memory_unmap() call
663 * Returns: %TRUE if the map operation was successful.
666 gst_memory_map (GstMemory * mem, GstMapInfo * info, GstMapFlags flags)
668 g_return_val_if_fail (mem != NULL, FALSE);
669 g_return_val_if_fail (info != NULL, FALSE);
671 if (!gst_memory_lock (mem, flags))
674 info->data = mem->allocator->info.mem_map (mem, mem->maxsize, flags);
676 if (G_UNLIKELY (info->data == NULL))
681 info->size = mem->size;
682 info->maxsize = mem->maxsize - mem->offset;
683 info->data = info->data + mem->offset;
690 GST_CAT_DEBUG (GST_CAT_MEMORY, "mem %p: lock %d failed", mem, flags);
695 /* something went wrong, restore the orginal state again */
696 GST_CAT_ERROR (GST_CAT_MEMORY, "mem %p: map failed", mem);
697 gst_memory_unlock (mem);
705 * @info: a #GstMapInfo
707 * Release the memory obtained with gst_memory_map()
710 gst_memory_unmap (GstMemory * mem, GstMapInfo * info)
712 g_return_if_fail (mem != NULL);
713 g_return_if_fail (info != NULL);
714 g_return_if_fail (info->memory == mem);
715 /* there must be a ref */
716 g_return_if_fail (g_atomic_int_get (&mem->state) >= 4);
718 mem->allocator->info.mem_unmap (mem);
719 gst_memory_unlock (mem);
725 * @offset: an offset to copy
726 * @size: size to copy or -1 to copy all bytes from offset
728 * Return a copy of @size bytes from @mem starting from @offset. This copy is
729 * guaranteed to be writable. @size can be set to -1 to return a copy all bytes
732 * Returns: a new #GstMemory.
735 gst_memory_copy (GstMemory * mem, gssize offset, gssize size)
739 g_return_val_if_fail (mem != NULL, NULL);
741 copy = mem->allocator->info.mem_copy (mem, offset, size);
743 #ifndef GST_DISABLE_TRACE
744 _gst_alloc_trace_new (_gst_memory_trace, copy);
753 * @offset: an offset to share
754 * @size: size to share or -1 to share bytes from offset
756 * Return a shared copy of @size bytes from @mem starting from @offset. No
757 * memory copy is performed and the memory region is simply shared. The result
758 * is guaranteed to be not-writable. @size can be set to -1 to return a share
759 * all bytes from @offset.
761 * Returns: a new #GstMemory.
764 gst_memory_share (GstMemory * mem, gssize offset, gssize size)
768 g_return_val_if_fail (mem != NULL, NULL);
769 g_return_val_if_fail (!GST_MEMORY_FLAG_IS_SET (mem, GST_MEMORY_FLAG_NO_SHARE),
772 shared = mem->allocator->info.mem_share (mem, offset, size);
774 #ifndef GST_DISABLE_TRACE
775 _gst_alloc_trace_new (_gst_memory_trace, shared);
782 * gst_memory_is_span:
783 * @mem1: a #GstMemory
784 * @mem2: a #GstMemory
785 * @offset: a pointer to a result offset
787 * Check if @mem1 and mem2 share the memory with a common parent memory object
788 * and that the memory is contiguous.
790 * If this is the case, the memory of @mem1 and @mem2 can be merged
791 * efficiently by performing gst_memory_share() on the parent object from
792 * the returned @offset.
794 * Returns: %TRUE if the memory is contiguous and of a common parent.
797 gst_memory_is_span (GstMemory * mem1, GstMemory * mem2, gsize * offset)
799 g_return_val_if_fail (mem1 != NULL, FALSE);
800 g_return_val_if_fail (mem2 != NULL, FALSE);
802 /* need to have the same allocators */
803 if (mem1->allocator != mem2->allocator)
806 /* need to have the same parent */
807 if (mem1->parent == NULL || mem1->parent != mem2->parent)
810 /* and memory is contiguous */
811 if (!mem1->allocator->info.mem_is_span (mem1, mem2, offset))
818 * gst_allocator_register:
819 * @info: a #GstMemoryInfo
820 * @user_data: user data
821 * @notify: a #GDestroyNotify for @user_data
823 * Create a new memory allocator with @info and @user_data.
825 * All functions in @info are mandatory exept the copy and is_span
826 * functions, which will have a default implementation when left NULL.
828 * The @user_data will be passed to all calls of the alloc function. @notify
829 * will be called with @user_data when the allocator is freed.
831 * Returns: a new #GstAllocator.
834 gst_allocator_new (const GstMemoryInfo * info, gpointer user_data,
835 GDestroyNotify notify)
837 GstAllocator *allocator;
839 #define INSTALL_FALLBACK(_t) \
840 if (allocator->info._t == NULL) allocator->info._t = _fallback_ ##_t;
842 g_return_val_if_fail (info != NULL, NULL);
843 g_return_val_if_fail (info->alloc != NULL, NULL);
844 g_return_val_if_fail (info->mem_map != NULL, NULL);
845 g_return_val_if_fail (info->mem_unmap != NULL, NULL);
846 g_return_val_if_fail (info->mem_free != NULL, NULL);
847 g_return_val_if_fail (info->mem_share != NULL, NULL);
849 allocator = g_slice_new (GstAllocator);
850 allocator->refcount = 1;
851 allocator->info = *info;
852 allocator->user_data = user_data;
853 allocator->notify = notify;
854 INSTALL_FALLBACK (mem_copy);
855 INSTALL_FALLBACK (mem_is_span);
856 #undef INSTALL_FALLBACK
858 GST_CAT_DEBUG (GST_CAT_MEMORY, "new allocator %p", allocator);
860 #ifndef GST_DISABLE_TRACE
861 _gst_alloc_trace_new (_gst_allocator_trace, allocator);
868 * gst_alocator_get_memory_type:
869 * @allocator: a #GstAllocator
871 * Get the memory type allocated by this allocator
873 * Returns: the memory type provided by @allocator
876 gst_allocator_get_memory_type (GstAllocator * allocator)
878 g_return_val_if_fail (allocator != NULL, NULL);
880 return allocator->info.mem_type;
885 * @allocator: a #GstAllocator
887 * Increases the refcount of @allocator.
889 * Returns: @allocator with increased refcount
892 gst_allocator_ref (GstAllocator * allocator)
894 g_return_val_if_fail (allocator != NULL, NULL);
896 GST_CAT_TRACE (GST_CAT_MEMORY, "alocator %p, %d->%d", allocator,
897 allocator->refcount, allocator->refcount + 1);
899 g_atomic_int_inc (&allocator->refcount);
905 * gst_allocator_unref:
906 * @allocator: a #GstAllocator
908 * Decreases the refcount of @allocator. When the refcount reaches 0, the notify
909 * function of @allocator will be called and the allocator will be freed.
912 gst_allocator_unref (GstAllocator * allocator)
914 g_return_if_fail (allocator != NULL);
916 GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
917 allocator->refcount, allocator->refcount - 1);
919 if (g_atomic_int_dec_and_test (&allocator->refcount)) {
920 if (allocator->notify)
921 allocator->notify (allocator->user_data);
922 #ifndef GST_DISABLE_TRACE
923 _gst_alloc_trace_free (_gst_allocator_trace, allocator);
925 g_slice_free1 (sizeof (GstAllocator), allocator);
930 * gst_allocator_register:
931 * @name: the name of the allocator
932 * @allocator: (transfer full): #GstAllocator
934 * Registers the memory @allocator with @name. This function takes ownership of
938 gst_allocator_register (const gchar * name, GstAllocator * allocator)
940 g_return_if_fail (name != NULL);
941 g_return_if_fail (allocator != NULL);
943 GST_CAT_DEBUG (GST_CAT_MEMORY, "registering allocator %p with name \"%s\"",
946 g_rw_lock_writer_lock (&lock);
947 g_hash_table_insert (allocators, (gpointer) name, (gpointer) allocator);
948 g_rw_lock_writer_unlock (&lock);
952 * gst_allocator_find:
953 * @name: the name of the allocator
955 * Find a previously registered allocator with @name. When @name is NULL, the
956 * default allocator will be returned.
958 * Returns: (transfer full): a #GstAllocator or NULL when the allocator with @name was not
959 * registered. Use gst_allocator_unref() to release the allocator after usage.
962 gst_allocator_find (const gchar * name)
964 GstAllocator *allocator;
966 g_rw_lock_reader_lock (&lock);
968 allocator = g_hash_table_lookup (allocators, (gconstpointer) name);
970 allocator = _default_allocator;
973 gst_allocator_ref (allocator);
974 g_rw_lock_reader_unlock (&lock);
980 * gst_allocator_set_default:
981 * @allocator: (transfer full): a #GstAllocator
983 * Set the default allocator. This function takes ownership of @allocator.
986 gst_allocator_set_default (GstAllocator * allocator)
989 g_return_if_fail (allocator != NULL);
991 g_rw_lock_writer_lock (&lock);
992 old = _default_allocator;
993 _default_allocator = allocator;
994 g_rw_lock_writer_unlock (&lock);
997 gst_allocator_unref (old);
1001 * gst_allocation_params_init:
1002 * @params: a #GstAllocationParams
1004 * Initialize @params to its default values
1007 gst_allocation_params_init (GstAllocationParams * params)
1009 g_return_if_fail (params != NULL);
1011 memset (params, 0, sizeof (GstAllocationParams));
1015 * gst_allocation_params_copy:
1016 * @params: (transfer none): a #GstAllocationParams
1018 * Create a copy of @params.
1020 * Free-function: gst_allocation_params_free
1022 * Returns: (transfer full): a new ##GstAllocationParams, free with
1023 * gst_allocation_params_free().
1025 GstAllocationParams *
1026 gst_allocation_params_copy (const GstAllocationParams * params)
1028 GstAllocationParams *result = NULL;
1032 (GstAllocationParams *) g_slice_copy (sizeof (GstAllocationParams),
1039 * gst_allocation_params_free:
1040 * @params: (in) (transfer full): a #GstAllocationParams
1045 gst_allocation_params_free (GstAllocationParams * params)
1047 g_slice_free (GstAllocationParams, params);
1051 * gst_allocator_alloc:
1052 * @allocator: (transfer none) (allow-none): a #GstAllocator to use
1053 * @size: size of the visible memory area
1054 * @params: (transfer none) (allow-none): optional parameters
1056 * Use @allocator to allocate a new memory block with memory that is at least
1059 * The optional @params can specify the prefix and padding for the memory. If
1060 * NULL is passed, no flags, no extra prefix/padding and a default alignment is
1063 * The prefix/padding will be filled with 0 if flags contains
1064 * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
1066 * When @allocator is NULL, the default allocator will be used.
1068 * The alignment in @params is given as a bitmask so that @align + 1 equals
1069 * the amount of bytes to align to. For example, to align to 8 bytes,
1070 * use an alignment of 7.
1072 * Returns: (transfer full): a new #GstMemory.
1075 gst_allocator_alloc (GstAllocator * allocator, gsize size,
1076 GstAllocationParams * params)
1079 static GstAllocationParams defparams = { 0, 0, 0, 0, };
1082 g_return_val_if_fail (((params->align + 1) & params->align) == 0, NULL);
1084 params = &defparams;
1087 if (allocator == NULL)
1088 allocator = _default_allocator;
1090 mem = allocator->info.alloc (allocator, size, params, allocator->user_data);
1092 #ifndef GST_DISABLE_TRACE
1093 _gst_alloc_trace_new (_gst_memory_trace, mem);