*
* New allocators can be registered with gst_allocator_register().
* Allocators are identified by name and can be retrieved with
- * gst_allocator_find().
+ * gst_allocator_find(). gst_allocator_set_default() can be used to change the
+ * default allocator.
*
* New memory can be created with gst_memory_new_wrapped() that wraps the memory
* allocated elsewhere.
* The call will return a pointer to offset bytes into the region of memory.
* After the memory access is completed, gst_memory_unmap() should be called.
*
- * Memory can be copied with gst_memory_copy(), which will returnn a writable
+ * Memory can be copied with gst_memory_copy(), which will return a writable
* copy. gst_memory_share() will create a new memory block that shares the
* memory with an existing memory block at a custom offset and with a custom
* size.
*
* Memory can be efficiently merged when gst_memory_is_span() returns TRUE.
*
- * Last reviewed on 2011-06-08 (0.11.0)
+ * Last reviewed on 2012-03-28 (0.11.3)
*/
#ifdef HAVE_CONFIG_H
#include "gst_private.h"
#include "gstmemory.h"
+#ifndef GST_DISABLE_TRACE
+#include "gsttrace.h"
+static GstAllocTrace *_gst_memory_trace;
+static GstAllocTrace *_gst_allocator_trace;
+#endif
+
G_DEFINE_BOXED_TYPE (GstMemory, gst_memory, (GBoxedCopyFunc) gst_memory_ref,
(GBoxedFreeFunc) gst_memory_unref);
G_DEFINE_BOXED_TYPE (GstAllocator, gst_allocator,
(GBoxedCopyFunc) gst_allocator_ref, (GBoxedFreeFunc) gst_allocator_unref);
-/**
- * gst_memory_alignment:
- *
- * The default memory alignment in bytes - 1
- * an alignment of 7 would be the same as what malloc() guarantees.
- */
+G_DEFINE_BOXED_TYPE (GstAllocationParams, gst_allocation_params,
+ (GBoxedCopyFunc) gst_allocation_params_copy,
+ (GBoxedFreeFunc) gst_allocation_params_free);
+
#if defined(MEMORY_ALIGNMENT_MALLOC)
size_t gst_memory_alignment = 7;
#elif defined(MEMORY_ALIGNMENT_PAGESIZE)
GstMemory mem;
gsize slice_size;
guint8 *data;
- GFreeFunc free_func;
+ gpointer user_data;
+ GDestroyNotify notify;
} GstMemoryDefault;
/* the default allocator */
static void
_default_mem_init (GstMemoryDefault * mem, GstMemoryFlags flags,
GstMemory * parent, gsize slice_size, gpointer data,
- GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
+ gsize maxsize, gsize offset, gsize size, gsize align,
+ gpointer user_data, GDestroyNotify notify)
{
mem->mem.allocator = _default_mem_impl;
mem->mem.flags = flags;
mem->mem.refcount = 1;
mem->mem.parent = parent ? gst_memory_ref (parent) : NULL;
- mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? 0x5 : 0);
+ mem->mem.state = (flags & GST_MEMORY_FLAG_READONLY ? 0x1 : 0);
mem->mem.maxsize = maxsize;
+ mem->mem.align = align;
mem->mem.offset = offset;
mem->mem.size = size;
mem->slice_size = slice_size;
mem->data = data;
- mem->free_func = free_func;
+ mem->user_data = user_data;
+ mem->notify = notify;
- GST_DEBUG ("new memory %p", mem);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "new memory %p, maxsize:%" G_GSIZE_FORMAT
+ " offset:%" G_GSIZE_FORMAT " size:%" G_GSIZE_FORMAT, mem, maxsize,
+ offset, size);
}
/* create a new memory block that manages the given memory */
static GstMemoryDefault *
_default_mem_new (GstMemoryFlags flags, GstMemory * parent, gpointer data,
- GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
+ gsize maxsize, gsize offset, gsize size, gsize align, gpointer user_data,
+ GDestroyNotify notify)
{
GstMemoryDefault *mem;
gsize slice_size;
mem = g_slice_alloc (slice_size);
_default_mem_init (mem, flags, parent, slice_size,
- data, free_func, maxsize, offset, size);
+ data, maxsize, offset, size, align, user_data, notify);
return mem;
}
/* allocate the memory and structure in one block */
static GstMemoryDefault *
-_default_mem_new_block (gsize maxsize, gsize align, gsize offset, gsize size)
+_default_mem_new_block (GstMemoryFlags flags, gsize maxsize, gsize align,
+ gsize offset, gsize size)
{
GstMemoryDefault *mem;
- gsize aoffset, slice_size;
+ gsize aoffset, slice_size, padding;
guint8 *data;
/* ensure configured alignment */
data = (guint8 *) mem + sizeof (GstMemoryDefault);
- if ((aoffset = ((guintptr) data & align)))
+ /* do alignment */
+ if ((aoffset = ((guintptr) data & align))) {
aoffset = (align + 1) - aoffset;
+ data += aoffset;
+ maxsize -= aoffset;
+ }
+
+ if (offset && (flags & GST_MEMORY_FLAG_ZERO_PREFIXED))
+ memset (data, 0, offset);
+
+ padding = maxsize - (offset + size);
+ if (padding && (flags & GST_MEMORY_FLAG_ZERO_PADDED))
+ memset (data + offset + size, 0, padding);
- _default_mem_init (mem, 0, NULL, slice_size, data, NULL, maxsize,
- aoffset + offset, size);
+ _default_mem_init (mem, flags, NULL, slice_size, data, maxsize,
+ offset, size, align, NULL, NULL);
return mem;
}
static GstMemory *
-_default_alloc_alloc (GstAllocator * allocator, gsize maxsize, gsize align,
- gpointer user_data)
+_default_alloc_alloc (GstAllocator * allocator, gsize size,
+ GstAllocationParams * params, gpointer user_data)
{
- return (GstMemory *) _default_mem_new_block (maxsize, align, 0, maxsize);
+ gsize maxsize = size + params->prefix + params->padding;
+
+ return (GstMemory *) _default_mem_new_block (params->flags,
+ maxsize, params->align, params->prefix, size);
}
static gpointer
-_default_mem_map (GstMemoryDefault * mem, GstMapFlags flags)
+_default_mem_map (GstMemoryDefault * mem, gsize maxsize, GstMapFlags flags)
{
return mem->data;
}
static void
_default_mem_free (GstMemoryDefault * mem)
{
- GST_DEBUG ("free memory %p", mem);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "free memory %p", mem);
if (mem->mem.parent)
gst_memory_unref (mem->mem.parent);
- if (mem->free_func)
- mem->free_func (mem->data);
+ if (mem->notify)
+ mem->notify (mem->user_data);
g_slice_free1 (mem->slice_size, mem);
}
size = mem->mem.size > offset ? mem->mem.size - offset : 0;
copy =
- _default_mem_new_block (mem->mem.maxsize, 0, mem->mem.offset + offset,
+ _default_mem_new_block (0, mem->mem.maxsize, 0, mem->mem.offset + offset,
size);
+ GST_CAT_DEBUG (GST_CAT_PERFORMANCE,
+ "memcpy %" G_GSIZE_FORMAT " memory %p -> %p", mem->mem.maxsize, mem,
+ copy);
memcpy (copy->data, mem->data, mem->mem.maxsize);
return copy;
size = mem->mem.size - offset;
sub =
- _default_mem_new (parent->flags, parent, mem->data, NULL,
- mem->mem.maxsize, mem->mem.offset + offset, size);
+ _default_mem_new (parent->flags, parent, mem->data,
+ mem->mem.maxsize, mem->mem.offset + offset, size, mem->mem.align, NULL,
+ NULL);
return sub;
}
{
GstMemory *copy;
GstMapInfo sinfo, dinfo;
+ GstAllocationParams params = { 0, 0, 0, mem->align, };
if (!gst_memory_map (mem, &sinfo, GST_MAP_READ))
return NULL;
size = sinfo.size > offset ? sinfo.size - offset : 0;
/* use the same allocator as the memory we copy */
- copy = gst_allocator_alloc (mem->allocator, size, mem->align);
+  copy = gst_allocator_alloc (mem->allocator, size, &params);
if (!gst_memory_map (copy, &dinfo, GST_MAP_WRITE)) {
- GST_WARNING ("could not write map memory %p", copy);
+ GST_CAT_WARNING (GST_CAT_MEMORY, "could not write map memory %p", copy);
gst_memory_unmap (mem, &sinfo);
return NULL;
}
+ GST_CAT_DEBUG (GST_CAT_PERFORMANCE,
+ "memcpy %" G_GSSIZE_FORMAT " memory %p -> %p", size, mem, copy);
memcpy (dinfo.data, sinfo.data + offset, size);
gst_memory_unmap (copy, &dinfo);
gst_memory_unmap (mem, &sinfo);
(GstMemoryIsSpanFunction) _default_mem_is_span,
};
+#ifndef GST_DISABLE_TRACE
+ _gst_memory_trace = _gst_alloc_trace_register ("GstMemory", -1);
+ _gst_allocator_trace = _gst_alloc_trace_register ("GstAllocator", -1);
+#endif
+
g_rw_lock_init (&lock);
allocators = g_hash_table_new (g_str_hash, g_str_equal);
#endif
#endif
- GST_DEBUG ("memory alignment: %" G_GSIZE_FORMAT, gst_memory_alignment);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "memory alignment: %" G_GSIZE_FORMAT,
+ gst_memory_alignment);
_default_mem_impl = gst_allocator_new (&_mem_info, NULL, _priv_sysmem_notify);
* gst_memory_new_wrapped:
* @flags: #GstMemoryFlags
* @data: data to wrap
- * @free_func: function to free @data
* @maxsize: allocated size of @data
* @offset: offset in @data
* @size: size of valid data
+ * @user_data: user_data
+ * @notify: called with @user_data when the memory is freed
*
* Allocate a new memory block that wraps the given @data.
*
+ * The prefix/padding must be filled with 0 if @flags contains
+ * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
+ *
* Returns: a new #GstMemory.
*/
GstMemory *
gst_memory_new_wrapped (GstMemoryFlags flags, gpointer data,
- GFreeFunc free_func, gsize maxsize, gsize offset, gsize size)
+ gsize maxsize, gsize offset, gsize size, gpointer user_data,
+ GDestroyNotify notify)
{
GstMemoryDefault *mem;
g_return_val_if_fail (data != NULL, NULL);
g_return_val_if_fail (offset + size <= maxsize, NULL);
- mem = _default_mem_new (flags, NULL, data, free_func, maxsize, offset, size);
+ mem =
+ _default_mem_new (flags, NULL, data, maxsize, offset, size, 0, user_data,
+ notify);
+
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_new (_gst_memory_trace, mem);
+#endif
return (GstMemory *) mem;
}
{
g_return_val_if_fail (mem != NULL, NULL);
- GST_DEBUG ("memory %p, %d->%d", mem, mem->refcount, mem->refcount + 1);
+ GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
+ mem->refcount + 1);
g_atomic_int_inc (&mem->refcount);
g_return_if_fail (mem != NULL);
g_return_if_fail (mem->allocator != NULL);
- GST_DEBUG ("memory %p, %d->%d", mem, mem->refcount, mem->refcount - 1);
+ GST_CAT_TRACE (GST_CAT_MEMORY, "memory %p, %d->%d", mem, mem->refcount,
+ mem->refcount - 1);
- if (g_atomic_int_dec_and_test (&mem->refcount))
+ if (g_atomic_int_dec_and_test (&mem->refcount)) {
+ /* there should be no outstanding mappings */
+ g_return_if_fail (g_atomic_int_get (&mem->state) < 4);
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_free (_gst_memory_trace, mem);
+#endif
mem->allocator->info.mem_free (mem);
+ }
+}
+
+/**
+ * gst_memory_is_exclusive:
+ * @mem: a #GstMemory
+ *
+ * Check if the current ref to @mem is exclusive; this means that no other
+ * references exist other than @mem.
+ *
+ * Returns: %TRUE if @mem is the only reference to the memory block.
+ */
+gboolean
+gst_memory_is_exclusive (GstMemory * mem)
+{
+ g_return_val_if_fail (mem != NULL, FALSE);
+
+ return (g_atomic_int_get (&mem->refcount) == 1);
}
/**
*
* Resize the memory region. @mem should be writable and offset + size should be
* less than the maxsize of @mem.
+ *
+ * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED will be
+ * cleared when offset or padding is increased respectively.
*/
void
gst_memory_resize (GstMemory * mem, gssize offset, gsize size)
{
g_return_if_fail (mem != NULL);
- g_return_if_fail (gst_memory_is_writable (mem));
g_return_if_fail (offset >= 0 || mem->offset >= -offset);
g_return_if_fail (size + mem->offset + offset <= mem->maxsize);
- mem->offset += offset;
- mem->size = size;
-}
+ /* if we increase the prefix, we can't guarantee it is still 0 filled */
+ if ((offset > 0) && GST_MEMORY_IS_ZERO_PREFIXED (mem))
+ GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PREFIXED);
-/**
- * gst_memory_is_writable:
- * @mem: a #GstMemory
- *
- * Check if @mem is writable.
- *
- * Returns: %TRUE is @mem is writable.
- */
-gboolean
-gst_memory_is_writable (GstMemory * mem)
-{
- g_return_val_if_fail (mem != NULL, FALSE);
+ /* if we increase the padding, we can't guarantee it is still 0 filled */
+ if ((offset + size < mem->size) && GST_MEMORY_IS_ZERO_PADDED (mem))
+ GST_MEMORY_FLAG_UNSET (mem, GST_MEMORY_FLAG_ZERO_PADDED);
- return (mem->refcount == 1) &&
- ((mem->parent == NULL) || (mem->parent->refcount == 1)) &&
- ((mem->flags & GST_MEMORY_FLAG_READONLY) == 0);
+ mem->offset += offset;
+ mem->size = size;
}
static gboolean
lock_failed:
{
- GST_DEBUG ("lock failed %p: state %d, access_mode %d", mem, state,
- access_mode);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "lock failed %p: state %d, access_mode %d",
+ mem, state, access_mode);
return FALSE;
}
}
/* ERRORS */
cannot_copy:
{
- GST_DEBUG ("cannot copy memory %p", mem);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot copy memory %p", mem);
return NULL;
}
cannot_map:
{
- GST_DEBUG ("cannot map memory %p with flags %d", mem, flags);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "cannot map memory %p with flags %d", mem,
+ flags);
gst_memory_unref (result);
return NULL;
}
* - the memory backed by @mem is not accessible with the given @flags.
* - the memory was already mapped with a different mapping.
*
- * @info and its contents remains valid for as long as @mem is alive and until
- * gst_memory_unmap() is called.
+ * @info and its contents remain valid for as long as @mem is valid and
+ * until gst_memory_unmap() is called.
*
* For each gst_memory_map() call, a corresponding gst_memory_unmap() call
* should be done.
/* ERRORS */
lock_failed:
{
- GST_DEBUG ("mem %p: lock %d failed", mem, flags);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "mem %p: lock %d failed", mem, flags);
return FALSE;
}
error:
{
    /* something went wrong, restore the original state again */
- GST_ERROR ("mem %p: map failed", mem);
+ GST_CAT_ERROR (GST_CAT_MEMORY, "mem %p: map failed", mem);
gst_memory_unlock (mem);
return FALSE;
}
GstMemory *copy;
g_return_val_if_fail (mem != NULL, NULL);
- g_return_val_if_fail (gst_memory_lock (mem, GST_MAP_READ), NULL);
copy = mem->allocator->info.mem_copy (mem, offset, size);
- gst_memory_unlock (mem);
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_new (_gst_memory_trace, copy);
+#endif
return copy;
}
GstMemory *
gst_memory_share (GstMemory * mem, gssize offset, gssize size)
{
+ GstMemory *shared;
+
g_return_val_if_fail (mem != NULL, NULL);
+ g_return_val_if_fail (!GST_MEMORY_FLAG_IS_SET (mem, GST_MEMORY_FLAG_NO_SHARE),
+ NULL);
+
+ shared = mem->allocator->info.mem_share (mem, offset, size);
- return mem->allocator->info.mem_share (mem, offset, size);
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_new (_gst_memory_trace, shared);
+#endif
+
+ return shared;
}
/**
}
/**
- * gst_allocator_register:
+ * gst_allocator_new:
* @info: a #GstMemoryInfo
* @user_data: user data
* @notify: a #GDestroyNotify for @user_data
 * All functions in @info are mandatory except the copy and is_span
* functions, which will have a default implementation when left NULL.
*
- * The @user_data will be passed to all calls of the alloc function and the
- * @notify function.
+ * The @user_data will be passed to all calls of the alloc function. @notify
+ * will be called with @user_data when the allocator is freed.
*
* Returns: a new #GstAllocator.
*/
INSTALL_FALLBACK (mem_is_span);
#undef INSTALL_FALLBACK
- GST_DEBUG ("new allocator %p", allocator);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "new allocator %p", allocator);
+
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_new (_gst_allocator_trace, allocator);
+#endif
return allocator;
}
/**
- * gst_alocator_get_memory_type:
+ * gst_allocator_get_memory_type:
* @allocator: a #GstAllocator
*
* Get the memory type allocated by this allocator
*
- * Returns: @allocator with increased refcount
+ * Returns: the memory type provided by @allocator
*/
const gchar *
gst_allocator_get_memory_type (GstAllocator * allocator)
}
/**
- * gst_alocator_ref:
+ * gst_allocator_ref:
* @allocator: a #GstAllocator
*
* Increases the refcount of @allocator.
{
g_return_val_if_fail (allocator != NULL, NULL);
- GST_DEBUG ("alocator %p, %d->%d", allocator, allocator->refcount,
- allocator->refcount + 1);
+ GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
+ allocator->refcount, allocator->refcount + 1);
g_atomic_int_inc (&allocator->refcount);
* gst_allocator_unref:
* @allocator: a #GstAllocator
*
- * Decreases the refcount of @allocator. When the refcount reaches 0, the free
- * function of @allocator will be called.
+ * Decreases the refcount of @allocator. When the refcount reaches 0, the notify
+ * function of @allocator will be called and the allocator will be freed.
*/
void
gst_allocator_unref (GstAllocator * allocator)
{
g_return_if_fail (allocator != NULL);
- GST_DEBUG ("allocator %p, %d->%d", allocator, allocator->refcount,
- allocator->refcount - 1);
+ GST_CAT_TRACE (GST_CAT_MEMORY, "allocator %p, %d->%d", allocator,
+ allocator->refcount, allocator->refcount - 1);
if (g_atomic_int_dec_and_test (&allocator->refcount)) {
if (allocator->notify)
allocator->notify (allocator->user_data);
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_free (_gst_allocator_trace, allocator);
+#endif
g_slice_free1 (sizeof (GstAllocator), allocator);
}
}
g_return_if_fail (name != NULL);
g_return_if_fail (allocator != NULL);
- GST_DEBUG ("registering allocator %p with name \"%s\"", allocator, name);
+ GST_CAT_DEBUG (GST_CAT_MEMORY, "registering allocator %p with name \"%s\"",
+ allocator, name);
g_rw_lock_writer_lock (&lock);
g_hash_table_insert (allocators, (gpointer) name, (gpointer) allocator);
}
/**
+ * gst_allocation_params_init:
+ * @params: a #GstAllocationParams
+ *
+ * Initialize @params to its default values
+ */
+void
+gst_allocation_params_init (GstAllocationParams * params)
+{
+ g_return_if_fail (params != NULL);
+
+ memset (params, 0, sizeof (GstAllocationParams));
+}
+
+/**
+ * gst_allocation_params_copy:
+ * @params: (transfer none): a #GstAllocationParams
+ *
+ * Create a copy of @params.
+ *
+ * Free-function: gst_allocation_params_free
+ *
+ * Returns: (transfer full): a new #GstAllocationParams, free with
+ * gst_allocation_params_free().
+ */
+GstAllocationParams *
+gst_allocation_params_copy (const GstAllocationParams * params)
+{
+ GstAllocationParams *result = NULL;
+
+ if (params) {
+ result =
+ (GstAllocationParams *) g_slice_copy (sizeof (GstAllocationParams),
+ params);
+ }
+ return result;
+}
+
+/**
+ * gst_allocation_params_free:
+ * @params: (in) (transfer full): a #GstAllocationParams
+ *
+ * Free @params
+ */
+void
+gst_allocation_params_free (GstAllocationParams * params)
+{
+ g_slice_free (GstAllocationParams, params);
+}
+
+/**
* gst_allocator_alloc:
* @allocator: (transfer none) (allow-none): a #GstAllocator to use
- * @maxsize: allocated size of @data
- * @align: alignment for the data
+ * @size: size of the visible memory area
+ * @params: (transfer none) (allow-none): optional parameters
*
* Use @allocator to allocate a new memory block with memory that is at least
- * @maxsize big and has the given alignment.
+ * @size big.
+ *
+ * The optional @params can specify the prefix and padding for the memory. If
+ * NULL is passed, no flags, no extra prefix/padding and a default alignment is
+ * used.
+ *
+ * The prefix/padding will be filled with 0 if flags contains
+ * #GST_MEMORY_FLAG_ZERO_PREFIXED and #GST_MEMORY_FLAG_ZERO_PADDED respectively.
*
* When @allocator is NULL, the default allocator will be used.
*
- * @align is given as a bitmask so that @align + 1 equals the amount of bytes to
- * align to. For example, to align to 8 bytes, use an alignment of 7.
+ * The alignment in @params is given as a bitmask so that @align + 1 equals
+ * the amount of bytes to align to. For example, to align to 8 bytes,
+ * use an alignment of 7.
*
* Returns: (transfer full): a new #GstMemory.
*/
GstMemory *
-gst_allocator_alloc (GstAllocator * allocator, gsize maxsize, gsize align)
+gst_allocator_alloc (GstAllocator * allocator, gsize size,
+ GstAllocationParams * params)
{
- g_return_val_if_fail (((align + 1) & align) == 0, NULL);
+ GstMemory *mem;
+ static GstAllocationParams defparams = { 0, 0, 0, 0, };
+
+ if (params) {
+ g_return_val_if_fail (((params->align + 1) & params->align) == 0, NULL);
+ } else {
+ params = &defparams;
+ }
if (allocator == NULL)
allocator = _default_allocator;
- return allocator->info.alloc (allocator, maxsize, align,
- allocator->user_data);
+ mem = allocator->info.alloc (allocator, size, params, allocator->user_data);
+
+#ifndef GST_DISABLE_TRACE
+ _gst_alloc_trace_new (_gst_memory_trace, mem);
+#endif
+ return mem;
}