/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */
/* Notes on macros:
 * having DISABLE_MEM_POOLS defined disables mem chunks altogether; their
 * allocations are performed through ordinary g_malloc/g_free.
 * having G_DISABLE_CHECKS defined disables use of glib_mem_profiler_table and
 * g_mem_profile().
 * REALLOC_0_WORKS is defined if realloc (NULL, x) works.
 * SANE_MALLOC_PROTOS is defined if the system's malloc() and friends
 * match the corresponding GLib prototypes; keep configure.in and gmem.h in sync here.
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */
#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L

#ifdef G_DISABLE_CHECKS
# define ENTER_MEM_CHUNK_ROUTINE()
# define LEAVE_MEM_CHUNK_ROUTINE()
# define IN_MEM_CHUNK_ROUTINE() FALSE
#else /* !G_DISABLE_CHECKS */
static GPrivate* mem_chunk_recursion = NULL;
# define MEM_CHUNK_ROUTINE_COUNT() GPOINTER_TO_UINT (g_private_get (mem_chunk_recursion))
# define ENTER_MEM_CHUNK_ROUTINE() g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () + 1))
# define LEAVE_MEM_CHUNK_ROUTINE() g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () - 1))
#endif /* !G_DISABLE_CHECKS */
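
/* The per-thread recursion counter lets profiler_log() below tell whether an
 * allocation was requested directly by user code (count == 0) or internally
 * by a mem chunk routine, so the bytes get attributed either to the malloc
 * table or to the MemChunk counters.
 */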
#ifndef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  if (!mem)
    return malloc (n_bytes);
  return realloc (mem, n_bytes);
}
#endif /* !REALLOC_0_WORKS */
#ifdef SANE_MALLOC_PROTOS
# define standard_malloc malloc
# ifdef REALLOC_0_WORKS
#  define standard_realloc realloc
# endif /* REALLOC_0_WORKS */
# define standard_free free
# define standard_calloc calloc
# define standard_try_malloc malloc
# define standard_try_realloc realloc
#else /* !SANE_MALLOC_PROTOS */
static gpointer
standard_malloc (gsize n_bytes)
{
  return malloc (n_bytes);
}
# ifdef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  return realloc (mem, n_bytes);
}
# endif /* REALLOC_0_WORKS */
static void
standard_free (gpointer mem)
{
  free (mem);
}
static gpointer
standard_calloc (gsize n_blocks,
                 gsize n_bytes)
{
  return calloc (n_blocks, n_bytes);
}
#define standard_try_malloc standard_malloc
#define standard_try_realloc standard_realloc
#endif /* !SANE_MALLOC_PROTOS */
/* --- variables --- */
static GMemVTable glib_mem_vtable = {
  standard_malloc,
  standard_realloc,
  standard_free,
  standard_calloc,
  standard_try_malloc,
  standard_try_realloc,
};

/* --- functions --- */
g_malloc (gulong n_bytes)
      mem = glib_mem_vtable.malloc (n_bytes);
      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);

g_malloc0 (gulong n_bytes)
      mem = glib_mem_vtable.calloc (1, n_bytes);
      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);

g_realloc (gpointer mem,
      mem = glib_mem_vtable.realloc (mem, n_bytes);
      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    glib_mem_vtable.free (mem);

g_free (gpointer mem)
    glib_mem_vtable.free (mem);

g_try_malloc (gulong n_bytes)
  return glib_mem_vtable.try_malloc (n_bytes);

g_try_realloc (gpointer mem,
    return glib_mem_vtable.try_realloc (mem, n_bytes);
    glib_mem_vtable.free (mem);
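
/* fallback_calloc() emulates calloc() on top of the vtable's malloc plus a
 * zeroing memset; g_mem_set_vtable() installs it when the supplied vtable
 * does not provide its own calloc implementation.
 */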
fallback_calloc (gsize n_blocks,
  gsize l = n_blocks * n_block_bytes;
  gpointer mem = glib_mem_vtable.malloc (l);
static gboolean vtable_set = FALSE;

/**
 * g_mem_vtable_is_set:
 *
 * Checks whether a custom vtable has been set by g_mem_set_vtable().
 * If a custom vtable has not been set, memory allocated with
 * malloc() can be used interchangeably with memory allocated using
 * g_malloc().  This function is useful for avoiding an extra copy
 * of allocated memory returned by a non-GLib-based API.
 *
 * Return value: %TRUE if a custom vtable has been set.
 */
gboolean
g_mem_vtable_is_set (void)
{
  return vtable_set;
}
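
/* Illustrative sketch (not part of the library): using g_mem_vtable_is_set()
 * to decide whether a malloc()-allocated string returned by a non-GLib API
 * can be handed straight to g_free(), or must be copied into GLib memory
 * first.  some_api_get_string() is a hypothetical stand-in for any such API.
 */
#if 0
static gchar*
adopt_foreign_string (void)
{
  char *s = some_api_get_string ();     /* allocated with plain malloc() */

  if (!g_mem_vtable_is_set ())
    return s;                           /* safe to release later with g_free() */
  else
    {
      gchar *copy = g_strdup (s);       /* copy into GLib-managed memory */
      free (s);
      return copy;
    }
}
#endif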
void
g_mem_set_vtable (GMemVTable *vtable)
{
  if (!vtable_set)
    {
      vtable_set = TRUE;
      if (vtable->malloc && vtable->realloc && vtable->free)
        {
          glib_mem_vtable.malloc = vtable->malloc;
          glib_mem_vtable.realloc = vtable->realloc;
          glib_mem_vtable.free = vtable->free;
          glib_mem_vtable.calloc = vtable->calloc ? vtable->calloc : fallback_calloc;
          glib_mem_vtable.try_malloc = vtable->try_malloc ? vtable->try_malloc : glib_mem_vtable.malloc;
          glib_mem_vtable.try_realloc = vtable->try_realloc ? vtable->try_realloc : glib_mem_vtable.realloc;
        }
      else
        g_warning (G_STRLOC ": memory allocation vtable lacks one of malloc(), realloc() or free()");
    }
  else
    g_warning (G_STRLOC ": memory allocation vtable can only be set once at startup");
}
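
/* Illustrative sketch (not part of the library): installing a minimal custom
 * vtable that counts allocations while delegating to the C library.  Only the
 * three mandatory entries are provided; calloc, try_malloc and try_realloc
 * are left NULL so the fallbacks above are used.  The counter and function
 * names are hypothetical.  This must run before any other GLib call.
 */
#if 0
static gulong counting_allocs = 0;

static gpointer counting_malloc  (gsize n_bytes)               { counting_allocs++; return malloc (n_bytes); }
static gpointer counting_realloc (gpointer mem, gsize n_bytes) { return realloc (mem, n_bytes); }
static void     counting_free    (gpointer mem)                { free (mem); }

static GMemVTable counting_vtable = {
  counting_malloc,
  counting_realloc,
  counting_free,
  NULL,                 /* calloc      -> fallback_calloc */
  NULL,                 /* try_malloc  -> counting_malloc */
  NULL,                 /* try_realloc -> counting_realloc */
};

int
main (int argc, char *argv[])
{
  g_mem_set_vtable (&counting_vtable);
  /* ... use GLib as usual ... */
  return 0;
}
#endif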
/* --- memory profiling and checking --- */
#ifdef G_DISABLE_CHECKS
GMemVTable *glib_mem_profiler_table = &glib_mem_vtable;
#else /* !G_DISABLE_CHECKS */

static guint *profile_data = NULL;
static gulong profile_allocs = 0;
static gulong profile_mc_allocs = 0;
static gulong profile_zinit = 0;
static gulong profile_frees = 0;
static gulong profile_mc_frees = 0;
static GMutex *g_profile_mutex = NULL;
#ifdef G_ENABLE_DEBUG
static volatile gulong g_trap_free_size = 0;
static volatile gulong g_trap_realloc_size = 0;
static volatile gulong g_trap_malloc_size = 0;
#endif /* G_ENABLE_DEBUG */
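
/* PROFILE_TABLE() computes the base offset of one of eight sub-tables inside
 * profile_data[]: the three flag bits (alloc, realloc, success) select the
 * sub-table, each of which holds MEM_PROFILE_TABLE_SIZE + 1 per-size counters
 * (the last slot collects all block sizes >= MEM_PROFILE_TABLE_SIZE).
 */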
#define PROFILE_TABLE(f1,f2,f3)   ( ( ((f3) << 2) | ((f2) << 1) | (f1) ) * (MEM_PROFILE_TABLE_SIZE + 1))
profiler_log (ProfilerJob job,
  g_mutex_lock (g_profile_mutex);
      profile_data = standard_malloc ((MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));
      if (!profile_data)        /* memory system kiddin' me, eh? */
          g_mutex_unlock (g_profile_mutex);

  if (MEM_CHUNK_ROUTINE_COUNT () == 0)
      if (n_bytes < MEM_PROFILE_TABLE_SIZE)
        profile_data[n_bytes + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                              (job & PROFILER_RELOC) != 0,
        profile_data[MEM_PROFILE_TABLE_SIZE + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                                             (job & PROFILER_RELOC) != 0,
      if (job & PROFILER_ALLOC)
          profile_allocs += n_bytes;
          if (job & PROFILER_ZINIT)
            profile_zinit += n_bytes;
        profile_frees += n_bytes;
      if (job & PROFILER_ALLOC)
        profile_mc_allocs += n_bytes;
        profile_mc_frees += n_bytes;
  g_mutex_unlock (g_profile_mutex);
profile_print_locked (guint *local_data,
                      gboolean success)
  gboolean need_header = TRUE;

  for (i = 0; i <= MEM_PROFILE_TABLE_SIZE; i++)
      glong t_malloc = local_data[i + PROFILE_TABLE (1, 0, success)];
      glong t_realloc = local_data[i + PROFILE_TABLE (1, 1, success)];
      glong t_free = local_data[i + PROFILE_TABLE (0, 0, success)];
      glong t_refree = local_data[i + PROFILE_TABLE (0, 1, success)];

      if (!t_malloc && !t_realloc && !t_free && !t_refree)
      else if (need_header)
          g_print (" blocks of | allocated  | freed      | allocated  | freed      | n_bytes   \n");
          g_print ("  n_bytes  | n_times by | n_times by | n_times by | n_times by | remaining \n");
          g_print ("           | malloc()   | free()     | realloc()  | realloc()  |           \n");
          g_print ("===========|============|============|============|============|===========\n");

      if (i < MEM_PROFILE_TABLE_SIZE)
        g_print ("%10u | %10ld | %10ld | %10ld | %10ld |%+11ld\n",
                 i, t_malloc, t_free, t_realloc, t_refree,
                 (t_malloc - t_free + t_realloc - t_refree) * i);
      else if (i >= MEM_PROFILE_TABLE_SIZE)
        g_print ("    >%6u | %10ld | %10ld | %10ld | %10ld |        ***\n",
                 i, t_malloc, t_free, t_realloc, t_refree);

    g_print (" --- none ---\n");
  guint local_data[(MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0])];
  gulong local_allocs;
  gulong local_zinit;
  gulong local_frees;
  gulong local_mc_allocs;
  gulong local_mc_frees;

  g_mutex_lock (g_profile_mutex);

  local_allocs = profile_allocs;
  local_zinit = profile_zinit;
  local_frees = profile_frees;
  local_mc_allocs = profile_mc_allocs;
  local_mc_frees = profile_mc_frees;

      g_mutex_unlock (g_profile_mutex);

  memcpy (local_data, profile_data,
          (MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));

  g_mutex_unlock (g_profile_mutex);

  g_print ("GLib Memory statistics (successful operations):\n");
  profile_print_locked (local_data, TRUE);
  g_print ("GLib Memory statistics (failing operations):\n");
  profile_print_locked (local_data, FALSE);
  g_print ("Total bytes: allocated=%lu, zero-initialized=%lu (%.2f%%), freed=%lu (%.2f%%), remaining=%lu\n",
           local_allocs,
           local_zinit,
           ((gdouble) local_zinit) / local_allocs * 100.0,
           local_frees,
           ((gdouble) local_frees) / local_allocs * 100.0,
           local_allocs - local_frees);
  g_print ("MemChunk bytes: allocated=%lu, freed=%lu (%.2f%%), remaining=%lu\n",
           local_mc_allocs,
           local_mc_frees,
           ((gdouble) local_mc_frees) / local_mc_allocs * 100.0,
           local_mc_allocs - local_mc_frees);
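
/* The profiling allocators below prefix every block with two gulongs of
 * bookkeeping: p[0] counts how often the block has been freed (to diagnose
 * double frees) and p[1] records the requested size; the caller is handed
 * p + 2.
 */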
profiler_try_malloc (gsize n_bytes)
#ifdef G_ENABLE_DEBUG
  if (g_trap_malloc_size == n_bytes)
#endif /* G_ENABLE_DEBUG */

  p = standard_malloc (sizeof (gulong) * 2 + n_bytes);
      p[0] = 0;         /* free count */
      p[1] = n_bytes;   /* length */
      profiler_log (PROFILER_ALLOC, n_bytes, TRUE);
    profiler_log (PROFILER_ALLOC, n_bytes, FALSE);
profiler_malloc (gsize n_bytes)
  gpointer mem = profiler_try_malloc (n_bytes);

profiler_calloc (gsize n_blocks,
  gsize l = n_blocks * n_block_bytes;

#ifdef G_ENABLE_DEBUG
  if (g_trap_malloc_size == l)
#endif /* G_ENABLE_DEBUG */

  p = standard_calloc (1, sizeof (gulong) * 2 + l);
      p[0] = 0;         /* free count */
      p[1] = l;         /* length */
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, TRUE);
    profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, FALSE);

profiler_free (gpointer mem)
  if (p[0])             /* free count */
      g_warning ("free(%p): memory has been freed %lu times already", p + 2, p[0]);
      profiler_log (PROFILER_FREE,
#ifdef G_ENABLE_DEBUG
      if (g_trap_free_size == p[1])
#endif /* G_ENABLE_DEBUG */
      profiler_log (PROFILER_FREE,
      memset (p + 2, 0xaa, p[1]);
      /* For all those who miss a standard_free (p); at this point: yes,
       * we do leak all memory when profiling, and that is intentional
       * so that double frees can be caught.  Patch submissions are futile.
       */
profiler_try_realloc (gpointer mem,
#ifdef G_ENABLE_DEBUG
  if (g_trap_realloc_size == n_bytes)
#endif /* G_ENABLE_DEBUG */

  if (mem && p[0])      /* free count */
      g_warning ("realloc(%p, %u): memory has been freed %lu times already", p + 2, n_bytes, p[0]);
      profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      p = standard_realloc (mem ? p : NULL, sizeof (gulong) * 2 + n_bytes);
          profiler_log (PROFILER_FREE | PROFILER_RELOC, p[1], TRUE);
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, p[1], TRUE);
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

profiler_realloc (gpointer mem,
  mem = profiler_try_realloc (mem, n_bytes);
static GMemVTable profiler_table = {
  profiler_malloc,
  profiler_realloc,
  profiler_free,
  profiler_calloc,
  profiler_try_malloc,
  profiler_try_realloc,
};
GMemVTable *glib_mem_profiler_table = &profiler_table;

#endif /* !G_DISABLE_CHECKS */
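
/* Illustrative sketch (not part of the library): enabling the profiling
 * vtable and dumping the collected statistics.  g_mem_profile() is GLib's
 * public statistics printer built on the profile counters above; like any
 * vtable change, installing the profiler must happen before all other
 * GLib calls.
 */
#if 0
int
main (int argc, char *argv[])
{
  g_mem_set_vtable (glib_mem_profiler_table);

  /* ... run the program ... */

  g_mem_profile ();     /* print the per-size allocation/free tables */
  return 0;
}
#endif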
/* --- MemChunks --- */
typedef struct _GFreeAtom GFreeAtom;
typedef struct _GMemArea  GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;            /* the next mem area */
  GMemArea *prev;            /* the previous mem area */
  gulong index;              /* the current index into the "mem" array */
  gulong free;               /* the number of free bytes in this mem area */
  gulong allocated;          /* the number of atoms allocated from this area */
  gulong mark;               /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];  /* the mem array from which atoms get allocated
                              * the actual size of this array is determined by
                              * the mem chunk "area_size". ANSI says that it
                              * must be declared to be the maximum size it
                              * can possibly be (even though the actual size
                              * may be less (but no less than MEM_AREA_SIZE)).
                              */
};

struct _GMemChunk
{
  const gchar *name;         /* name of this MemChunk...used for debugging output */
  gint type;                 /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;        /* the number of memory areas */
  gint num_marked_areas;     /* the number of areas marked for deletion */
  guint atom_size;           /* the size of an atom */
  gulong area_size;          /* the size of a memory area */
  GMemArea *mem_area;        /* the current memory area */
  GMemArea *mem_areas;       /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;   /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;     /* the free atoms list */
  GTree *mem_tree;           /* tree of mem areas sorted by memory address */
  GMemChunk *next;           /* pointer to the next chunk */
  GMemChunk *prev;           /* pointer to the previous chunk */
};
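
/* Illustrative sketch (not part of the library): typical use of the MemChunk
 * API declared in gmem.h for fixed-size allocations.  The chunk name and
 * sizes are arbitrary example values.
 */
#if 0
typedef struct { gdouble x, y; } Point;

static void
mem_chunk_example (void)
{
  GMemChunk *chunk;
  Point *p;

  /* atoms of sizeof (Point) bytes, roughly 128 atoms per underlying area */
  chunk = g_mem_chunk_new ("point chunk", sizeof (Point),
                           sizeof (Point) * 128, G_ALLOC_AND_FREE);

  p = g_mem_chunk_alloc (chunk);        /* uninitialized atom */
  p->x = 1.0;
  p->y = 2.0;
  g_mem_chunk_free (chunk, p);          /* only valid for G_ALLOC_AND_FREE chunks */

  p = g_mem_chunk_alloc0 (chunk);       /* zero-initialized atom */
  g_mem_chunk_free (chunk, p);

  g_mem_chunk_destroy (chunk);          /* releases all areas owned by the chunk */
}
#endif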
#ifndef DISABLE_MEM_POOLS
static gulong g_mem_chunk_compute_size (gulong    size,
                                        gulong    min_size) G_GNUC_CONST;
static gint   g_mem_chunk_area_compare (GMemArea *a,
                                        GMemArea *b);
static gint   g_mem_chunk_area_search  (GMemArea *a,
                                        gchar    *addr);

/* Here we can't use StaticMutexes, as they depend upon a working
 * g_malloc; the same holds true for StaticPrivate.
 */
static GMutex    *mem_chunks_lock = NULL;
static GMemChunk *mem_chunks = NULL;
g_mem_chunk_new (const gchar *name,
  GMemChunk *mem_chunk;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = g_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();
g_mem_chunk_destroy (GMemChunk *mem_chunk)
  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
      temp_area = mem_areas;
      mem_areas = mem_areas->next;

    mem_chunk->next->prev = mem_chunk->prev;
    mem_chunk->prev->next = mem_chunk->next;

  g_mutex_lock (mem_chunks_lock);
  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  LEAVE_MEM_CHUNK_ROUTINE ();
g_mem_chunk_alloc (GMemChunk *mem_chunk)
  ENTER_MEM_CHUNK_ROUTINE ();

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  while (mem_chunk->free_atoms)
      /* Get the first piece of memory on the "free_atoms" list.
       * We can go ahead and destroy the list node we used to keep
       * track of it and update the "free_atoms" list to point to
       * its next element.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
      /* If the area has been marked, then it is being destroyed
       * (i.e. marked to be destroyed).
       * We check to see if all of the segments on the free list that
       * reference this area have been removed.  This occurs when
       * the amount of free memory is less than the allocatable size.
       * If the chunk should be freed, then we place it in the "free_mem_area".
       * This is so we make sure not to free the mem area here and then
       * allocate it again a few lines down.
       * If we don't allocate a chunk a few lines down then the "free_mem_area"
       * will be freed.
       * If there is already a "free_mem_area" then we'll just free this mem area.
       */
      /* Update the "free" memory available in that area */
      temp_area->free += mem_chunk->atom_size;

      if (temp_area->free == mem_chunk->area_size)
          if (temp_area == mem_chunk->mem_area)
            mem_chunk->mem_area = NULL;

          if (mem_chunk->free_mem_area)
              mem_chunk->num_mem_areas -= 1;

                temp_area->next->prev = temp_area->prev;
                temp_area->prev->next = temp_area->next;
              if (temp_area == mem_chunk->mem_areas)
                mem_chunk->mem_areas = mem_chunk->mem_areas->next;

              if (mem_chunk->type == G_ALLOC_AND_FREE)
                g_tree_remove (mem_chunk->mem_tree, temp_area);

            mem_chunk->free_mem_area = temp_area;

          mem_chunk->num_marked_areas -= 1;

      /* Update the number of allocated atoms count.
       */
      temp_area->allocated += 1;

      /* The area wasn't marked...return the memory
       */

  /* If there isn't a current mem area or the current mem area is out of space
   * then allocate a new mem area.  We'll first check and see if we can use
   * the "free_mem_area".  Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
      if (mem_chunk->free_mem_area)
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;

#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

      mem_chunk->num_mem_areas += 1;
      mem_chunk->mem_area->next = mem_chunk->mem_areas;
      mem_chunk->mem_area->prev = NULL;

      if (mem_chunk->mem_areas)
        mem_chunk->mem_areas->prev = mem_chunk->mem_area;
      mem_chunk->mem_areas = mem_chunk->mem_area;

      if (mem_chunk->type == G_ALLOC_AND_FREE)
        g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

  LEAVE_MEM_CHUNK_ROUTINE ();
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
  mem = g_mem_chunk_alloc (mem_chunk);
    memset (mem, 0, mem_chunk->atom_size);
g_mem_chunk_free (GMemChunk *mem_chunk,
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
          mem_chunk->num_marked_areas += 1;

  LEAVE_MEM_CHUNK_ROUTINE ();
/* This doesn't free the free_area if there is one */
g_mem_chunk_clean (GMemChunk *mem_chunk)
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) g_mem_chunk_area_search,

          /* If this mem area is marked for destruction then delete the
           * area and list node and decrement the free mem.
           */
                prev_free_atom->next = temp_free_atom->next;
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                    mem_area->next->prev = mem_area->prev;
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);

              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;

  LEAVE_MEM_CHUNK_ROUTINE ();
g_mem_chunk_reset (GMemChunk *mem_chunk)
  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

      temp_area = mem_areas;
      mem_areas = mem_areas->next;

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    g_tree_destroy (mem_chunk->mem_tree);
  mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);

  LEAVE_MEM_CHUNK_ROUTINE ();
g_mem_chunk_print (GMemChunk *mem_chunk)
  GMemArea *mem_areas;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;

  g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
         "%s: %ld bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
g_mem_chunk_info (void)
  GMemChunk *mem_chunk;

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
      mem_chunk = mem_chunk->next;
  g_mutex_unlock (mem_chunks_lock);

  g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

      g_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
g_blow_chunks (void)
  GMemChunk *mem_chunk;

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

      g_mem_chunk_clean ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
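
/* g_mem_chunk_compute_size() rounds a requested area size to a power of two:
 * it picks whichever of the two neighbouring powers of two is closer to
 * "size", but never returns a value below "min_size".
 */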
g_mem_chunk_compute_size (gulong size,
  gulong lower, upper;

  while (power_of_2 < size)
  lower = power_of_2 >> 1;

  if (size - lower < upper - size && lower >= min_size)

g_mem_chunk_area_compare (GMemArea *a,
  if (a->mem > b->mem)
  else if (a->mem < b->mem)

g_mem_chunk_area_search (GMemArea *a,
  if (addr < &a->mem[a->index])
#else /* DISABLE_MEM_POOLS */
typedef struct {
  guint alloc_size;          /* the size of an atom */
} GMinimalMemChunk;
g_mem_chunk_new (const gchar *name,
  GMinimalMemChunk *mem_chunk;

  g_return_val_if_fail (atom_size > 0, NULL);

  mem_chunk = g_new (GMinimalMemChunk, 1);
  mem_chunk->alloc_size = atom_size;

  return ((GMemChunk*) mem_chunk);

g_mem_chunk_destroy (GMemChunk *mem_chunk)
  g_return_if_fail (mem_chunk != NULL);

g_mem_chunk_alloc (GMemChunk *mem_chunk)
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc (minimal->alloc_size);

g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc0 (minimal->alloc_size);

g_mem_chunk_free (GMemChunk *mem_chunk,
  g_return_if_fail (mem_chunk != NULL);

void g_mem_chunk_clean (GMemChunk *mem_chunk) {}
void g_mem_chunk_reset (GMemChunk *mem_chunk) {}
void g_mem_chunk_print (GMemChunk *mem_chunk) {}
void g_mem_chunk_info (void) {}
void g_blow_chunks (void) {}

#endif /* DISABLE_MEM_POOLS */
/* generic allocators
 */
struct _GAllocator /* from gmem.c */
{
  gchar      *name;
  guint16     n_preallocs;
  guint       is_unused : 1;
  guint       type : 4;
  GAllocator *last;
  GMemChunk  *mem_chunk;
  gpointer    dummy;         /* implementation specific */
};

GAllocator*
g_allocator_new (const gchar *name,
                 guint        n_preallocs)
{
  GAllocator *allocator;

  g_return_val_if_fail (name != NULL, NULL);

  allocator = g_new0 (GAllocator, 1);
  allocator->name = g_strdup (name);
  allocator->n_preallocs = CLAMP (n_preallocs, 1, 65535);
  allocator->is_unused = TRUE;
  allocator->type = 0;
  allocator->last = NULL;
  allocator->mem_chunk = NULL;
  allocator->dummy = NULL;
g_allocator_free (GAllocator *allocator)
  g_return_if_fail (allocator != NULL);
  g_return_if_fail (allocator->is_unused == TRUE);

  g_free (allocator->name);
  if (allocator->mem_chunk)
    g_mem_chunk_destroy (allocator->mem_chunk);
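
/* Illustrative sketch (not part of the library): GAllocators are normally
 * handed to container code, e.g. via g_list_push_allocator(), so that GList
 * nodes come out of a dedicated MemChunk; an allocator can only be freed
 * once it is unused again.  The name and prealloc count below are arbitrary
 * example values.
 */
#if 0
static void
allocator_example (void)
{
  GAllocator *allocator = g_allocator_new ("list node allocator", 128);

  g_list_push_allocator (allocator);
  /* ... GList allocations in this scope use the allocator ... */
  g_list_pop_allocator ();

  g_allocator_free (allocator);
}
#endif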
#ifndef DISABLE_MEM_POOLS
  mem_chunks_lock = g_mutex_new ();
#ifndef G_DISABLE_CHECKS
  mem_chunk_recursion = g_private_new (NULL);
  g_profile_mutex = g_mutex_new ();