/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997  Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */
/*
 * MT safe
 */

#include "config.h"

#include <stdlib.h>
#include <string.h>
#include <signal.h>

#include "glib.h"
#include "gthreadinit.h"
/* notes on macros:
 * defining DISABLE_MEM_POOLS disables mem chunks altogether; their
 * allocations are performed through ordinary g_malloc()/g_free().
 * defining G_DISABLE_CHECKS disables use of glib_mem_profiler_table and
 * g_mem_profile().
 * REALLOC_0_WORKS is defined if g_realloc (NULL, x) works.
 * SANE_MALLOC_PROTOS is defined if the system's malloc() and friends
 * match the corresponding GLib prototypes; keep configure.in and gmem.h in sync here.
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */
#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L
#ifdef G_DISABLE_CHECKS
#  define ENTER_MEM_CHUNK_ROUTINE()
#  define LEAVE_MEM_CHUNK_ROUTINE()
#  define IN_MEM_CHUNK_ROUTINE()  FALSE
#else /* !G_DISABLE_CHECKS */
static GPrivate* mem_chunk_recursion = NULL;
#  define MEM_CHUNK_ROUTINE_COUNT()  GPOINTER_TO_UINT (g_private_get (mem_chunk_recursion))
#  define ENTER_MEM_CHUNK_ROUTINE()  g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () + 1))
#  define LEAVE_MEM_CHUNK_ROUTINE()  g_private_set (mem_chunk_recursion, GUINT_TO_POINTER (MEM_CHUNK_ROUTINE_COUNT () - 1))
#endif /* !G_DISABLE_CHECKS */
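
/* Illustrative note (not in the original source): this per-thread
 * recursion counter lets profiler_log() tell mem-chunk-internal
 * allocations apart from user-level ones.  Any allocation performed
 * while MEM_CHUNK_ROUTINE_COUNT () > 0 is booked against the MemChunk
 * totals rather than the user totals, e.g.:
 *
 *   ENTER_MEM_CHUNK_ROUTINE ();
 *   mem = g_malloc (42);        // profiled as a MemChunk allocation
 *   LEAVE_MEM_CHUNK_ROUTINE ();
 */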
#ifndef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  if (!mem)
    return malloc (n_bytes);
  else
    return realloc (mem, n_bytes);
}
#endif /* !REALLOC_0_WORKS */
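
/* Illustrative note (not in the original source): with the wrapper
 * above, g_realloc (NULL, 16) behaves like g_malloc (16) even on
 * systems whose C library realloc() cannot handle a NULL pointer.
 */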
#ifdef SANE_MALLOC_PROTOS
#  define standard_malloc      malloc
#  ifdef REALLOC_0_WORKS
#    define standard_realloc   realloc
#  endif /* REALLOC_0_WORKS */
#  define standard_free        free
#  define standard_calloc      calloc
#  define standard_try_malloc  malloc
#  define standard_try_realloc realloc
#else /* !SANE_MALLOC_PROTOS */
static gpointer
standard_malloc (gsize n_bytes)
{
  return malloc (n_bytes);
}
#  ifdef REALLOC_0_WORKS
static gpointer
standard_realloc (gpointer mem,
                  gsize    n_bytes)
{
  return realloc (mem, n_bytes);
}
#  endif /* REALLOC_0_WORKS */
static void
standard_free (gpointer mem)
{
  free (mem);
}
static gpointer
standard_calloc (gsize n_blocks,
                 gsize n_bytes)
{
  return calloc (n_blocks, n_bytes);
}
#define standard_try_malloc  standard_malloc
#define standard_try_realloc standard_realloc
#endif /* !SANE_MALLOC_PROTOS */

/* --- variables --- */
static GMemVTable glib_mem_vtable = {
  standard_malloc,
  standard_realloc,
  standard_free,
  standard_calloc,
  standard_try_malloc,
  standard_try_realloc,
};

/* --- functions --- */
gpointer
g_malloc (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.malloc (n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}
gpointer
g_malloc0 (gulong n_bytes)
{
  if (n_bytes)
    {
      gpointer mem;

      mem = glib_mem_vtable.calloc (1, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  return NULL;
}
gpointer
g_realloc (gpointer mem,
           gulong   n_bytes)
{
  if (n_bytes)
    {
      mem = glib_mem_vtable.realloc (mem, n_bytes);
      if (mem)
        return mem;

      g_error ("%s: failed to allocate %lu bytes", G_STRLOC, n_bytes);
    }

  /* a zero-byte realloc frees the memory instead */
  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}
void
g_free (gpointer mem)
{
  if (mem)
    glib_mem_vtable.free (mem);
}
gpointer
g_try_malloc (gulong n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_malloc (n_bytes);
  else
    return NULL;
}
gpointer
g_try_realloc (gpointer mem,
               gulong   n_bytes)
{
  if (n_bytes)
    return glib_mem_vtable.try_realloc (mem, n_bytes);

  if (mem)
    glib_mem_vtable.free (mem);

  return NULL;
}
static gpointer
fallback_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gpointer mem = glib_mem_vtable.malloc (l);

  if (mem)
    memset (mem, 0, l);

  return mem;
}
static gboolean vtable_set = FALSE;

/**
 * g_mem_is_system_malloc
 *
 * Checks whether the allocator used by g_malloc() is the system's
 * malloc implementation. If it returns %TRUE memory allocated with
 * malloc() can be used interchangeably with memory allocated using g_malloc().
 * This function is useful for avoiding an extra copy of allocated memory returned
 * by a non-GLib-based API.
 *
 * A different allocator can be set using g_mem_set_vtable().
 *
 * Return value: if %TRUE, malloc() and g_malloc() can be mixed.
 **/
gboolean
g_mem_is_system_malloc (void)
{
  return !vtable_set;
}
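
/* Usage sketch (not part of the original source): a caller that wants
 * to hand malloc()ed memory from a non-GLib API to g_free() can guard
 * the handover like this; the helper names are hypothetical.
 *
 *   char *buf = non_glib_api_returning_malloced_string ();
 *   if (g_mem_is_system_malloc ())
 *     take_ownership (buf);               // g_free (buf) later is fine
 *   else
 *     {
 *       take_ownership (g_strdup (buf));  // copy into GLib's allocator
 *       free (buf);
 *     }
 */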
void
g_mem_set_vtable (GMemVTable *vtable)
{
  if (!vtable_set)
    {
      vtable_set = TRUE;
      if (vtable->malloc && vtable->realloc && vtable->free)
        {
          glib_mem_vtable.malloc = vtable->malloc;
          glib_mem_vtable.realloc = vtable->realloc;
          glib_mem_vtable.free = vtable->free;
          glib_mem_vtable.calloc = vtable->calloc ? vtable->calloc : fallback_calloc;
          glib_mem_vtable.try_malloc = vtable->try_malloc ? vtable->try_malloc : glib_mem_vtable.malloc;
          glib_mem_vtable.try_realloc = vtable->try_realloc ? vtable->try_realloc : glib_mem_vtable.realloc;
        }
      else
        g_warning (G_STRLOC ": memory allocation vtable lacks one of malloc(), realloc() or free()");
    }
  else
    g_warning (G_STRLOC ": memory allocation vtable can only be set once at startup");
}
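
/* Usage sketch (not part of the original source): installing a custom
 * allocator before any other GLib call; unset hooks fall back as coded
 * above (calloc -> fallback_calloc, try_* -> their plain counterparts).
 * my_malloc/my_realloc/my_free are hypothetical user functions.
 *
 *   static GMemVTable my_vtable = {
 *     my_malloc,    // gpointer (*) (gsize)
 *     my_realloc,   // gpointer (*) (gpointer, gsize)
 *     my_free,      // void (*) (gpointer)
 *     NULL, NULL, NULL,
 *   };
 *   ...
 *   g_mem_set_vtable (&my_vtable);  // must happen once, at startup
 */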

/* --- memory profiling and checking --- */
#ifdef G_DISABLE_CHECKS
GMemVTable *glib_mem_profiler_table = &glib_mem_vtable;
void
g_mem_profile (void)
{
}
#else /* !G_DISABLE_CHECKS */
typedef enum {
  PROFILER_FREE  = 0,
  PROFILER_ALLOC = 1,
  PROFILER_RELOC = 2,
  PROFILER_ZINIT = 4
} ProfilerJob;
static guint *profile_data = NULL;
static gulong profile_allocs = 0;
static gulong profile_mc_allocs = 0;
static gulong profile_zinit = 0;
static gulong profile_frees = 0;
static gulong profile_mc_frees = 0;
static GMutex *g_profile_mutex = NULL;
#ifdef G_ENABLE_DEBUG
static volatile gulong g_trap_free_size = 0;
static volatile gulong g_trap_realloc_size = 0;
static volatile gulong g_trap_malloc_size = 0;
#endif /* G_ENABLE_DEBUG */

#define PROFILE_TABLE(f1,f2,f3)  ( ( ((f3) << 2) | ((f2) << 1) | (f1) ) * (MEM_PROFILE_TABLE_SIZE + 1))
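
/* Illustrative note (not in the original source): profile_data holds
 * eight sub-tables of MEM_PROFILE_TABLE_SIZE + 1 counters each, one per
 * combination of the three flag bits (f1 = alloc, f2 = reloc,
 * f3 = success).  For example, a successful malloc of 10 bytes bumps
 * profile_data[10 + PROFILE_TABLE (1, 0, 1)], i.e. slot 10 of sub-table
 * (1 << 2) | (0 << 1) | 1 = 5.
 */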

static void
profiler_log (ProfilerJob job,
              gulong      n_bytes,
              gboolean    success)
{
  g_mutex_lock (g_profile_mutex);
  if (!profile_data)
    {
      profile_data = standard_malloc ((MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));
      if (!profile_data)        /* memory system kiddin' me, eh? */
        {
          g_mutex_unlock (g_profile_mutex);
          return;
        }
    }

  if (MEM_CHUNK_ROUTINE_COUNT () == 0)
    {
      if (n_bytes < MEM_PROFILE_TABLE_SIZE)
        profile_data[n_bytes + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                              (job & PROFILER_RELOC) != 0,
                                              success != 0)] += 1;
      else
        profile_data[MEM_PROFILE_TABLE_SIZE + PROFILE_TABLE ((job & PROFILER_ALLOC) != 0,
                                                             (job & PROFILER_RELOC) != 0,
                                                             success != 0)] += 1;
      if (success)
        {
          if (job & PROFILER_ALLOC)
            {
              profile_allocs += n_bytes;
              if (job & PROFILER_ZINIT)
                profile_zinit += n_bytes;
            }
          else
            profile_frees += n_bytes;
        }
    }
  else if (success)
    {
      if (job & PROFILER_ALLOC)
        profile_mc_allocs += n_bytes;
      else
        profile_mc_frees += n_bytes;
    }
  g_mutex_unlock (g_profile_mutex);
}

static void
profile_print_locked (guint   *local_data,
                      gboolean success)
{
  gboolean need_header = TRUE;
  guint i;

  for (i = 0; i <= MEM_PROFILE_TABLE_SIZE; i++)
    {
      glong t_malloc = local_data[i + PROFILE_TABLE (1, 0, success)];
      glong t_realloc = local_data[i + PROFILE_TABLE (1, 1, success)];
      glong t_free = local_data[i + PROFILE_TABLE (0, 0, success)];
      glong t_refree = local_data[i + PROFILE_TABLE (0, 1, success)];

      if (!t_malloc && !t_realloc && !t_free && !t_refree)
        continue;
      else if (need_header)
        {
          need_header = FALSE;
          g_print (" blocks of | allocated  | freed      | allocated  | freed      | n_bytes   \n");
          g_print ("  n_bytes  | n_times by | n_times by | n_times by | n_times by | remaining \n");
          g_print ("           | malloc()   | free()     | realloc()  | realloc()  |           \n");
          g_print ("===========|============|============|============|============|===========\n");
        }
      if (i < MEM_PROFILE_TABLE_SIZE)
        g_print ("%10u | %10ld | %10ld | %10ld | %10ld |%+11ld\n",
                 i, t_malloc, t_free, t_realloc, t_refree,
                 (t_malloc - t_free + t_realloc - t_refree) * i);
      else /* i == MEM_PROFILE_TABLE_SIZE */
        g_print ("   >%6u | %10ld | %10ld | %10ld | %10ld |        ***\n",
                 i, t_malloc, t_free, t_realloc, t_refree);
    }
  if (need_header)
    g_print (" --- none ---\n");
}

void
g_mem_profile (void)
{
  guint local_data[(MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0])];
  gulong local_allocs;
  gulong local_zinit;
  gulong local_frees;
  gulong local_mc_allocs;
  gulong local_mc_frees;

  g_mutex_lock (g_profile_mutex);

  local_allocs = profile_allocs;
  local_zinit = profile_zinit;
  local_frees = profile_frees;
  local_mc_allocs = profile_mc_allocs;
  local_mc_frees = profile_mc_frees;

  if (!profile_data)
    {
      g_mutex_unlock (g_profile_mutex);
      return;
    }

  memcpy (local_data, profile_data,
          (MEM_PROFILE_TABLE_SIZE + 1) * 8 * sizeof (profile_data[0]));

  g_mutex_unlock (g_profile_mutex);

  g_print ("GLib Memory statistics (successful operations):\n");
  profile_print_locked (local_data, TRUE);
  g_print ("GLib Memory statistics (failing operations):\n");
  profile_print_locked (local_data, FALSE);
  g_print ("Total bytes: allocated=%lu, zero-initialized=%lu (%.2f%%), freed=%lu (%.2f%%), remaining=%lu\n",
           local_allocs,
           local_zinit,
           ((gdouble) local_zinit) / local_allocs * 100.0,
           local_frees,
           ((gdouble) local_frees) / local_allocs * 100.0,
           local_allocs - local_frees);
  g_print ("MemChunk bytes: allocated=%lu, freed=%lu (%.2f%%), remaining=%lu\n",
           local_mc_allocs,
           local_mc_frees,
           ((gdouble) local_mc_frees) / local_mc_allocs * 100.0,
           local_mc_allocs - local_mc_frees);
}

static gpointer
profiler_try_malloc (gsize n_bytes)
{
  gulong *p;

#ifdef G_ENABLE_DEBUG
  if (g_trap_malloc_size == n_bytes)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  /* prepend two bookkeeping words to every allocation */
  p = standard_malloc (sizeof (gulong) * 2 + n_bytes);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = n_bytes;   /* length */
      profiler_log (PROFILER_ALLOC, n_bytes, TRUE);
      p += 2;
    }
  else
    profiler_log (PROFILER_ALLOC, n_bytes, FALSE);

  return p;
}
static gpointer
profiler_malloc (gsize n_bytes)
{
  gpointer mem = profiler_try_malloc (n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}
static gpointer
profiler_calloc (gsize n_blocks,
                 gsize n_block_bytes)
{
  gsize l = n_blocks * n_block_bytes;
  gulong *p;

#ifdef G_ENABLE_DEBUG
  if (g_trap_malloc_size == l)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  p = standard_calloc (1, sizeof (gulong) * 2 + l);

  if (p)
    {
      p[0] = 0;         /* free count */
      p[1] = l;         /* length */
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, TRUE);
      p += 2;
    }
  else
    {
      profiler_log (PROFILER_ALLOC | PROFILER_ZINIT, l, FALSE);
      g_mem_profile ();
    }

  return p;
}

static void
profiler_free (gpointer mem)
{
  gulong *p = mem;

  p -= 2;
  if (p[0])     /* free count */
    {
      g_warning ("free(%p): memory has been freed %lu times already", p + 2, p[0]);
      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    FALSE);
    }
  else
    {
#ifdef G_ENABLE_DEBUG
      if (g_trap_free_size == p[1])
        G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

      profiler_log (PROFILER_FREE,
                    p[1],       /* length */
                    TRUE);
      memset (p + 2, 0xaa, p[1]);

      /* for all those that miss standard_free (p); in this place, yes,
       * we do leak all memory when profiling, and that is intentional
       * to catch double frees. patch submissions are futile.
       */
    }
  p[0] += 1;
}

static gpointer
profiler_try_realloc (gpointer mem,
                      gsize    n_bytes)
{
  gulong *p = mem;

  p -= 2;

#ifdef G_ENABLE_DEBUG
  if (g_trap_realloc_size == n_bytes)
    G_BREAKPOINT ();
#endif /* G_ENABLE_DEBUG */

  if (mem && p[0])      /* free count */
    {
      g_warning ("realloc(%p, %lu): memory has been freed %lu times already", p + 2, (gulong)n_bytes, p[0]);
      profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return NULL;
    }
  else
    {
      p = standard_realloc (mem ? p : NULL, sizeof (gulong) * 2 + n_bytes);

      if (p)
        {
          if (mem)
            profiler_log (PROFILER_FREE | PROFILER_RELOC, p[1], TRUE);
          p[0] = 0;
          p[1] = n_bytes;
          profiler_log (PROFILER_ALLOC | PROFILER_RELOC, p[1], TRUE);
          p += 2;
        }
      else
        profiler_log (PROFILER_ALLOC | PROFILER_RELOC, n_bytes, FALSE);

      return p;
    }
}

static gpointer
profiler_realloc (gpointer mem,
                  gsize    n_bytes)
{
  mem = profiler_try_realloc (mem, n_bytes);

  if (!mem)
    g_mem_profile ();

  return mem;
}

static GMemVTable profiler_table = {
  profiler_malloc,
  profiler_realloc,
  profiler_free,
  profiler_calloc,
  profiler_try_malloc,
  profiler_try_realloc,
};
GMemVTable *glib_mem_profiler_table = &profiler_table;
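
/* Usage sketch (not part of the original source): profiling is enabled
 * by installing this vtable before any allocation, then dumping the
 * statistics at exit:
 *
 *   int
 *   main (int argc, char **argv)
 *   {
 *     g_mem_set_vtable (glib_mem_profiler_table);
 *     g_atexit (g_mem_profile);
 *     ...
 *   }
 */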

#endif /* !G_DISABLE_CHECKS */

/* --- MemChunks --- */
typedef struct _GFreeAtom GFreeAtom;
typedef struct _GMemArea  GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;            /* the next mem area */
  GMemArea *prev;            /* the previous mem area */
  gulong index;              /* the current index into the "mem" array */
  gulong free;               /* the number of free bytes in this mem area */
  gulong allocated;          /* the number of atoms allocated from this area */
  gulong mark;               /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];  /* the mem array from which atoms get allocated
                              * the actual size of this array is determined by
                              * the mem chunk "area_size". ANSI says that it
                              * must be declared to be the maximum size it
                              * can possibly be (even though the actual size
                              * may be less).
                              */
};

struct _GMemChunk
{
  const gchar *name;         /* name of this MemChunk...used for debugging output */
  gint type;                 /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;        /* the number of memory areas */
  gint num_marked_areas;     /* the number of areas marked for deletion */
  guint atom_size;           /* the size of an atom */
  gulong area_size;          /* the size of a memory area */
  GMemArea *mem_area;        /* the current memory area */
  GMemArea *mem_areas;       /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;   /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;     /* the free atoms list */
  GTree *mem_tree;           /* tree of mem areas sorted by memory address */
  GMemChunk *next;           /* pointer to the next chunk */
  GMemChunk *prev;           /* pointer to the previous chunk */
};
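
/* Usage sketch (not part of the original source): a chunk serves
 * fixed-size atoms; G_ALLOC_AND_FREE keeps the mem_tree so individual
 * atoms can be returned, while G_ALLOC_ONLY chunks can only be freed
 * en bloc.  MyNode is a hypothetical fixed-size struct.
 *
 *   GMemChunk *chunk = g_mem_chunk_new ("node chunk",
 *                                       sizeof (MyNode),        // atom size
 *                                       sizeof (MyNode) * 128,  // area size
 *                                       G_ALLOC_AND_FREE);
 *   MyNode *node = g_mem_chunk_alloc0 (chunk);
 *   ...
 *   g_mem_chunk_free (chunk, node);
 *   g_mem_chunk_destroy (chunk);
 */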

#ifndef DISABLE_MEM_POOLS
static gulong g_mem_chunk_compute_size (gulong    size,
                                        gulong    min_size) G_GNUC_CONST;
static gint   g_mem_chunk_area_compare (GMemArea *a,
                                        GMemArea *b);
static gint   g_mem_chunk_area_search  (GMemArea *a,
                                        gchar    *addr);

/* here we can't use StaticMutexes, as they depend upon a working
 * g_malloc, the same holds true for StaticPrivate
 */
static GMutex    *mem_chunks_lock = NULL;
static GMemChunk *mem_chunks = NULL;

GMemChunk*
g_mem_chunk_new (const gchar *name,
                 gint         atom_size,
                 gulong       area_size,
                 gint         type)
{
  GMemChunk *mem_chunk;
  gulong rarea_size;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;
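
  /* Illustrative note (not in the original source): the two lines above
   * round area_size up to a whole number of atoms, e.g. with
   * atom_size = 12 and area_size = 100 we get (100 + 11) / 12 = 9 atoms,
   * hence area_size = 108.
   */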

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = g_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
  if (mem_chunks)
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem_chunk;
}

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  g_mutex_lock (mem_chunks_lock);
  if (mem_chunk->next)
    mem_chunk->next->prev = mem_chunk->prev;
  if (mem_chunk->prev)
    mem_chunk->prev->next = mem_chunk->next;

  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  g_free (mem_chunk);

  LEAVE_MEM_CHUNK_ROUTINE ();
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMemArea *temp_area;
  gpointer mem;

  ENTER_MEM_CHUNK_ROUTINE ();

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  while (mem_chunk->free_atoms)
    {
      /* Get the first piece of memory on the "free_atoms" list.
       * We can go ahead and destroy the list node we used to keep
       * track of it with, and update the "free_atoms" list to
       * point to its next element.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      /* If the area has been marked, then it is being destroyed
       * (i.e. marked to be destroyed).
       * We check to see if all of the segments on the free list that
       * reference this area have been removed. This occurs when
       * the amount of free memory is less than the allocatable size.
       * If the chunk should be freed, then we place it in the "free_mem_area".
       * This is so we make sure not to free the mem area here and then
       * allocate it again a few lines down.
       * If we don't allocate a chunk a few lines down then the "free_mem_area"
       * will be freed.
       * If there is already a "free_mem_area" then we'll just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the number of allocated atoms count.
           */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory
           */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   * then allocate a new mem area. We'll first check and see if we can use
   * the "free_mem_area". Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;

          if (mem_chunk->mem_areas)
            mem_chunk->mem_areas->prev = mem_chunk->mem_area;
          mem_chunk->mem_areas = mem_chunk->mem_area;

          if (mem_chunk->type == G_ALLOC_AND_FREE)
            g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);
        }

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;
    }

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

outa_here:

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem;
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  gpointer mem;

  mem = g_mem_chunk_alloc (mem_chunk);
  if (mem)
    memset (mem, 0, mem_chunk->atom_size);

  return mem;
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  GMemArea *temp_area;
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) g_mem_chunk_area_search,
                                 mem);

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
        {
          temp_area->mark = 1;
          mem_chunk->num_marked_areas += 1;
        }
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

/* This doesn't free the free_area if there is one */
void
g_mem_chunk_clean (GMemChunk *mem_chunk)
{
  GMemArea *mem_area;
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;
  gpointer mem;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
        {
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) g_mem_chunk_area_search,
                                    mem);

          /* If this mem area is marked for destruction then delete the
           * area and list node and decrement the free mem.
           */
          if (mem_area->mark)
            {
              if (prev_free_atom)
                prev_free_atom->next = temp_free_atom->next;
              else
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                {
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                  if (mem_area->next)
                    mem_area->next->prev = mem_area->prev;
                  if (mem_area->prev)
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);
                  g_free (mem_area);
                }
            }
          else
            {
              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;
            }
        }
    }
  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_reset (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    {
      g_tree_destroy (mem_chunk->mem_tree);
      mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
g_mem_chunk_print (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  gulong mem;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
  mem = 0;

  while (mem_areas)
    {
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;
    }

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO,
         "%s: %ld bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
}

void
g_mem_chunk_info (void)
{
  GMemChunk *mem_chunk;
  gint count;

  count = 0;
  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  while (mem_chunk)
    {
      count += 1;
      mem_chunk = mem_chunk->next;
    }
  g_mutex_unlock (mem_chunks_lock);

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

  while (mem_chunk)
    {
      g_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

void
g_blow_chunks (void)
{
  GMemChunk *mem_chunk;

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

  while (mem_chunk)
    {
      g_mem_chunk_clean ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

static gulong
g_mem_chunk_compute_size (gulong size,
                          gulong min_size)
{
  gulong power_of_2;
  gulong lower, upper;

  power_of_2 = 16;
  while (power_of_2 < size)
    power_of_2 <<= 1;

  lower = power_of_2 >> 1;
  upper = power_of_2;

  if (size - lower < upper - size && lower >= min_size)
    return lower;
  else
    return upper;
}
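
/* Illustrative note (not in the original source): the area size is
 * snapped to a power of two, or to half of one when that is closer and
 * still large enough.  E.g. g_mem_chunk_compute_size (600, 20) gives
 * upper = 1024, lower = 512; since 600 - 512 = 88 < 1024 - 600 = 424
 * and 512 >= 20, it returns 512.
 */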

static gint
g_mem_chunk_area_compare (GMemArea *a,
                          GMemArea *b)
{
  if (a->mem > b->mem)
    return 1;
  else if (a->mem < b->mem)
    return -1;
  return 0;
}

static gint
g_mem_chunk_area_search (GMemArea *a,
                         gchar    *addr)
{
  if (a->mem <= addr)
    {
      if (addr < &a->mem[a->index])
        return 0;
      return 1;
    }
  return -1;
}

#else /* DISABLE_MEM_POOLS */

typedef struct
{
  guint alloc_size;          /* the size of an atom */
} GMinimalMemChunk;

GMemChunk*
g_mem_chunk_new (const gchar *name,
                 gint         atom_size,
                 gulong       area_size,
                 gint         type)
{
  GMinimalMemChunk *mem_chunk;

  g_return_val_if_fail (atom_size > 0, NULL);

  mem_chunk = g_new (GMinimalMemChunk, 1);
  mem_chunk->alloc_size = atom_size;

  return ((GMemChunk*) mem_chunk);
}

void
g_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem_chunk);
}

gpointer
g_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc (minimal->alloc_size);
}

gpointer
g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  GMinimalMemChunk *minimal = (GMinimalMemChunk *)mem_chunk;

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  return g_malloc0 (minimal->alloc_size);
}

void
g_mem_chunk_free (GMemChunk *mem_chunk,
                  gpointer   mem)
{
  g_return_if_fail (mem_chunk != NULL);

  g_free (mem);
}

void g_mem_chunk_clean (GMemChunk *mem_chunk) {}
void g_mem_chunk_reset (GMemChunk *mem_chunk) {}
void g_mem_chunk_print (GMemChunk *mem_chunk) {}
void g_mem_chunk_info  (void)                 {}
void g_blow_chunks     (void)                 {}

#endif /* DISABLE_MEM_POOLS */

/* generic allocators
 */
struct _GAllocator /* from gmem.c */
{
  gchar      *name;
  guint16     n_preallocs;
  guint       is_unused : 1;
  guint       type : 4;
  GAllocator *last;
  GMemChunk  *mem_chunk;
  gpointer    dummy; /* implementation specific */
};

GAllocator*
g_allocator_new (const gchar *name,
                 guint        n_preallocs)
{
  GAllocator *allocator;

  g_return_val_if_fail (name != NULL, NULL);

  allocator = g_new0 (GAllocator, 1);
  allocator->name = g_strdup (name);
  allocator->n_preallocs = CLAMP (n_preallocs, 1, 65535);
  allocator->is_unused = TRUE;
  allocator->type = 0;
  allocator->last = NULL;
  allocator->mem_chunk = NULL;
  allocator->dummy = NULL;

  return allocator;
}

void
g_allocator_free (GAllocator *allocator)
{
  g_return_if_fail (allocator != NULL);
  g_return_if_fail (allocator->is_unused == TRUE);

  g_free (allocator->name);
  if (allocator->mem_chunk)
    g_mem_chunk_destroy (allocator->mem_chunk);

  g_free (allocator);
}
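
/* Usage sketch (not part of the original source): GAllocators are
 * consumed by the list/node code rather than used directly, e.g.:
 *
 *   GAllocator *alloc = g_allocator_new ("my list allocator", 128);
 *   g_list_push_allocator (alloc);
 *   ...                              // g_list_* now uses this allocator
 *   g_list_pop_allocator ();
 *   g_allocator_free (alloc);
 */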

void
_g_mem_thread_init (void)
{
#ifndef DISABLE_MEM_POOLS
  mem_chunks_lock = g_mutex_new ();
#endif
#ifndef G_DISABLE_CHECKS
  g_profile_mutex = g_mutex_new ();
#endif
}

void
_g_mem_thread_private_init (void)
{
#ifndef G_DISABLE_CHECKS
  g_assert (mem_chunk_recursion == NULL);
  mem_chunk_recursion = g_private_new (NULL);
#endif
}