1 /* GLIB - Library of useful routines for C programming
2 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
23 /* #define ENABLE_MEM_PROFILE */
24 /* #define ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS */
25 /* #define ENABLE_MEM_CHECK */
26 #define MEM_PROFILE_TABLE_SIZE 8192
29 * This library can check for some attempts to do illegal things to
30 * memory (ENABLE_MEM_CHECK), and can do profiling
31 * (ENABLE_MEM_PROFILE). Both features are implemented by storing
32 * words before the start of the memory chunk.
34 * The first, at offset -2*SIZEOF_LONG, is used only if
35 * ENABLE_MEM_CHECK is set, and stores 0 after the memory has been
36 * allocated and 1 when it has been freed. The second, at offset
37 * -SIZEOF_LONG, is used if either flag is set and stores the size of
40 * The MEM_CHECK flag is checked when memory is realloc'd and free'd,
41 * and it can be explicitly checked before using a block by calling
45 #if defined(ENABLE_MEM_PROFILE) && defined(ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS)
46 #define ENTER_MEM_CHUNK_ROUTINE() allocating_for_mem_chunk++
47 #define LEAVE_MEM_CHUNK_ROUTINE() allocating_for_mem_chunk--
49 #define ENTER_MEM_CHUNK_ROUTINE()
50 #define LEAVE_MEM_CHUNK_ROUTINE()
54 #define MAX_MEM_AREA 65536L
55 #define MEM_AREA_SIZE 4L
57 #if SIZEOF_VOID_P > SIZEOF_LONG
58 #define MEM_ALIGN SIZEOF_VOID_P
60 #define MEM_ALIGN SIZEOF_LONG
64 typedef struct _GFreeAtom GFreeAtom;
65 typedef struct _GMemArea GMemArea;
66 typedef struct _GRealMemChunk GRealMemChunk;
75 GMemArea *next; /* the next mem area */
76 GMemArea *prev; /* the previous mem area */
77 gulong index; /* the current index into the "mem" array */
78 gulong free; /* the number of free bytes in this mem area */
79 gulong allocated; /* the number of atoms allocated from this area */
80 gulong mark; /* is this mem area marked for deletion */
81 gchar mem[MEM_AREA_SIZE]; /* the mem array from which atoms get allocated
82 * the actual size of this array is determined by
83 * the mem chunk "area_size". ANSI says that it
84 * must be declared to be the maximum size it
85 * can possibly be (even though the actual size
92 gchar *name; /* name of this MemChunk...used for debugging output */
93 gint type; /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
94 gint num_mem_areas; /* the number of memory areas */
95 gint num_marked_areas; /* the number of areas marked for deletion */
96 guint atom_size; /* the size of an atom */
97 gulong area_size; /* the size of a memory area */
98 GMemArea *mem_area; /* the current memory area */
99 GMemArea *mem_areas; /* a list of all the mem areas owned by this chunk */
100 GMemArea *free_mem_area; /* the free area...which is about to be destroyed */
101 GFreeAtom *free_atoms; /* the free atoms list */
102 GTree *mem_tree; /* tree of mem areas sorted by memory address */
103 GRealMemChunk *next; /* pointer to the next chunk */
104 GRealMemChunk *prev; /* pointer to the previous chunk */
108 static gulong g_mem_chunk_compute_size (gulong size);
109 static gint g_mem_chunk_area_compare (GMemArea *a,
111 static gint g_mem_chunk_area_search (GMemArea *a,
115 static GRealMemChunk *mem_chunks = NULL;
117 #ifdef ENABLE_MEM_PROFILE
118 static gulong allocations[MEM_PROFILE_TABLE_SIZE] = { 0 };
119 static gulong allocated_mem = 0;
120 static gulong freed_mem = 0;
121 static gint allocating_for_mem_chunk = 0;
122 #endif /* ENABLE_MEM_PROFILE */
/* g_malloc: allocate `size` bytes with malloc(); on failure g_error()
 * aborts, so a successful return is never NULL.  When ENABLE_MEM_CHECK /
 * ENABLE_MEM_PROFILE are defined, bookkeeping words are written before
 * the user block and the returned pointer is advanced past them (see the
 * header-word layout described at the top of this file).
 * NOTE(review): this extract is missing several lines of the original
 * body (declarations, NULL check, return). */
128 g_malloc (gulong size)
133 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
135 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
142 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
144 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
146 #ifdef ENABLE_MEM_CHECK
148 #endif /* ENABLE_MEM_CHECK */
/* Raw allocation; the elided line above/below presumably checks for NULL
 * before the g_error() call -- TODO confirm against the full source. */
151 p = (gpointer) malloc (size);
153 g_error ("could not allocate %ld bytes", size);
156 #ifdef ENABLE_MEM_CHECK
/* Skip over the "freed" marker word so the size word is next. */
160 p = ((guchar*) p + SIZEOF_LONG);
162 #endif /* ENABLE_MEM_CHECK */
164 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Skip over the size word; p now points at the user-visible block. */
168 p = ((guchar*) p + SIZEOF_LONG);
171 #ifdef ENABLE_MEM_PROFILE
172 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
173 if(!allocating_for_mem_chunk) {
/* Sizes 1..TABLE_SIZE-1 are binned at index size-1; everything larger
 * shares the last bucket.  size == 0 never reaches here (handled by the
 * elided early-return above -- TODO confirm). */
175 if (size <= MEM_PROFILE_TABLE_SIZE - 1)
176 allocations[size-1] += 1;
178 allocations[MEM_PROFILE_TABLE_SIZE - 1] += 1;
179 allocated_mem += size;
180 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
183 #endif /* ENABLE_MEM_PROFILE */
184 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
/* g_malloc0: like g_malloc() but zero-initialized, implemented with
 * calloc(size, 1).  Same header-word bookkeeping and profiling as
 * g_malloc().  NOTE(review): partial extract -- declarations, the NULL
 * check and the return are not visible here. */
191 g_malloc0 (gulong size)
196 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
198 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
205 #if defined (ENABLE_MEM_PROFILE) || defined (ENABLE_MEM_CHECK)
207 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
209 #ifdef ENABLE_MEM_CHECK
211 #endif /* ENABLE_MEM_CHECK */
/* calloc zero-fills and is overflow-checked for size*1. */
214 p = (gpointer) calloc (size, 1);
216 g_error ("could not allocate %ld bytes", size);
219 #ifdef ENABLE_MEM_CHECK
/* Skip the "freed" marker word. */
223 p = ((guchar*) p + SIZEOF_LONG);
225 #endif /* ENABLE_MEM_CHECK */
227 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Skip the size word; p now points at the user-visible block. */
231 p = ((guchar*) p + SIZEOF_LONG);
234 # ifdef ENABLE_MEM_PROFILE
235 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
236 if(!allocating_for_mem_chunk) {
238 if (size <= (MEM_PROFILE_TABLE_SIZE - 1))
239 allocations[size-1] += 1;
241 allocations[MEM_PROFILE_TABLE_SIZE - 1] += 1;
242 allocated_mem += size;
243 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
246 # endif /* ENABLE_MEM_PROFILE */
247 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
/* g_realloc: resize `mem` to `size` bytes.  A NULL `mem` degenerates to
 * a plain malloc (the malloc branch below).  Under ENABLE_MEM_CHECK the
 * freed-marker word is consulted first and a warning is issued when the
 * block was already freed.  NOTE(review): partial extract -- the guard
 * conditions and return statement are not visible here. */
254 g_realloc (gpointer mem,
259 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
261 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
268 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
270 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
272 #ifdef ENABLE_MEM_CHECK
274 #endif /* ENABLE_MEM_CHECK */
/* mem == NULL path (presumably): behave like g_malloc. */
278 p = (gpointer) malloc (size);
281 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* t -> the stored size word of the old block (offset -SIZEOF_LONG). */
282 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
283 #ifdef ENABLE_MEM_PROFILE
285 #endif /* ENABLE_MEM_PROFILE */
287 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
289 #ifdef ENABLE_MEM_CHECK
/* NOTE(review): the file header says the freed-marker word lives at
 * offset -2*SIZEOF_LONG, but this computes -SIZEOF_LONG; an elided line
 * may adjust `mem` first -- verify against the full source. */
290 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
292 g_warning ("trying to realloc freed memory\n");
294 #endif /* ENABLE_MEM_CHECK */
296 p = (gpointer) realloc (mem, size);
300 g_error ("could not reallocate %lu bytes", (gulong) size);
303 #ifdef ENABLE_MEM_CHECK
/* Re-skip the marker word of the (possibly moved) block. */
307 p = ((guchar*) p + SIZEOF_LONG);
309 #endif /* ENABLE_MEM_CHECK */
311 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Re-skip the size word; p is the user-visible pointer again. */
315 p = ((guchar*) p + SIZEOF_LONG);
318 #ifdef ENABLE_MEM_PROFILE
319 #ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
320 if(!allocating_for_mem_chunk) {
322 if (size <= (MEM_PROFILE_TABLE_SIZE - 1))
323 allocations[size-1] += 1;
325 allocations[MEM_PROFILE_TABLE_SIZE - 1] += 1;
326 allocated_mem += size;
327 #ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
330 #endif /* ENABLE_MEM_PROFILE */
331 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
/* g_free: release memory obtained from g_malloc()/g_malloc0()/g_realloc().
 * Under the profiling/checking builds, the stored size word is read back
 * (for freed_mem accounting) and the freed-marker word guards against
 * double frees.  NOTE(review): partial extract -- the NULL guard and the
 * actual free() call are not visible here. */
338 g_free (gpointer mem)
342 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
345 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
347 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* t -> the stored size word (offset -SIZEOF_LONG). */
348 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
350 #ifdef ENABLE_MEM_PROFILE
352 #endif /* ENABLE_MEM_PROFILE */
354 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
356 #ifdef ENABLE_MEM_CHECK
/* NOTE(review): header comment places the freed marker at
 * -2*SIZEOF_LONG; an elided line may rewind `mem` first -- confirm. */
357 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
359 g_warning ("freeing previously freed memory\n");
/* NOTE(review): hard-coded `+ 8` here instead of the SIZEOF_LONG-based
 * header offsets used everywhere else -- only correct when
 * 2*SIZEOF_LONG == 8; flag for verification. */
363 memset ((guchar*) mem + 8, 0, size);
364 #else /* ENABLE_MEM_CHECK */
366 #endif /* ENABLE_MEM_CHECK */
370 #endif /* ! USE_DMALLOC */
/* g_mem_profile body: dump the allocation-size histogram plus the
 * allocated/freed/in-use byte totals via g_log().  A no-op unless
 * ENABLE_MEM_PROFILE was compiled in. */
376 #ifdef ENABLE_MEM_PROFILE
/* One line per size bucket that saw at least one allocation. */
379 for (i = 0; i < (MEM_PROFILE_TABLE_SIZE - 1); i++)
380 if (allocations[i] > 0)
381 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
382 "%lu allocations of %d bytes\n", allocations[i], i + 1);
/* The last bucket aggregates everything >= MEM_PROFILE_TABLE_SIZE. */
384 if (allocations[MEM_PROFILE_TABLE_SIZE - 1] > 0)
385 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
386 "%lu allocations of greater than %d bytes\n",
387 allocations[MEM_PROFILE_TABLE_SIZE - 1], MEM_PROFILE_TABLE_SIZE - 1);
388 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes allocated\n", allocated_mem);
389 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes freed\n", freed_mem);
390 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes in use\n", allocated_mem - freed_mem);
391 #endif /* ENABLE_MEM_PROFILE */
/* g_mem_check: explicit validity check for a g_malloc'd block.  Reads
 * the freed-marker word at offset -2*SIZEOF_LONG (consistent with the
 * layout described at the top of the file) and warns if it is nonzero.
 * No-op unless ENABLE_MEM_CHECK is defined. */
395 g_mem_check (gpointer mem)
397 #ifdef ENABLE_MEM_CHECK
400 t = (gulong*) ((guchar*) mem - SIZEOF_LONG - SIZEOF_LONG);
/* NOTE(review): "%08x" paired with a gulong argument is a format
 * mismatch on platforms where long is wider than int ("%08lx" would
 * match) -- flag for verification. */
403 g_warning ("mem: 0x%08x has been freed %lu times\n", (gulong) mem, *t);
404 #endif /* ENABLE_MEM_CHECK */
/* g_mem_chunk_new: create a memory chunk that hands out fixed-size
 * atoms of `atom_size` bytes, carved from areas of roughly `area_size`
 * bytes.  The chunk is linked onto the global `mem_chunks` list.
 * Note the `name` pointer is stored, not copied -- the caller must keep
 * it alive for the chunk's lifetime. */
408 g_mem_chunk_new (gchar *name,
413 GRealMemChunk *mem_chunk;
416 ENTER_MEM_CHUNK_ROUTINE();
418 mem_chunk = g_new (struct _GRealMemChunk, 1);
419 mem_chunk->name = name;
420 mem_chunk->type = type;
421 mem_chunk->num_mem_areas = 0;
422 mem_chunk->num_marked_areas = 0;
423 mem_chunk->mem_area = NULL;
424 mem_chunk->free_mem_area = NULL;
425 mem_chunk->free_atoms = NULL;
426 mem_chunk->mem_tree = NULL;
427 mem_chunk->mem_areas = NULL;
428 mem_chunk->atom_size = atom_size;
/* Only ALLOC_AND_FREE chunks need the address-sorted area tree (used to
 * map a freed atom back to its containing area). */
430 if (mem_chunk->type == G_ALLOC_AND_FREE)
431 mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
/* Round the atom size up to the platform alignment. */
433 if (mem_chunk->atom_size % MEM_ALIGN)
434 mem_chunk->atom_size += MEM_ALIGN - (mem_chunk->atom_size % MEM_ALIGN);
436 mem_chunk->area_size = area_size;
437 if (mem_chunk->area_size > MAX_MEM_AREA)
438 mem_chunk->area_size = MAX_MEM_AREA;
439 while (mem_chunk->area_size < mem_chunk->atom_size)
440 mem_chunk->area_size *= 2;
/* Size the whole area (header + atoms) to a convenient power of two,
 * then convert back to usable atom space. */
442 rarea_size = mem_chunk->area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
443 rarea_size = g_mem_chunk_compute_size (rarea_size);
444 mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);
447 mem_chunk->area_size -= (sizeof (GMemArea) - MEM_AREA_SIZE);
448 if (mem_chunk->area_size < mem_chunk->atom_size)
450 mem_chunk->area_size = (mem_chunk->area_size + sizeof (GMemArea) - MEM_AREA_SIZE) * 2;
451 mem_chunk->area_size -= (sizeof (GMemArea) - MEM_AREA_SIZE);
/* Make the usable space an exact multiple of the atom size. */
454 if (mem_chunk->area_size % mem_chunk->atom_size)
455 mem_chunk->area_size += mem_chunk->atom_size - (mem_chunk->area_size % mem_chunk->atom_size);
/* Push onto the head of the global chunk list. */
458 mem_chunk->next = mem_chunks;
459 mem_chunk->prev = NULL;
461 mem_chunks->prev = mem_chunk;
462 mem_chunks = mem_chunk;
464 LEAVE_MEM_CHUNK_ROUTINE();
466 return ((GMemChunk*) mem_chunk);
/* g_mem_chunk_destroy: free every memory area owned by the chunk,
 * unlink it from the global `mem_chunks` list, destroy its area tree
 * (ALLOC_AND_FREE only) and release the chunk struct itself.
 * NOTE(review): partial extract -- the g_free calls inside the loop and
 * for the chunk struct are elided here. */
470 g_mem_chunk_destroy (GMemChunk *mem_chunk)
472 GRealMemChunk *rmem_chunk;
476 g_assert (mem_chunk != NULL);
478 ENTER_MEM_CHUNK_ROUTINE();
480 rmem_chunk = (GRealMemChunk*) mem_chunk;
/* Walk and release every area on the chunk's area list. */
482 mem_areas = rmem_chunk->mem_areas;
485 temp_area = mem_areas;
486 mem_areas = mem_areas->next;
/* Unlink from the doubly-linked global chunk list. */
490 if (rmem_chunk->next)
491 rmem_chunk->next->prev = rmem_chunk->prev;
492 if (rmem_chunk->prev)
493 rmem_chunk->prev->next = rmem_chunk->next;
495 if (rmem_chunk == mem_chunks)
496 mem_chunks = mem_chunks->next;
498 if (rmem_chunk->type == G_ALLOC_AND_FREE)
499 g_tree_destroy (rmem_chunk->mem_tree);
503 LEAVE_MEM_CHUNK_ROUTINE();
/* g_mem_chunk_alloc: hand out one atom from the chunk.  Atoms on the
 * free list are reused first (skipping/cleaning up atoms whose area is
 * marked for destruction); otherwise the atom comes from the current
 * area, allocating a new area when the current one is full or absent. */
507 g_mem_chunk_alloc (GMemChunk *mem_chunk)
509 GRealMemChunk *rmem_chunk;
513 ENTER_MEM_CHUNK_ROUTINE();
515 g_assert (mem_chunk != NULL);
517 rmem_chunk = (GRealMemChunk*) mem_chunk;
/* Loop (not a single pop) because atoms belonging to marked areas are
 * consumed for bookkeeping and we retry with the next free atom. */
519 while (rmem_chunk->free_atoms)
521 /* Get the first piece of memory on the "free_atoms" list.
522 * We can go ahead and destroy the list node we used to keep
523 * track of it with and to update the "free_atoms" list to
524 * point to its next element.
526 mem = rmem_chunk->free_atoms;
527 rmem_chunk->free_atoms = rmem_chunk->free_atoms->next;
529 /* Determine which area this piece of memory is allocated from */
530 temp_area = g_tree_search (rmem_chunk->mem_tree,
531 (GSearchFunc) g_mem_chunk_area_search,
534 /* If the area has been marked, then it is being destroyed.
535 * (ie marked to be destroyed).
536 * We check to see if all of the segments on the free list that
537 * reference this area have been removed. This occurs when
538 * the amount of free memory is less than the allocatable size.
539 * If the chunk should be freed, then we place it in the "free_mem_area".
540 * This is so we make sure not to free the mem area here and then
541 * allocate it again a few lines down.
542 * If we don't allocate a chunk a few lines down then the "free_mem_area"
544 * If there is already a "free_mem_area" then we'll just free this mem area.
548 /* Update the "free" memory available in that area */
549 temp_area->free += rmem_chunk->atom_size;
/* Area fully reclaimed: unlink and either cache it as free_mem_area or
 * free it outright (elided branch). */
551 if (temp_area->free == rmem_chunk->area_size)
553 if (temp_area == rmem_chunk->mem_area)
554 rmem_chunk->mem_area = NULL;
556 if (rmem_chunk->free_mem_area)
558 rmem_chunk->num_mem_areas -= 1;
561 temp_area->next->prev = temp_area->prev;
563 temp_area->prev->next = temp_area->next;
564 if (temp_area == rmem_chunk->mem_areas)
565 rmem_chunk->mem_areas = rmem_chunk->mem_areas->next;
567 if (rmem_chunk->type == G_ALLOC_AND_FREE)
568 g_tree_remove (rmem_chunk->mem_tree, temp_area);
572 rmem_chunk->free_mem_area = temp_area;
574 rmem_chunk->num_marked_areas -= 1;
579 /* Update the number of allocated atoms count.
581 temp_area->allocated += 1;
583 /* The area wasn't marked...return the memory
589 /* If there isn't a current mem area or the current mem area is out of space
590 * then allocate a new mem area. We'll first check and see if we can use
591 * the "free_mem_area". Otherwise we'll just malloc the mem area.
593 if ((!rmem_chunk->mem_area) ||
594 ((rmem_chunk->mem_area->index + rmem_chunk->atom_size) > rmem_chunk->area_size))
596 if (rmem_chunk->free_mem_area)
598 rmem_chunk->mem_area = rmem_chunk->free_mem_area;
599 rmem_chunk->free_mem_area = NULL;
/* sizeof(GMemArea) includes the placeholder mem[MEM_AREA_SIZE]; subtract
 * it (elided line) and add the real area_size instead. */
603 rmem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
605 rmem_chunk->area_size);
607 rmem_chunk->num_mem_areas += 1;
608 rmem_chunk->mem_area->next = rmem_chunk->mem_areas;
609 rmem_chunk->mem_area->prev = NULL;
611 if (rmem_chunk->mem_areas)
612 rmem_chunk->mem_areas->prev = rmem_chunk->mem_area;
613 rmem_chunk->mem_areas = rmem_chunk->mem_area;
615 if (rmem_chunk->type == G_ALLOC_AND_FREE)
616 g_tree_insert (rmem_chunk->mem_tree, rmem_chunk->mem_area, rmem_chunk->mem_area);
/* Fresh (or recycled) area starts empty. */
619 rmem_chunk->mem_area->index = 0;
620 rmem_chunk->mem_area->free = rmem_chunk->area_size;
621 rmem_chunk->mem_area->allocated = 0;
622 rmem_chunk->mem_area->mark = 0;
625 /* Get the memory and modify the state variables appropriately.
627 mem = (gpointer) &rmem_chunk->mem_area->mem[rmem_chunk->mem_area->index];
628 rmem_chunk->mem_area->index += rmem_chunk->atom_size;
629 rmem_chunk->mem_area->free -= rmem_chunk->atom_size;
630 rmem_chunk->mem_area->allocated += 1;
634 LEAVE_MEM_CHUNK_ROUTINE();
/* g_mem_chunk_alloc0: allocate one atom and zero it.  The memset uses
 * the (alignment-rounded) atom_size, so the whole atom is cleared. */
640 g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
644 mem = g_mem_chunk_alloc (mem_chunk);
647 GRealMemChunk *rmem_chunk = (GRealMemChunk*) mem_chunk;
649 memset (mem, 0, rmem_chunk->atom_size);
/* g_mem_chunk_free: return an atom to its chunk.  For ALLOC_ONLY chunks
 * this is a no-op; for ALLOC_AND_FREE the atom is pushed onto the
 * free_atoms list and its containing area (found via the address tree)
 * has its allocated count decremented.  An area whose count drops to
 * zero is marked for destruction (actual reclamation happens later in
 * g_mem_chunk_alloc/g_mem_chunk_clean). */
656 g_mem_chunk_free (GMemChunk *mem_chunk,
659 GRealMemChunk *rmem_chunk;
661 GFreeAtom *free_atom;
663 g_assert (mem_chunk != NULL);
664 g_assert (mem != NULL);
666 ENTER_MEM_CHUNK_ROUTINE();
668 rmem_chunk = (GRealMemChunk*) mem_chunk;
670 /* Don't do anything if this is an ALLOC_ONLY chunk
672 if (rmem_chunk->type == G_ALLOC_AND_FREE)
674 /* Place the memory on the "free_atoms" list
/* The atom's own bytes are reused as the free-list link node. */
676 free_atom = (GFreeAtom*) mem;
677 free_atom->next = rmem_chunk->free_atoms;
678 rmem_chunk->free_atoms = free_atom;
680 temp_area = g_tree_search (rmem_chunk->mem_tree,
681 (GSearchFunc) g_mem_chunk_area_search,
684 temp_area->allocated -= 1;
686 if (temp_area->allocated == 0)
689 rmem_chunk->num_marked_areas += 1;
693 LEAVE_MEM_CHUNK_ROUTINE();
696 /* This doesn't free the free_area if there is one */
/* g_mem_chunk_clean: walk the free-atoms list and physically free any
 * marked areas whose remaining free atoms have all been unlinked from
 * the list.  Only meaningful for ALLOC_AND_FREE chunks. */
698 g_mem_chunk_clean (GMemChunk *mem_chunk)
700 GRealMemChunk *rmem_chunk;
702 GFreeAtom *prev_free_atom;
703 GFreeAtom *temp_free_atom;
706 g_assert (mem_chunk != NULL);
708 rmem_chunk = (GRealMemChunk*) mem_chunk;
710 if (rmem_chunk->type == G_ALLOC_AND_FREE)
712 prev_free_atom = NULL;
713 temp_free_atom = rmem_chunk->free_atoms;
715 while (temp_free_atom)
717 mem = (gpointer) temp_free_atom;
/* Map this free atom back to its containing area. */
719 mem_area = g_tree_search (rmem_chunk->mem_tree,
720 (GSearchFunc) g_mem_chunk_area_search,
723 /* If this mem area is marked for destruction then delete the
724 * area and list node and decrement the free mem.
/* Unlink the atom from the free list (head vs middle cases). */
729 prev_free_atom->next = temp_free_atom->next;
731 rmem_chunk->free_atoms = temp_free_atom->next;
732 temp_free_atom = temp_free_atom->next;
734 mem_area->free += rmem_chunk->atom_size;
/* Once every atom of a marked area is back, the area can go. */
735 if (mem_area->free == rmem_chunk->area_size)
737 rmem_chunk->num_mem_areas -= 1;
738 rmem_chunk->num_marked_areas -= 1;
741 mem_area->next->prev = mem_area->prev;
743 mem_area->prev->next = mem_area->next;
744 if (mem_area == rmem_chunk->mem_areas)
745 rmem_chunk->mem_areas = rmem_chunk->mem_areas->next;
746 if (mem_area == rmem_chunk->mem_area)
747 rmem_chunk->mem_area = NULL;
749 if (rmem_chunk->type == G_ALLOC_AND_FREE)
750 g_tree_remove (rmem_chunk->mem_tree, mem_area);
/* Unmarked area: keep the atom on the list and advance. */
756 prev_free_atom = temp_free_atom;
757 temp_free_atom = temp_free_atom->next;
/* g_mem_chunk_reset: throw away every area the chunk owns (all handed-
 * out atoms become invalid), empty the free-atoms list, and rebuild an
 * empty area tree.  NOTE(review): partial extract -- the g_free of each
 * area inside the loop is elided here. */
764 g_mem_chunk_reset (GMemChunk *mem_chunk)
766 GRealMemChunk *rmem_chunk;
770 g_assert (mem_chunk != NULL);
772 rmem_chunk = (GRealMemChunk*) mem_chunk;
774 mem_areas = rmem_chunk->mem_areas;
775 rmem_chunk->num_mem_areas = 0;
776 rmem_chunk->mem_areas = NULL;
777 rmem_chunk->mem_area = NULL;
781 temp_area = mem_areas;
782 mem_areas = mem_areas->next;
786 rmem_chunk->free_atoms = NULL;
/* Replace the address tree with a fresh empty one. */
788 if (rmem_chunk->mem_tree)
789 g_tree_destroy (rmem_chunk->mem_tree);
790 rmem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
/* g_mem_chunk_print: log the chunk's name, the total bytes currently
 * in use (area_size minus free, summed over all areas) and the number
 * of areas. */
794 g_mem_chunk_print (GMemChunk *mem_chunk)
796 GRealMemChunk *rmem_chunk;
800 g_assert (mem_chunk != NULL);
802 rmem_chunk = (GRealMemChunk*) mem_chunk;
803 mem_areas = rmem_chunk->mem_areas;
/* Accumulate used bytes per area. */
808 mem += rmem_chunk->area_size - mem_areas->free;
809 mem_areas = mem_areas->next;
812 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
813 "%s: %ld bytes using %d mem areas\n",
814 rmem_chunk->name, mem, rmem_chunk->num_mem_areas);
/* g_mem_chunk_info: log the number of chunks on the global list, then
 * print per-chunk statistics via g_mem_chunk_print(). */
818 g_mem_chunk_info (void)
820 GRealMemChunk *mem_chunk;
/* First pass: count the chunks. */
824 mem_chunk = mem_chunks;
828 mem_chunk = mem_chunk->next;
831 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%d mem chunks\n", count);
/* Second pass: print each chunk. */
833 mem_chunk = mem_chunks;
836 g_mem_chunk_print ((GMemChunk*) mem_chunk);
837 mem_chunk = mem_chunk->next;
/* Body of a routine that walks the global chunk list and runs
 * g_mem_chunk_clean() on every chunk (presumably g_blow_chunks -- the
 * signature line is not visible in this extract; verify). */
844 GRealMemChunk *mem_chunk;
846 mem_chunk = mem_chunks;
849 g_mem_chunk_clean ((GMemChunk*) mem_chunk);
850 mem_chunk = mem_chunk->next;
/* g_mem_chunk_compute_size: round `size` to a nearby power of two --
 * find the smallest power of two >= size, then pick whichever of that
 * value and the next power down is closer to the request. */
856 g_mem_chunk_compute_size (gulong size)
862 while (power_of_2 < size)
865 lower = power_of_2 >> 1;
/* Choose the nearer of the two bracketing powers of two. */
868 if ((size - lower) < (upper - size))
/* g_mem_chunk_area_compare: GCompareFunc ordering areas by the address
 * of their mem[] arrays (used by the per-chunk GTree).
 * NOTE(review): the pointer difference is narrowed to the gint return
 * type; on platforms where pointers are wider than int this can
 * truncate for far-apart areas -- flag for verification. */
874 g_mem_chunk_area_compare (GMemArea *a,
877 return (a->mem - b->mem);
881 g_mem_chunk_area_search (GMemArea *a,
886 if (addr < &a->mem[a->index])