1 /* GLIB - Library of useful routines for C programming
2 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
17 * Boston, MA 02111-1307, USA.
23 /* #define ENABLE_MEM_PROFILE */
24 /* #define ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS */
25 /* #define ENABLE_MEM_CHECK */
28 * This library can check for some attempts to do illegal things to
29 * memory (ENABLE_MEM_CHECK), and can do profiling
30 * (ENABLE_MEM_PROFILE). Both features are implemented by storing
31 * words before the start of the memory chunk.
33 * The first, at offset -2*SIZEOF_LONG, is used only if
34 * ENABLE_MEM_CHECK is set, and stores 0 after the memory has been
35 * allocated and 1 when it has been freed. The second, at offset
36 * -SIZEOF_LONG, is used if either flag is set and stores the size of
39 * The MEM_CHECK flag is checked when memory is realloc'd and free'd,
40 * and it can be explicitly checked before using a block by calling
44 #if defined(ENABLE_MEM_PROFILE) && defined(ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS)
45 #define ENTER_MEM_CHUNK_ROUTINE() allocating_for_mem_chunk++
46 #define LEAVE_MEM_CHUNK_ROUTINE() allocating_for_mem_chunk--
48 #define ENTER_MEM_CHUNK_ROUTINE()
49 #define LEAVE_MEM_CHUNK_ROUTINE()
53 #define MAX_MEM_AREA 65536L
54 #define MEM_AREA_SIZE 4L
56 #if SIZEOF_VOID_P > SIZEOF_LONG
57 #define MEM_ALIGN SIZEOF_VOID_P
59 #define MEM_ALIGN SIZEOF_LONG
63 typedef struct _GFreeAtom GFreeAtom;
64 typedef struct _GMemArea GMemArea;
65 typedef struct _GRealMemChunk GRealMemChunk;
74 GMemArea *next; /* the next mem area */
75 GMemArea *prev; /* the previous mem area */
76 gulong index; /* the current index into the "mem" array */
77 gulong free; /* the number of free bytes in this mem area */
78 gulong allocated; /* the number of atoms allocated from this area */
79 gulong mark; /* is this mem area marked for deletion */
80 gchar mem[MEM_AREA_SIZE]; /* the mem array from which atoms get allocated
81 * the actual size of this array is determined by
82 * the mem chunk "area_size". ANSI says that it
83 * must be declared to be the maximum size it
84 * can possibly be (even though the actual size
91 gchar *name; /* name of this MemChunk...used for debugging output */
92 gint type; /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
93 gint num_mem_areas; /* the number of memory areas */
94 gint num_marked_areas; /* the number of areas marked for deletion */
95 guint atom_size; /* the size of an atom */
96 gulong area_size; /* the size of a memory area */
97 GMemArea *mem_area; /* the current memory area */
98 GMemArea *mem_areas; /* a list of all the mem areas owned by this chunk */
99 GMemArea *free_mem_area; /* the free area...which is about to be destroyed */
100 GFreeAtom *free_atoms; /* the free atoms list */
101 GTree *mem_tree; /* tree of mem areas sorted by memory address */
102 GRealMemChunk *next; /* pointer to the next chunk */
103 GRealMemChunk *prev; /* pointer to the previous chunk */
107 static gulong g_mem_chunk_compute_size (gulong size);
108 static gint g_mem_chunk_area_compare (GMemArea *a,
110 static gint g_mem_chunk_area_search (GMemArea *a,
114 static GRealMemChunk *mem_chunks = NULL;
116 #ifdef ENABLE_MEM_PROFILE
117 static gulong allocations[4096] = { 0 };
118 static gulong allocated_mem = 0;
119 static gulong freed_mem = 0;
120 static gint allocating_for_mem_chunk = 0;
121 #endif /* ENABLE_MEM_PROFILE */
/* g_malloc: allocate `size` bytes with malloc(), aborting via g_error()
 * on failure (callers never see NULL from a failed allocation).
 * With ENABLE_MEM_CHECK / ENABLE_MEM_PROFILE, extra gulong words are
 * reserved in front of the returned pointer (see file header comment).
 * NOTE(review): lines appear elided in this copy of the file; comments
 * describe only the visible statements. */
127 g_malloc (gulong size)
132 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
134 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
141 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
143 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
145 #ifdef ENABLE_MEM_CHECK
147 #endif /* ENABLE_MEM_CHECK */
/* Underlying allocation; failure is fatal (g_error aborts). */
150 p = (gpointer) malloc (size);
152 g_error ("could not allocate %ld bytes", size);
155 #ifdef ENABLE_MEM_CHECK
/* Step past the freed-flag word stored at offset -2*SIZEOF_LONG. */
159 p = ((guchar*) p + SIZEOF_LONG);
161 #endif /* ENABLE_MEM_CHECK */
163 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Step past the stored-size word at offset -SIZEOF_LONG. */
167 p = ((guchar*) p + SIZEOF_LONG);
170 #ifdef ENABLE_MEM_PROFILE
171 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
172 if(!allocating_for_mem_chunk) {
/* Histogram of allocation sizes: buckets 0..4094 are exact sizes
 * 1..4095; bucket 4095 counts everything larger. */
175 allocations[size-1] += 1;
177 allocations[4095] += 1;
178 allocated_mem += size;
179 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
182 #endif /* ENABLE_MEM_PROFILE */
183 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
/* g_malloc0: like g_malloc(), but the returned memory is zero-filled
 * (uses calloc()).  Aborts via g_error() on allocation failure.
 * Same profiling/checking header words as g_malloc().
 * NOTE(review): lines appear elided in this copy of the file. */
190 g_malloc0 (gulong size)
195 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
197 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
204 #if defined (ENABLE_MEM_PROFILE) || defined (ENABLE_MEM_CHECK)
206 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
208 #ifdef ENABLE_MEM_CHECK
210 #endif /* ENABLE_MEM_CHECK */
/* calloc zero-initializes; failure is fatal. */
213 p = (gpointer) calloc (size, 1);
215 g_error ("could not allocate %ld bytes", size);
218 #ifdef ENABLE_MEM_CHECK
/* Step past the freed-flag word. */
222 p = ((guchar*) p + SIZEOF_LONG);
224 #endif /* ENABLE_MEM_CHECK */
226 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Step past the stored-size word. */
230 p = ((guchar*) p + SIZEOF_LONG);
233 # ifdef ENABLE_MEM_PROFILE
234 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
235 if(!allocating_for_mem_chunk) {
/* Size histogram; bucket 4095 = allocations larger than 4095 bytes. */
238 allocations[size-1] += 1;
240 allocations[4095] += 1;
241 allocated_mem += size;
242 # ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
245 # endif /* ENABLE_MEM_PROFILE */
246 #endif /* ENABLE_MEM_PROFILE */
/* g_realloc: resize `mem` to `size` bytes.  The visible malloc() branch
 * suggests a NULL `mem` is treated as a plain allocation.  When
 * ENABLE_MEM_CHECK is on, reallocating already-freed memory is detected
 * via the freed-flag word and warned about.  Aborts on failure.
 * NOTE(review): lines appear elided in this copy of the file. */
253 g_realloc (gpointer mem,
258 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
260 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
267 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
269 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
271 #ifdef ENABLE_MEM_CHECK
273 #endif /* ENABLE_MEM_CHECK */
/* Fresh allocation path (presumably taken when mem is NULL — elided
 * condition not visible here). */
277 p = (gpointer) malloc (size);
280 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Recover the stored old size from the word before the block. */
281 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
282 #ifdef ENABLE_MEM_PROFILE
284 #endif /* ENABLE_MEM_PROFILE */
286 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
288 #ifdef ENABLE_MEM_CHECK
/* Check the freed-flag word before touching the block. */
289 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
291 g_warning ("trying to realloc freed memory\n");
293 #endif /* ENABLE_MEM_CHECK */
295 p = (gpointer) realloc (mem, size);
299 g_error ("could not reallocate %lu bytes", (gulong) size);
302 #ifdef ENABLE_MEM_CHECK
/* Step past the freed-flag word in the new block. */
306 p = ((guchar*) p + SIZEOF_LONG);
308 #endif /* ENABLE_MEM_CHECK */
310 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Step past the stored-size word in the new block. */
314 p = ((guchar*) p + SIZEOF_LONG);
317 #ifdef ENABLE_MEM_PROFILE
318 #ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
319 if(!allocating_for_mem_chunk) {
/* Record the new size in the allocation histogram. */
322 allocations[size-1] += 1;
324 allocations[4095] += 1;
325 allocated_mem += size;
326 #ifdef ENABLE_MEM_PROFILE_EXCLUDES_MEM_CHUNKS
329 #endif /* ENABLE_MEM_PROFILE */
330 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
/* g_free: release memory obtained from g_malloc()/g_malloc0()/g_realloc().
 * With profiling, the stored size word is used to account freed bytes;
 * with ENABLE_MEM_CHECK, double frees are detected via the freed-flag
 * word and the block is scrubbed instead of returned to the allocator.
 * NOTE(review): lines appear elided in this copy of the file. */
337 g_free (gpointer mem)
341 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
344 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
346 #if defined(ENABLE_MEM_PROFILE) || defined(ENABLE_MEM_CHECK)
/* Read back the stored allocation size. */
347 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
349 #ifdef ENABLE_MEM_PROFILE
351 #endif /* ENABLE_MEM_PROFILE */
353 #endif /* ENABLE_MEM_PROFILE || ENABLE_MEM_CHECK */
355 #ifdef ENABLE_MEM_CHECK
356 t = (gulong*) ((guchar*) mem - SIZEOF_LONG);
358 g_warning ("freeing previously freed memory\n");
362 memset ((guchar*) mem + 8, 0, size);
/* NOTE(review): the hardcoded `+ 8` above looks like it should be
 * 2*SIZEOF_LONG (the two bookkeeping words) — verify against the
 * upstream source before changing. */
363 #else /* ENABLE_MEM_CHECK */
365 #endif /* ENABLE_MEM_CHECK */
369 #endif /* ! USE_DMALLOC */
/* (g_mem_profile body — the definition line is elided in this copy.)
 * Dumps the allocation-size histogram and running totals collected by
 * g_malloc()/g_free() when ENABLE_MEM_PROFILE is defined; a no-op
 * otherwise. */
375 #ifdef ENABLE_MEM_PROFILE
/* Buckets 0..4094 hold counts for exact sizes 1..4095 bytes. */
378 for (i = 0; i < 4095; i++)
379 if (allocations[i] > 0)
380 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
381 "%lu allocations of %d bytes\n", allocations[i], i + 1);
/* The last bucket aggregates all allocations larger than 4095 bytes. */
383 if (allocations[4095] > 0)
384 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
385 "%lu allocations of greater than 4095 bytes\n", allocations[4095]);
386 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes allocated\n", allocated_mem);
387 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes freed\n", freed_mem);
388 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%lu bytes in use\n", allocated_mem - freed_mem);
389 #endif /* ENABLE_MEM_PROFILE */
/* g_mem_check: explicit validity check of a g_malloc'd block.  Reads the
 * freed-flag word at offset -2*SIZEOF_LONG and warns if the block has
 * already been freed.  No-op unless ENABLE_MEM_CHECK is defined. */
393 g_mem_check (gpointer mem)
395 #ifdef ENABLE_MEM_CHECK
398 t = (gulong*) ((guchar*) mem - SIZEOF_LONG - SIZEOF_LONG);
/* NOTE(review): "%08x" with a gulong-cast pointer truncates on LP64
 * platforms; %p would be the portable spelling — verify before use
 * on 64-bit targets. */
401 g_warning ("mem: 0x%08x has been freed %lu times\n", (gulong) mem, *t);
402 #endif /* ENABLE_MEM_CHECK */
/* g_mem_chunk_new: create a fixed-size-atom allocator.  `atom_size` is
 * rounded up to MEM_ALIGN, `area_size` is clamped to MAX_MEM_AREA and
 * rounded so whole atoms fit, and the chunk is linked into the global
 * `mem_chunks` list.  Returns the chunk as an opaque GMemChunk*.
 * NOTE(review): lines appear elided in this copy of the file. */
406 g_mem_chunk_new (gchar *name,
411 GRealMemChunk *mem_chunk;
414 ENTER_MEM_CHUNK_ROUTINE();
416 mem_chunk = g_new (struct _GRealMemChunk, 1);
/* Stores the caller's pointer; the name string is NOT copied, so the
 * caller must keep it alive for the lifetime of the chunk. */
417 mem_chunk->name = name;
418 mem_chunk->type = type;
419 mem_chunk->num_mem_areas = 0;
420 mem_chunk->num_marked_areas = 0;
421 mem_chunk->mem_area = NULL;
422 mem_chunk->free_mem_area = NULL;
423 mem_chunk->free_atoms = NULL;
424 mem_chunk->mem_tree = NULL;
425 mem_chunk->mem_areas = NULL;
426 mem_chunk->atom_size = atom_size;
/* Only ALLOC_AND_FREE chunks need the address-sorted area tree (used
 * to map a freed atom back to its owning area). */
428 if (mem_chunk->type == G_ALLOC_AND_FREE)
429 mem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
/* Round the atom size up to the platform alignment. */
431 if (mem_chunk->atom_size % MEM_ALIGN)
432 mem_chunk->atom_size += MEM_ALIGN - (mem_chunk->atom_size % MEM_ALIGN);
434 mem_chunk->area_size = area_size;
435 if (mem_chunk->area_size > MAX_MEM_AREA)
436 mem_chunk->area_size = MAX_MEM_AREA;
437 while (mem_chunk->area_size < mem_chunk->atom_size)
438 mem_chunk->area_size *= 2;
/* Size the raw area (GMemArea header + payload) to a near power of
 * two, then recompute the usable payload size. */
440 rarea_size = mem_chunk->area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
441 rarea_size = g_mem_chunk_compute_size (rarea_size);
442 mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);
445 mem_chunk->area_size -= (sizeof (GMemArea) - MEM_AREA_SIZE);
446 if (mem_chunk->area_size < mem_chunk->atom_size)
448 mem_chunk->area_size = (mem_chunk->area_size + sizeof (GMemArea) - MEM_AREA_SIZE) * 2;
449 mem_chunk->area_size -= (sizeof (GMemArea) - MEM_AREA_SIZE);
/* Trim the payload so it holds a whole number of atoms. */
452 if (mem_chunk->area_size % mem_chunk->atom_size)
453 mem_chunk->area_size += mem_chunk->atom_size - (mem_chunk->area_size % mem_chunk->atom_size);
/* Push the new chunk onto the head of the global chunk list. */
456 mem_chunk->next = mem_chunks;
457 mem_chunk->prev = NULL;
459 mem_chunks->prev = mem_chunk;
460 mem_chunks = mem_chunk;
462 LEAVE_MEM_CHUNK_ROUTINE();
464 return ((GMemChunk*) mem_chunk);
/* g_mem_chunk_destroy: free every memory area owned by the chunk,
 * unlink the chunk from the global `mem_chunks` list, destroy its area
 * tree (ALLOC_AND_FREE only) and release the chunk itself.  All atoms
 * allocated from the chunk become invalid.
 * NOTE(review): lines appear elided in this copy of the file. */
468 g_mem_chunk_destroy (GMemChunk *mem_chunk)
470 GRealMemChunk *rmem_chunk;
474 g_assert (mem_chunk != NULL);
476 ENTER_MEM_CHUNK_ROUTINE();
478 rmem_chunk = (GRealMemChunk*) mem_chunk;
/* Walk and free the linked list of memory areas. */
480 mem_areas = rmem_chunk->mem_areas;
483 temp_area = mem_areas;
484 mem_areas = mem_areas->next;
/* Unlink from the global doubly-linked chunk list. */
488 if (rmem_chunk->next)
489 rmem_chunk->next->prev = rmem_chunk->prev;
490 if (rmem_chunk->prev)
491 rmem_chunk->prev->next = rmem_chunk->next;
493 if (rmem_chunk == mem_chunks)
494 mem_chunks = mem_chunks->next;
496 if (rmem_chunk->type == G_ALLOC_AND_FREE)
497 g_tree_destroy (rmem_chunk->mem_tree);
501 LEAVE_MEM_CHUNK_ROUTINE();
/* g_mem_chunk_alloc: hand out one atom from the chunk.  First tries to
 * recycle an atom from the `free_atoms` list (skipping atoms that live
 * in areas marked for destruction); otherwise carves a fresh atom from
 * the current area, allocating a new area when the current one is full.
 * NOTE(review): lines appear elided in this copy of the file. */
505 g_mem_chunk_alloc (GMemChunk *mem_chunk)
507 GRealMemChunk *rmem_chunk;
511 ENTER_MEM_CHUNK_ROUTINE();
513 g_assert (mem_chunk != NULL);
515 rmem_chunk = (GRealMemChunk*) mem_chunk;
517 while (rmem_chunk->free_atoms)
519 /* Get the first piece of memory on the "free_atoms" list.
520 * We can go ahead and destroy the list node we used to keep
521 * track of it with and to update the "free_atoms" list to
522 * point to its next element.
524 mem = rmem_chunk->free_atoms;
525 rmem_chunk->free_atoms = rmem_chunk->free_atoms->next;
527 /* Determine which area this piece of memory is allocated from */
528 temp_area = g_tree_search (rmem_chunk->mem_tree,
529 (GSearchFunc) g_mem_chunk_area_search,
532 /* If the area has been marked, then it is being destroyed.
533 * (ie marked to be destroyed).
534 * We check to see if all of the segments on the free list that
535 * reference this area have been removed. This occurs when
536 * the amount of free memory is less than the allocatable size.
537 * If the chunk should be freed, then we place it in the "free_mem_area".
538 * This is so we make sure not to free the mem area here and then
539 * allocate it again a few lines down.
540 * If we don't allocate a chunk a few lines down then the "free_mem_area"
542 * If there is already a "free_mem_area" then we'll just free this mem area.
546 /* Update the "free" memory available in that area */
547 temp_area->free += rmem_chunk->atom_size;
549 if (temp_area->free == rmem_chunk->area_size)
551 if (temp_area == rmem_chunk->mem_area)
552 rmem_chunk->mem_area = NULL;
554 if (rmem_chunk->free_mem_area)
556 rmem_chunk->num_mem_areas -= 1;
/* Unlink the fully-free, marked area from the area list. */
559 temp_area->next->prev = temp_area->prev;
561 temp_area->prev->next = temp_area->next;
562 if (temp_area == rmem_chunk->mem_areas)
563 rmem_chunk->mem_areas = rmem_chunk->mem_areas->next;
565 if (rmem_chunk->type == G_ALLOC_AND_FREE)
566 g_tree_remove (rmem_chunk->mem_tree, temp_area);
/* Cache the area for possible immediate reuse below. */
570 rmem_chunk->free_mem_area = temp_area;
572 rmem_chunk->num_marked_areas -= 1;
577 /* Update the number of allocated atoms count.
579 temp_area->allocated += 1;
581 /* The area wasn't marked...return the memory
587 /* If there isn't a current mem area or the current mem area is out of space
588 * then allocate a new mem area. We'll first check and see if we can use
589 * the "free_mem_area". Otherwise we'll just malloc the mem area.
591 if ((!rmem_chunk->mem_area) ||
592 ((rmem_chunk->mem_area->index + rmem_chunk->atom_size) > rmem_chunk->area_size))
594 if (rmem_chunk->free_mem_area)
596 rmem_chunk->mem_area = rmem_chunk->free_mem_area;
597 rmem_chunk->free_mem_area = NULL;
/* GMemArea is declared with a MEM_AREA_SIZE-byte `mem` stub; the real
 * payload size is area_size, hence the adjusted allocation size. */
601 rmem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
603 rmem_chunk->area_size);
605 rmem_chunk->num_mem_areas += 1;
606 rmem_chunk->mem_area->next = rmem_chunk->mem_areas;
607 rmem_chunk->mem_area->prev = NULL;
609 if (rmem_chunk->mem_areas)
610 rmem_chunk->mem_areas->prev = rmem_chunk->mem_area;
611 rmem_chunk->mem_areas = rmem_chunk->mem_area;
613 if (rmem_chunk->type == G_ALLOC_AND_FREE)
614 g_tree_insert (rmem_chunk->mem_tree, rmem_chunk->mem_area, rmem_chunk->mem_area);
/* Reset bookkeeping for the (new or recycled) current area. */
617 rmem_chunk->mem_area->index = 0;
618 rmem_chunk->mem_area->free = rmem_chunk->area_size;
619 rmem_chunk->mem_area->allocated = 0;
620 rmem_chunk->mem_area->mark = 0;
623 /* Get the memory and modify the state variables appropriately.
625 mem = (gpointer) &rmem_chunk->mem_area->mem[rmem_chunk->mem_area->index];
626 rmem_chunk->mem_area->index += rmem_chunk->atom_size;
627 rmem_chunk->mem_area->free -= rmem_chunk->atom_size;
628 rmem_chunk->mem_area->allocated += 1;
632 LEAVE_MEM_CHUNK_ROUTINE();
/* g_mem_chunk_alloc0: allocate one atom and zero it.  Note the memset
 * covers the chunk's (aligned) atom_size, not the caller's original
 * requested size. */
638 g_mem_chunk_alloc0 (GMemChunk *mem_chunk)
642 mem = g_mem_chunk_alloc (mem_chunk);
645 GRealMemChunk *rmem_chunk = (GRealMemChunk*) mem_chunk;
647 memset (mem, 0, rmem_chunk->atom_size);
/* g_mem_chunk_free: return an atom to the chunk.  For ALLOC_ONLY chunks
 * this is a no-op.  For ALLOC_AND_FREE chunks the atom is pushed onto
 * the `free_atoms` list (reusing the atom's own storage as the list
 * node) and its owning area's allocated count is decremented; a fully
 * unallocated area is marked for later destruction.
 * NOTE(review): lines appear elided in this copy of the file. */
654 g_mem_chunk_free (GMemChunk *mem_chunk,
657 GRealMemChunk *rmem_chunk;
659 GFreeAtom *free_atom;
661 g_assert (mem_chunk != NULL);
662 g_assert (mem != NULL);
664 ENTER_MEM_CHUNK_ROUTINE();
666 rmem_chunk = (GRealMemChunk*) mem_chunk;
668 /* Don't do anything if this is an ALLOC_ONLY chunk
670 if (rmem_chunk->type == G_ALLOC_AND_FREE)
672 /* Place the memory on the "free_atoms" list
674 free_atom = (GFreeAtom*) mem;
675 free_atom->next = rmem_chunk->free_atoms;
676 rmem_chunk->free_atoms = free_atom;
/* Look up the owning area by address so its counters can be updated. */
678 temp_area = g_tree_search (rmem_chunk->mem_tree,
679 (GSearchFunc) g_mem_chunk_area_search,
682 temp_area->allocated -= 1;
684 if (temp_area->allocated == 0)
687 rmem_chunk->num_marked_areas += 1;
691 LEAVE_MEM_CHUNK_ROUTINE();
694 /* This doesn't free the free_area if there is one */
/* g_mem_chunk_clean: sweep the `free_atoms` list, removing atoms that
 * belong to areas marked for destruction, and free any marked area once
 * all of its atoms have been drained from the list.  Only meaningful
 * for ALLOC_AND_FREE chunks.
 * NOTE(review): lines appear elided in this copy of the file. */
696 g_mem_chunk_clean (GMemChunk *mem_chunk)
698 GRealMemChunk *rmem_chunk;
700 GFreeAtom *prev_free_atom;
701 GFreeAtom *temp_free_atom;
704 g_assert (mem_chunk != NULL);
706 rmem_chunk = (GRealMemChunk*) mem_chunk;
708 if (rmem_chunk->type == G_ALLOC_AND_FREE)
710 prev_free_atom = NULL;
711 temp_free_atom = rmem_chunk->free_atoms;
713 while (temp_free_atom)
715 mem = (gpointer) temp_free_atom;
/* Map the free atom back to its owning area. */
717 mem_area = g_tree_search (rmem_chunk->mem_tree,
718 (GSearchFunc) g_mem_chunk_area_search,
721 /* If this mem area is marked for destruction then delete the
722 * area and list node and decrement the free mem.
/* Unlink this atom from the free list (head or interior). */
727 prev_free_atom->next = temp_free_atom->next;
729 rmem_chunk->free_atoms = temp_free_atom->next;
730 temp_free_atom = temp_free_atom->next;
732 mem_area->free += rmem_chunk->atom_size;
733 if (mem_area->free == rmem_chunk->area_size)
735 rmem_chunk->num_mem_areas -= 1;
736 rmem_chunk->num_marked_areas -= 1;
/* Unlink the fully-drained area from the area list. */
739 mem_area->next->prev = mem_area->prev;
741 mem_area->prev->next = mem_area->next;
742 if (mem_area == rmem_chunk->mem_areas)
743 rmem_chunk->mem_areas = rmem_chunk->mem_areas->next;
744 if (mem_area == rmem_chunk->mem_area)
745 rmem_chunk->mem_area = NULL;
747 if (rmem_chunk->type == G_ALLOC_AND_FREE)
748 g_tree_remove (rmem_chunk->mem_tree, mem_area);
/* Atom's area is not marked: keep it on the free list and advance. */
754 prev_free_atom = temp_free_atom;
755 temp_free_atom = temp_free_atom->next;
/* g_mem_chunk_reset: discard every area owned by the chunk and reset it
 * to its freshly-created state (empty area list, empty free-atom list,
 * new area tree).  All previously allocated atoms become invalid.
 * NOTE(review): lines appear elided in this copy of the file. */
762 g_mem_chunk_reset (GMemChunk *mem_chunk)
764 GRealMemChunk *rmem_chunk;
768 g_assert (mem_chunk != NULL);
770 rmem_chunk = (GRealMemChunk*) mem_chunk;
772 mem_areas = rmem_chunk->mem_areas;
773 rmem_chunk->num_mem_areas = 0;
774 rmem_chunk->mem_areas = NULL;
775 rmem_chunk->mem_area = NULL;
/* Free every detached area. */
779 temp_area = mem_areas;
780 mem_areas = mem_areas->next;
784 rmem_chunk->free_atoms = NULL;
/* Rebuild the (now stale) address-sorted area tree. */
786 if (rmem_chunk->mem_tree)
787 g_tree_destroy (rmem_chunk->mem_tree);
788 rmem_chunk->mem_tree = g_tree_new ((GCompareFunc) g_mem_chunk_area_compare);
/* g_mem_chunk_print: log one summary line for the chunk — total bytes
 * currently in use (area_size minus free, summed over all areas) and
 * the number of areas. */
792 g_mem_chunk_print (GMemChunk *mem_chunk)
794 GRealMemChunk *rmem_chunk;
798 g_assert (mem_chunk != NULL);
800 rmem_chunk = (GRealMemChunk*) mem_chunk;
801 mem_areas = rmem_chunk->mem_areas;
/* Accumulate in-use bytes per area. */
806 mem += rmem_chunk->area_size - mem_areas->free;
807 mem_areas = mem_areas->next;
810 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO,
811 "%s: %ld bytes using %d mem areas\n",
812 rmem_chunk->name, mem, rmem_chunk->num_mem_areas);
/* g_mem_chunk_info: log the number of live mem chunks, then print a
 * per-chunk summary (via g_mem_chunk_print) for each chunk on the
 * global `mem_chunks` list. */
816 g_mem_chunk_info (void)
818 GRealMemChunk *mem_chunk;
/* First pass: count the chunks. */
822 mem_chunk = mem_chunks;
826 mem_chunk = mem_chunk->next;
829 g_log (g_log_domain_glib, G_LOG_LEVEL_INFO, "%d mem chunks\n", count);
/* Second pass: print each chunk. */
831 mem_chunk = mem_chunks;
834 g_mem_chunk_print ((GMemChunk*) mem_chunk);
835 mem_chunk = mem_chunk->next;
/* (Body of the chunk-sweeping routine — its definition line is elided
 * in this copy.)  Walks the global `mem_chunks` list and runs
 * g_mem_chunk_clean() on every chunk, releasing drained areas. */
842 GRealMemChunk *mem_chunk;
844 mem_chunk = mem_chunks;
847 g_mem_chunk_clean ((GMemChunk*) mem_chunk);
848 mem_chunk = mem_chunk->next;
/* g_mem_chunk_compute_size: round `size` to a power of two — upward to
 * the next power of two, then back down to the lower one when that is
 * closer to the requested size. */
854 g_mem_chunk_compute_size (gulong size)
860 while (power_of_2 < size)
863 lower = power_of_2 >> 1;
/* Pick whichever neighbouring power of two is nearer to `size`. */
866 if ((size - lower) < (upper - size))
/* g_mem_chunk_area_compare: GCompareFunc ordering mem areas by the
 * address of their payload arrays (used by the chunk's GTree).
 * NOTE(review): the pointer difference is returned as gint — on LP64
 * platforms this can truncate/overflow for widely separated areas;
 * verify before relying on it in 64-bit builds. */
872 g_mem_chunk_area_compare (GMemArea *a,
875 return (a->mem - b->mem);
879 g_mem_chunk_area_search (GMemArea *a,
884 if (addr < &a->mem[a->index])