/*
 * Very simple linked-list based malloc()/free().
 */

#include <stdlib.h>
#include <string.h>		/* memset() for calloc() */

#include "malloc.h"		/* assumed home of struct arena_header, ARENA_TYPE_*, ARENA_SIZE_MASK */

struct free_arena_header __malloc_head = {
    {
        ARENA_TYPE_HEAD,
        0,
        &__malloc_head,
        &__malloc_head,
    },
    &__malloc_head,
    &__malloc_head
};
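
/*
 * The head node above is a sentinel closing two circular doubly-linked
 * lists: the all-blocks ring (a.next/a.prev, kept in address order) and
 * the free-blocks ring (next_free/prev_free).  Initially every link
 * points back at the head itself, i.e. both rings are empty.
 */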

extern void *__mem_end;		/* In argv.c */
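
/*
 * Seed the arena: everything between __mem_end and __heap_end becomes a
 * single free block, hooked onto both rings.
 */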
void __init_memory_arena(void)
{
    extern char __heap_end[];
    struct free_arena_header *fp;

    fp = (struct free_arena_header *)__mem_end;
    fp->a.type = ARENA_TYPE_FREE;
    fp->a.size = __heap_end - (char *)__mem_end;

    /* Insert into chains */
    fp->a.next = fp->a.prev = &__malloc_head;
    fp->next_free = fp->prev_free = &__malloc_head;
    __malloc_head.a.next = __malloc_head.a.prev = fp;
    __malloc_head.next_free = __malloc_head.prev_free = fp;
}
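
/*
 * Note that a.size counts the whole block, arena header included;
 * malloc() compensates by padding the request size before searching the
 * free ring.
 */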
static void *__malloc_from_block(struct free_arena_header *fp, size_t size)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;

    fsize = fp->a.size;

    /* We need the 2* to account for the larger requirements of a free block */
    if (fsize >= size + 2 * sizeof(struct arena_header)) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;

        nfp->a.type = ARENA_TYPE_FREE;
        nfp->a.size = fsize - size;
        fp->a.type = ARENA_TYPE_USED;
        fp->a.size = size;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;

        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        fp->a.type = ARENA_TYPE_USED;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }

    /* User data starts just past the arena header */
    return (void *)(&fp->a + 1);
}
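
/*
 * Worked example (assuming a 16-byte struct arena_header and
 * ARENA_SIZE_MASK == ~(uintptr_t)15): malloc(100) below pads the request
 * to (100 + 32 - 1) & ARENA_SIZE_MASK = 128.  Carving that out of a
 * 512-byte free block satisfies the split test (512 >= 128 + 32),
 * leaving a 128-byte used block and a 384-byte free block starting 128
 * bytes in.
 */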
void *malloc(size_t size)
{
    struct free_arena_header *fp;

    if (size == 0)
        return NULL;

    /* Add the obligatory arena header, and round up */
    size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;

    for (fp = __malloc_head.next_free; fp->a.type != ARENA_TYPE_HEAD;
         fp = fp->next_free) {
        if (fp->a.size >= size) {
            /* Found fit -- allocate out of this block */
            return __malloc_from_block(fp, size);
        }
    }

    /* Nothing found... need to request a block from the kernel */
    return NULL;		/* No kernel to get stuff from */
}
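
/*
 * free() is not part of this excerpt.  A minimal sketch of the matching
 * operation is shown here for illustration only: it recovers the arena
 * header sitting just before the user pointer and pushes the block back
 * onto the free ring.  A full version of this allocator would also
 * coalesce with physically adjacent free blocks (via the address-ordered
 * a.next/a.prev ring); that step is omitted here.
 */
void free(void *ptr)
{
    struct free_arena_header *ah;

    if (!ptr)
        return;

    /* malloc() returned &ah->a + 1, so step back over the header */
    ah = (struct free_arena_header *)((struct arena_header *)ptr - 1);

    ah->a.type = ARENA_TYPE_FREE;

    /* Insert at the front of the free ring */
    ah->next_free = __malloc_head.next_free;
    ah->prev_free = &__malloc_head;
    __malloc_head.next_free->prev_free = ah;
    __malloc_head.next_free = ah;
}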

void *calloc(size_t nmemb, size_t size)
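{
    /*
     * The body is truncated in this excerpt.  What follows is a minimal
     * sketch of the conventional implementation: allocate nmemb * size
     * bytes and zero them.  (Like the rest of this simple allocator, the
     * sketch does not guard against nmemb * size overflowing.)
     */
    void *ptr;
    size_t sz = nmemb * size;

    ptr = malloc(sz);
    if (ptr)
        memset(ptr, 0, sz);

    return ptr;
}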