ggc_mark_tree_hash_table (*(struct hash_table **) elt);
}
+/* Type-correct function to pass to ggc_add_root. It dereferences
+ ELT (which is really a char **) and passes the string it points
+ to on to ggc_mark_string. */
+
static void
ggc_mark_string_ptr (elt)
void *elt;
ggc_mark_string (*(char **)elt);
}
+/* Add BASE as a new garbage collection root. It is an array of
+ length NELT with each element SIZE bytes long. CB is a
+ function that will be called with a pointer to each element
+ of the array; it is the intention that CB call the appropriate
+ routine to mark gc-able memory for that element. */
+
void
ggc_add_root (base, nelt, size, cb)
void *base;
roots = x;
}
+/* Register an array of rtx as a GC root. */
+
void
ggc_add_rtx_root (base, nelt)
rtx *base;
ggc_add_root (base, nelt, sizeof(rtx), ggc_mark_rtx_ptr);
}
+/* Register an array of trees as a GC root. */
+
void
ggc_add_tree_root (base, nelt)
tree *base;
ggc_add_root (base, nelt, sizeof(tree), ggc_mark_tree_ptr);
}
-/* Add V (a varray full of trees) to the list of GC roots. */
+/* Register a varray of trees as a GC root. */
void
ggc_add_tree_varray_root (base, nelt)
ggc_mark_tree_varray_ptr);
}
-/* Add HT (a hash-table where ever key is a tree) to the list of GC
- roots. */
+/* Register a hash table of trees as a GC root. */
void
ggc_add_tree_hash_table_root (base, nelt)
ggc_mark_tree_hash_table_ptr);
}
+/* Register an array of strings as a GC root. */
+
void
ggc_add_string_root (base, nelt)
char **base;
ggc_add_root (base, nelt, sizeof (char *), ggc_mark_string_ptr);
}
+/* Remove the previously registered GC root at BASE. */
void
ggc_del_root (base)
abort();
}
+/* Iterate through all registered roots and mark each element. */
+
void
ggc_mark_roots ()
{
}
}
+/* R had not been previously marked, but has now been marked via
+ ggc_set_mark. Now recurse and process the children. */
+
void
ggc_mark_rtx_children (r)
rtx r;
}
}
+/* V had not been previously marked, but has now been marked via
+ ggc_set_mark. Now recurse and process the children. */
+
void
ggc_mark_rtvec_children (v)
rtvec v;
ggc_mark_rtx (RTVEC_ELT (v, i));
}
+/* T had not been previously marked, but has now been marked via
+ ggc_set_mark. Now recurse and process the children. */
+
void
ggc_mark_tree_children (t)
tree t;
hash_traverse (ht, ggc_mark_tree_hash_table_entry, /*info=*/0);
}
-/* Allocation wrappers. */
+/* Allocate a gc-able string. If CONTENTS is null, then the memory will
+ be uninitialized. If LENGTH is -1, then CONTENTS is assumed to be a
+ null-terminated string and the memory is sized accordingly. Otherwise,
+ the memory is filled with LENGTH bytes from CONTENTS. */
char *
ggc_alloc_string (contents, length)
static void sweep_pages PROTO ((void));
#ifdef GGC_POISON
-static void poison PROTO ((void *, size_t));
static void poison_pages PROTO ((void));
#endif
return base[L1][L2];
}
-
/* Set the page table entry for a page. */
+
static void
set_page_table_entry(p, entry)
void *p;
base[L1][L2] = entry;
}
-
/* Prints the page-entry for object size ORDER, for debugging. */
+
void
debug_print_page_list (order)
int order;
fflush (stdout);
}
-#ifdef GGC_POISON
-/* `Poisons' the region of memory starting at START and extending for
- LEN bytes. */
-static inline void
-poison (start, len)
- void *start;
- size_t len;
-{
- memset (start, 0xa5, len);
-}
-#endif
-
/* Allocate SIZE bytes of anonymous memory, preferably near PREF,
(if non-null). */
+
static inline char *
alloc_anon (pref, size)
char *pref ATTRIBUTE_UNUSED;
/* Allocate a new page for allocating objects of size 2^ORDER,
and return an entry for it. The entry is not added to the
appropriate page_table list. */
+
static inline struct page_entry *
alloc_page (order)
unsigned order;
}
else
{
- /* Actually allocate the memory, using mmap. */
+ /* Actually allocate the memory. */
page = alloc_anon (NULL, entry_size);
}
return entry;
}
+/* For a page that is no longer needed, put it on the free page list. */
-/* Free a page when it's no longer needed. */
static inline void
free_page (entry)
page_entry *entry;
G.free_pages = entry;
}
+/* Release the free page cache to the system. */
-/* Release the page cache to the system. */
static inline void
release_pages ()
{
G.free_pages = NULL;
}
-
/* This table provides a fast way to determine ceil(log_2(size)) for
allocation requests. The minimum allocation size is four bytes. */
+
static unsigned char const size_lookup[257] =
{
2, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4,
/* Allocate a chunk of memory of SIZE bytes. If ZERO is non-zero, the
memory is zeroed; otherwise, its contents are undefined. */
+
void *
ggc_alloc_obj (size, zero)
size_t size;
#ifdef GGC_POISON
/* `Poison' the entire allocated object before zeroing the requested area,
so that bytes beyond the end, if any, will not necessarily be zero. */
- poison (result, 1 << order);
+ memset (result, 0xaf, 1 << order);
#endif
+
if (zero)
memset (result, 0, size);
return result;
}
-
-/* If P is not marked, marks it and returns 0. Otherwise returns 1.
+/* If P is not marked, marks it and returns false. Otherwise returns true.
P must have been allocated by the GC allocator; it mustn't point to
static objects, stack variables, or memory allocated with malloc. */
+
int
ggc_set_mark (p)
void *p;
return 0;
}
+/* Mark P, but check first that it was allocated by the collector. */
+
void
ggc_mark_if_gcable (p)
void *p;
ggc_set_mark (p);
}
+/* Return the size of the gc-able object P. */
+
size_t
ggc_get_size (p)
void *p;
}
\f
/* Initialize the ggc-mmap allocator. */
+
void
init_ggc ()
{
ggc_add_string_root (&empty_string, 1);
}
+/* Increment the `GC context'. Objects allocated in an outer context
+ are never freed, eliminating the need to register their roots. */
void
ggc_push_context ()
abort ();
}
+/* Decrement the `GC context'. All objects allocated since the
+ previous ggc_push_context are migrated to the outer context. */
void
ggc_pop_context ()
}
}
\f
+/* Unmark all objects. */
+
static inline void
clear_marks ()
{
}
}
+/* Free all empty pages. Partially empty pages need no attention
+ because the `mark' bit doubles as an `unused' bit. */
+
static inline void
sweep_pages ()
{
}
#ifdef GGC_POISON
+/* Clobber all free objects. */
+
static inline void
poison_pages ()
{
word = i / HOST_BITS_PER_LONG;
bit = i % HOST_BITS_PER_LONG;
if (((p->in_use_p[word] >> bit) & 1) == 0)
- poison (p->page + i * size, size);
+ memset (p->page + i * size, 0xa5, size);
}
}
}
}
#endif
+/* Top level mark-and-sweep routine. */
+
void
ggc_collect ()
{
clear_marks ();
ggc_mark_roots ();
- sweep_pages ();
#ifdef GGC_POISON
poison_pages ();
#endif
+ sweep_pages ();
+
G.allocated_last_gc = G.allocated;
if (G.allocated_last_gc < GGC_MIN_LAST_ALLOCATED)
G.allocated_last_gc = GGC_MIN_LAST_ALLOCATED;