#endif /* __GNUC__ */
/* The ultimately general inline allocation macro. Allocate an object */
-/* of size bytes, putting the resulting pointer in result. Tiny_fl is */
-/* a "tiny" free list array, which will be used first, if the size */
-/* is appropriate. If bytes is too large, we allocate with */
+/* of size granules, putting the resulting pointer in result. Tiny_fl */
+/* is a "tiny" free list array, which will be used first, if the size */
+/* is appropriate. If granules is too large, we allocate with */
/* default_expr instead. If we need to refill the free list, we use */
/* GC_generic_malloc_many with the indicated kind. */
/* Tiny_fl should be an array of GC_TINY_FREELISTS void * pointers. */
/* be initialized to (void *)0. */
/* We rely on much of this hopefully getting optimized away in the */
/* num_direct = 0 case. */
-/* Particularly if bytes is constant, this should generate a small */
+/* Particularly if granules is constant, this should generate a small */
/* amount of code. */
# define GC_FAST_MALLOC_GRANS(result,granules,tiny_fl,num_direct,\
kind,default_expr,init) \
/* Entry contains counter or NULL */ \
if ((GC_word)my_entry - 1 < num_direct) { \
/* Small counter value, not NULL */ \
- *my_fl = (ptr_t)my_entry + granules + 1; \
+ *my_fl = (char *)my_entry + (granules) + 1; \
result = default_expr; \
goto out; \
} else { \
/* Large counter or NULL */ \
GC_generic_malloc_many(((granules) == 0? GC_GRANULE_BYTES : \
- RAW_BYTES_FROM_INDEX(granules)), \
+ GC_RAW_BYTES_FROM_INDEX(granules)), \
kind, my_fl); \
my_entry = *my_fl; \
if (my_entry == 0) { \
- result = GC_oom_fn(bytes); \
+ result = GC_oom_fn((granules)*GC_GRANULE_BYTES); \
goto out; \
} \
} \
*my_fl = next; \
init; \
PREFETCH_FOR_WRITE(next); \
- GC_ASSERT(GC_size(result) >= bytes + EXTRA_BYTES); \
+ GC_ASSERT(GC_size(result) >= (granules)*GC_GRANULE_BYTES); \
GC_ASSERT((kind) == PTRFREE || ((GC_word *)result)[1] == 0); \
out: ; \
} \
/* a global array. */
# define GC_MALLOC_WORDS(result,n,tiny_fl) \
{ \
- size_t grans = WORDS_TO_WHOLE_GRANULES(n); \
+ size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \
GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
- NORMAL, GC_malloc(grans*GRANULE_BYTES), \
+ NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \
*(void **)result = 0); \
}
# define GC_MALLOC_ATOMIC_WORDS(result,n,tiny_fl) \
{ \
- size_t grans = WORDS_TO_WHOLE_GRANULES(n); \
+ size_t grans = GC_WORDS_TO_WHOLE_GRANULES(n); \
GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
- PTRFREE, GC_malloc_atomic(grans*GRANULE_BYTES), \
+ PTRFREE, GC_malloc_atomic(grans*GC_GRANULE_BYTES), \
/* no initialization */); \
}
/* And once more for two word initialized objects: */
# define GC_CONS(result, first, second, tiny_fl) \
{ \
- size_t grans = WORDS_TO_WHOLE_GRANULES(2); \
+ size_t grans = GC_WORDS_TO_WHOLE_GRANULES(2); \
GC_FAST_MALLOC_GRANS(result, grans, tiny_fl, 0, \
- NORMAL, GC_malloc(grans*GRANULE_BYTES), \
+ NORMAL, GC_malloc(grans*GC_GRANULE_BYTES), \
*(void **)result = (void *)(first)); \
((void **)(result))[1] = (void *)(second); \
}
*/
/*
- * We always set GRANULE_BYTES to twice the length of a pointer.
+ * We always set GC_GRANULE_BYTES to twice the length of a pointer.
* This means that all allocation requests are rounded up to the next
* multiple of 16 on 64-bit architectures or 8 on 32-bit architectures.
* This appears to be a reasonable compromise between fragmentation overhead
#if GC_GRANULE_WORDS == 2
# define GC_WORDS_TO_GRANULES(n) ((n)>>1)
#else
-# define GC_WORDS_TO_GRANULES(n) ((n)*sizeof(void *)/GRANULE_BYTES)
+# define GC_WORDS_TO_GRANULES(n) ((n)*sizeof(void *)/GC_GRANULE_BYTES)
#endif
-/* A "tiny" free list header contains TINY_FREELISTS pointers to */
+/* A "tiny" free list header contains GC_TINY_FREELISTS pointers to */
# endif
#endif /* !GC_TINY_FREELISTS */
-/* The ith free list corresponds to size i*GRANULE_BYTES */
+/* The ith free list corresponds to size i*GC_GRANULE_BYTES */
/* Internally to the collector, the index can be computed with */
/* ROUNDED_UP_GRANULES. Externally, we don't know whether */
/* DONT_ADD_BYTE_AT_END is set, but the client should know. */
/* Convert a free list index to the actual size of objects */
/* on that list, including extra space we added. Not an */
/* inverse of the above. */
-#define RAW_BYTES_FROM_INDEX(i) ((i) * GC_GRANULE_BYTES)
+#define GC_RAW_BYTES_FROM_INDEX(i) ((i) * GC_GRANULE_BYTES)
#endif /* GC_TINY_FL_H */