{
IF_CANCEL(int cancel_state;)
+ GC_ASSERT(I_HOLD_LOCK());
if (GC_dont_gc) return;
+
DISABLE_CANCEL(cancel_state);
if (GC_incremental && GC_collection_in_progress()) {
int i;
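/* For orientation, a rough sketch of the machinery these added checks */
/* rely on. The shipped definitions live in gc_priv.h and gc_locks.h   */
/* and vary with platform and build options, so treat this as an       */
/* approximation, not the real code:                                   */
#ifdef GC_ASSERTIONS
  /* Abort loudly if the condition fails (assertion-enabled builds). */
# define GC_ASSERT(expr) \
            do { if (!(expr)) ABORT("assertion failure"); } while (0)
#else
# define GC_ASSERT(expr) /* compiled away otherwise */
#endif
#ifdef THREADS
  /* Nonzero iff the current thread owns the allocator (GC) lock. */
# define I_HOLD_LOCK() \
            (GC_lock_holder == NUMERIC_THREAD_ID(pthread_self()))
#else
# define I_HOLD_LOCK() TRUE /* single-threaded: trivially held */
#endif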
* Make sure the object free list for size gran (in granules) is not empty.
* Return a pointer to the first object on the free list.
* The object MUST BE REMOVED FROM THE FREE LIST BY THE CALLER.
- * Assumes we hold the allocator lock.
*/
GC_INNER ptr_t GC_allocobj(size_t gran, int kind)
{
void ** flh = &(GC_obj_kinds[kind].ok_freelist[gran]);
GC_bool tried_minor = FALSE;
GC_bool retry = FALSE;
+ GC_ASSERT(I_HOLD_LOCK());
if (gran == 0) return(0);
while (*flh == 0) {
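/* Note the pattern in the hunk above: the prose precondition          */
/* ("Assumes we hold the allocator lock") is removed from the comment  */
/* block and replaced by GC_ASSERT(I_HOLD_LOCK()) in the function      */
/* body, so the contract is checked at run time in assertion-enabled   */
/* builds instead of merely documented.                                */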
{
word * result = (word *)((oh *)p + 1);
+ GC_ASSERT(I_HOLD_LOCK());
GC_ASSERT(GC_size(p) >= sizeof(oh) + sz);
GC_ASSERT(!(SMALL_OBJ(sz) && CROSSES_HBLK((ptr_t)p, sz)));
# ifdef KEEP_BACK_PTRS
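/* The (oh *)p + 1 arithmetic above reflects the debug object layout:  */
/* each debug-mode object starts with an "oh" header (allocation site, */
/* requested size and, under KEEP_BACK_PTRS, back-pointer data), and   */
/* the pointer handed back to the client is the address just past it.  */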
/* An allocation function for internal use. Normally internally */
/* allocated objects do not have debug information. But in this */
/* case, we need to make sure that all objects have debug headers. */
- /* We assume we already hold the GC lock. */
GC_INNER void * GC_debug_generic_malloc_inner(size_t lb, int k)
{
- void * result = GC_generic_malloc_inner(
- SIZET_SAT_ADD(lb, DEBUG_BYTES), k);
+ void * result;

- if (result == 0) {
+ GC_ASSERT(I_HOLD_LOCK());
+ result = GC_generic_malloc_inner(SIZET_SAT_ADD(lb, DEBUG_BYTES), k);
+ if (NULL == result) {
GC_err_printf("GC internal allocation (%lu bytes) returning NULL\n",
(unsigned long) lb);
return(0);
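/* Two details in the hunk above. The initializer is split into a      */
/* declaration plus an assignment so the lock assertion executes       */
/* before the allocation call; the code base keeps to C89, where       */
/* statements may not precede declarations in a block. And the         */
/* NULL == result form (rather than result == NULL) turns a mistyped   */
/* assignment into a compile error. A hedged usage sketch, with        */
/* LOCK/UNLOCK as in gc_locks.h and NORMAL as the object kind:         */
/*                                                                     */
/*     LOCK();                                                         */
/*     p = GC_debug_generic_malloc_inner(sizeof(*p), NORMAL);          */
/*     UNLOCK();                                                       */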
GC_INNER void * GC_debug_generic_malloc_inner_ignore_off_page(size_t lb,
int k)
{
- void * result = GC_generic_malloc_inner_ignore_off_page(
- SIZET_SAT_ADD(lb, DEBUG_BYTES), k);
+ void * result;

- if (result == 0) {
+ GC_ASSERT(I_HOLD_LOCK());
+ result = GC_generic_malloc_inner_ignore_off_page(
+ SIZET_SAT_ADD(lb, DEBUG_BYTES), k);
+ if (NULL == result) {
GC_err_printf("GC internal allocation (%lu bytes) returning NULL\n",
(unsigned long) lb);
return(0);
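/* Same transformation for the "ignore off page" variant: with this    */
/* allocation kind the caller promises to keep a pointer to (or near)  */
/* the beginning of the object while it is live, so the collector need */
/* not recognize interior pointers deep inside a large object.         */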
static GC_bool ensure_toggleref_capacity(int capacity_inc)
{
GC_ASSERT(capacity_inc >= 0);
+ GC_ASSERT(I_HOLD_LOCK());
if (NULL == GC_toggleref_arr) {
GC_toggleref_array_capacity = 32; /* initial capacity */
GC_toggleref_arr = (GCToggleRef *)GC_INTERNAL_MALLOC_IGNORE_OFF_PAGE(
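/* ensure_toggleref_capacity mutates the shared toggle-ref array, so   */
/* it too must run under the allocator lock; the new assertion states  */
/* that contract before the array is (re)allocated.                    */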
{
int i;
int ret;
- tsd * result = (tsd *)MALLOC_CLEAR(sizeof(tsd));
+ tsd * result;
+ GC_ASSERT(I_HOLD_LOCK());
/* A quick alignment check, since we need atomic stores */
GC_ASSERT((word)(&invalid_tse.next) % sizeof(tse *) == 0);
- if (0 == result) return ENOMEM;
+ result = (tsd *)MALLOC_CLEAR(sizeof(tsd));
+ if (NULL == result) return ENOMEM;
ret = pthread_mutex_init(&result->lock, NULL);
if (ret != 0) return ret;
for (i = 0; i < TS_CACHE_SIZE; ++i) {
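/* As in the earlier hunks, MALLOC_CLEAR is deferred until after the   */
/* assertions, so nothing is allocated before the lock check runs.     */
/* The function keeps the pthread_key_create-style convention of       */
/* returning 0 on success and an errno-style code (ENOMEM, or the      */
/* pthread_mutex_init result) on failure.                              */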