// Entering an AlwaysAllocateScope raises the heap's always-allocate depth so
// that allocations made while the scope is live are expected to succeed
// rather than fail and trigger a collection. The matching decrement happens
// in the destructor, so scopes may nest (depth counts live scopes).
//
// NOTE(review): the previous DCHECK(always_allocate_scope_depth_ == 0) was
// deleted here (per the surrounding patch) because nested scopes are now
// legal -- e.g. a scope held for the duration of Heap::Scavenge().
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  heap_->always_allocate_scope_depth_++;
}
// Leaving the scope balances the constructor's increment. No
// DCHECK(always_allocate_scope_depth_ == 0) here (it was removed by the
// surrounding patch): with nesting allowed, the depth only returns to zero
// when the outermost scope exits, so asserting zero after every decrement
// would fire on any nested use.
AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
}
// NOTE(review): this span appears to splice two unrelated diff hunks. Lines
// through the VERIFY_HEAP check are the head of Heap::Scavenge(); the lines
// mentioning `size_in_bytes` and returning a HeapObject* cannot belong to the
// void Scavenge() -- they look like an allocation-retry routine from another
// function. The #ifdef VERIFY_HEAP also has no visible #endif. Confirm
// against the original patch before applying; code left byte-identical.
void Heap::Scavenge() {
RelocationLock relocation_lock(this);
+ // There are soft limits in the allocation code, designed to trigger a mark
+ // sweep collection by failing allocations. There is no sense in trying to
+ // trigger one during scavenge: scavenges allocation should always succeed.
+ AlwaysAllocateScope scope(isolate());
#ifdef VERIFY_HEAP
if (FLAG_verify_heap) VerifyNonPointerSpacePointers(this);
// If sweeper threads are active, wait for them at that point and steal
// elements from their free-lists.
HeapObject* object = WaitForSweeperThreadsAndRetryAllocation(size_in_bytes);
- if (object != NULL) return object;
+ return object;
}
// Try to expand the space and allocate in the new next page.