// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    DCHECK(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}

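// Note that each queue entry occupies two words (the target pointer and the
// size), written downward from the end of a to-space page toward the
// allocation top; the (rear_ - 2) < limit_ check above detects when the next
// entry would collide with the area the scavenger is still allocating into.
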
void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
           heap_->new_space()->active_space()->current_page()->address() ==
               GetHeadPage()->address();
}

template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}

AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}

template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}

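// The runtime IsOneByte() check above selects between the <true> (one-byte)
// and <false> (two-byte) specializations; this untemplated overload only
// performs the dispatch and allocates nothing itself.
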
AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}

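// As a rough size sketch (an illustration, not authoritative): with a
// pointer-aligned header, SeqOneByteString::SizeFor(3) is the header plus 3
// character bytes rounded up to kPointerSize, so a 3-character string on a
// 64-bit build pays 8 bytes for its payload.
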
AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}

AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationSpace retry_space) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRaw(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}

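// Every heap allocation funnels through AllocateRaw above. Failure is
// reported as AllocationResult::Retry(space) rather than by any C++ error
// mechanism; callers such as the CALL_AND_RETRY machinery below test
// IsRetry(), collect garbage in the indicated space, and try again.
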
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}

void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }

  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                    target->address()));
    }
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}

void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}

void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}

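// Each 32-bit input is folded into the running hash as two 16-bit
// "characters" via the string hash core. Under --verify-predictable, two runs
// of the same program can then be compared digest by digest to find where
// their allocation sequences first diverge.
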
void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}

void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}

bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }

bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}

bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}

bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}

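// Pages carrying NEW_SPACE_BELOW_AGE_MARK hold objects allocated before the
// last scavenge; only such objects (or, on the page containing the age mark,
// the objects below the mark) have survived a scavenge and are candidates
// for promotion.
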
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}

OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
}

AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space. We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  DCHECK(type != MAP_TYPE);
  DCHECK(type != CODE_TYPE);
  DCHECK(type != ODDBALL_TYPE);
  DCHECK(type != CELL_TYPE);
  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
               ? OLD_POINTER_SPACE
               : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}

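// For example, a sequential or external string carries no outgoing pointers
// besides its map word and so lands in old data space, while cons and sliced
// strings (the indirect representations) must go to old pointer space.
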
bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                            (obj->IsExternalString() &&
                             ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}

void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}

void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}

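// The explicit word-by-word loop is only used when the destination does not
// start inside the source range, so a forward copy cannot clobber source
// words before they are read; otherwise the overlap-safe MemMove is used.
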
void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }

AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always
  // performing the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}

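// AllocationMementos are small objects that generated code plants directly
// behind certain freshly allocated objects; locating one lets the GC route
// survival feedback to the owning AllocationSite (see
// UpdateAllocationSiteFeedback below).
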
void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}

void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer. A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}

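// The slow path copies the object and installs a forwarding address in its
// map word, so any later slot still pointing at the same from-space object
// is handled by the fast path above.
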
bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}

Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}

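// The expression above is a hand-rolled offsetof: evaluating heap() on a
// dummy Isolate* at address 4 yields 4 plus the offset of the heap inside
// Isolate, so the subtraction (with the compensating + 4) maps this Heap
// back to its enclosing Isolate.
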
// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.
//
// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),            \
                                      "allocation failure");                  \
    __allocation__ = FUNCTION_CALL;                                           \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)

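// Usage sketch (a hypothetical call site, not defined in this header): a
// factory function returning a handle would wrap a raw allocator like
//
//   Handle<FixedArray> NewFixedArray(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }
//
// The expansion tries the allocation, retries after a GC in the failing
// space, retries once more after collecting all available garbage under an
// AlwaysAllocateScope, and finally calls FatalProcessOutOfMemory.
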
void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}

void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}

// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}

void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}

void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}

void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}

AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}

AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}

#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif

GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}

void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}

void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}

}
}  // namespace v8::internal

#endif  // V8_HEAP_HEAP_INL_H_