1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_HEAP_INL_H_
29 #define V8_HEAP_INL_H_
#include <cmath>    // acos/asin/atan/exp used by TranscendentalCache::SubCache::Calculate.
#include <cstring>  // memcpy/memmove used by the symbol allocators and MoveBlock.

#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"
43 void PromotionQueue::insert(HeapObject* target, int size) {
44 if (emergency_stack_ != NULL) {
45 emergency_stack_->Add(Entry(target, size));
49 if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
50 NewSpacePage* rear_page =
51 NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
52 ASSERT(!rear_page->prev_page()->is_anchor());
53 rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
54 ActivateGuardIfOnTheSamePage();
58 ASSERT(GetHeadPage() ==
59 Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));
61 if ((rear_ - 2) < limit_) {
63 emergency_stack_->Add(Entry(target, size));
68 *(--rear_) = reinterpret_cast<intptr_t>(target);
70 // Assert no overflow into live objects.
72 SemiSpace::AssertValidRange(HEAP->new_space()->top(),
73 reinterpret_cast<Address>(rear_));
78 void PromotionQueue::ActivateGuardIfOnTheSamePage() {
80 heap_->new_space()->active_space()->current_page()->address() ==
81 GetHeadPage()->address();
85 MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
86 PretenureFlag pretenure) {
87 // Check for ASCII first since this is the common case.
88 if (String::IsAscii(str.start(), str.length())) {
89 // If the string is ASCII, we do not need to convert the characters
90 // since UTF8 is backwards compatible with ASCII.
91 return AllocateStringFromAscii(str, pretenure);
93 // Non-ASCII and we need to decode.
94 return AllocateStringFromUtf8Slow(str, pretenure);
98 MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
100 uint32_t hash_field) {
101 unibrow::Utf8InputBuffer<> buffer(str.start(),
102 static_cast<unsigned>(str.length()));
103 return AllocateInternalSymbol(&buffer, chars, hash_field);
107 MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
108 uint32_t hash_field) {
109 if (str.length() > SeqAsciiString::kMaxLength) {
110 return Failure::OutOfMemoryException();
112 // Compute map and object size.
113 Map* map = ascii_symbol_map();
114 int size = SeqAsciiString::SizeFor(str.length());
118 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
119 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
120 : old_data_space_->AllocateRaw(size);
121 if (!maybe_result->ToObject(&result)) return maybe_result;
124 // String maps are all immortal immovable objects.
125 reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
126 // Set length and hash fields of the allocated string.
127 String* answer = String::cast(result);
128 answer->set_length(str.length());
129 answer->set_hash_field(hash_field);
130 SeqString::cast(answer)->set_symbol_id(0);
132 ASSERT_EQ(size, answer->Size());
134 // Fill in the characters.
135 memcpy(answer->address() + SeqAsciiString::kHeaderSize,
136 str.start(), str.length());
142 MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
143 uint32_t hash_field) {
144 if (str.length() > SeqTwoByteString::kMaxLength) {
145 return Failure::OutOfMemoryException();
147 // Compute map and object size.
148 Map* map = symbol_map();
149 int size = SeqTwoByteString::SizeFor(str.length());
153 { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
154 ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
155 : old_data_space_->AllocateRaw(size);
156 if (!maybe_result->ToObject(&result)) return maybe_result;
159 reinterpret_cast<HeapObject*>(result)->set_map(map);
160 // Set length and hash fields of the allocated string.
161 String* answer = String::cast(result);
162 answer->set_length(str.length());
163 answer->set_hash_field(hash_field);
164 SeqString::cast(answer)->set_symbol_id(0);
166 ASSERT_EQ(size, answer->Size());
168 // Fill in the characters.
169 memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
170 str.start(), str.length() * kUC16Size);
175 MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
176 return CopyFixedArrayWithMap(src, src->map());
180 MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
181 return CopyFixedDoubleArrayWithMap(src, src->map());
185 MaybeObject* Heap::AllocateRaw(int size_in_bytes,
186 AllocationSpace space,
187 AllocationSpace retry_space) {
188 ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
189 ASSERT(space != NEW_SPACE ||
190 retry_space == OLD_POINTER_SPACE ||
191 retry_space == OLD_DATA_SPACE ||
192 retry_space == LO_SPACE);
194 if (FLAG_gc_interval >= 0 &&
195 !disallow_allocation_failure_ &&
196 Heap::allocation_timeout_-- <= 0) {
197 return Failure::RetryAfterGC(space);
199 isolate_->counters()->objs_since_last_full()->Increment();
200 isolate_->counters()->objs_since_last_young()->Increment();
203 if (NEW_SPACE == space) {
204 result = new_space_.AllocateRaw(size_in_bytes);
205 if (always_allocate() && result->IsFailure()) {
212 if (OLD_POINTER_SPACE == space) {
213 result = old_pointer_space_->AllocateRaw(size_in_bytes);
214 } else if (OLD_DATA_SPACE == space) {
215 result = old_data_space_->AllocateRaw(size_in_bytes);
216 } else if (CODE_SPACE == space) {
217 result = code_space_->AllocateRaw(size_in_bytes);
218 } else if (LO_SPACE == space) {
219 result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
220 } else if (CELL_SPACE == space) {
221 result = cell_space_->AllocateRaw(size_in_bytes);
223 ASSERT(MAP_SPACE == space);
224 result = map_space_->AllocateRaw(size_in_bytes);
226 if (result->IsFailure()) old_gen_exhausted_ = true;
231 MaybeObject* Heap::NumberFromInt32(
232 int32_t value, PretenureFlag pretenure) {
233 if (Smi::IsValid(value)) return Smi::FromInt(value);
234 // Bypass NumberFromDouble to avoid various redundant checks.
235 return AllocateHeapNumber(FastI2D(value), pretenure);
239 MaybeObject* Heap::NumberFromUint32(
240 uint32_t value, PretenureFlag pretenure) {
241 if ((int32_t)value >= 0 && Smi::IsValid((int32_t)value)) {
242 return Smi::FromInt((int32_t)value);
244 // Bypass NumberFromDouble to avoid various redundant checks.
245 return AllocateHeapNumber(FastUI2D(value), pretenure);
249 void Heap::FinalizeExternalString(HeapObject* string) {
250 ASSERT(string->IsExternalString() || string->map()->has_external_resource());
252 if (string->IsExternalString()) {
253 v8::String::ExternalStringResourceBase** resource_addr =
254 reinterpret_cast<v8::String::ExternalStringResourceBase**>(
255 reinterpret_cast<byte*>(string) +
256 ExternalString::kResourceOffset -
259 // Dispose of the C++ object if it has not already been disposed.
260 if (*resource_addr != NULL) {
261 (*resource_addr)->Dispose();
262 *resource_addr = NULL;
265 JSObject *object = JSObject::cast(string);
266 Object *value = object->GetExternalResourceObject();
267 v8::Object::ExternalResource *resource = 0;
268 if (value->IsSmi()) {
269 resource = reinterpret_cast<v8::Object::ExternalResource*>(Internals::GetExternalPointerFromSmi(value));
270 } else if (value->IsForeign()) {
271 resource = reinterpret_cast<v8::Object::ExternalResource*>(Foreign::cast(value)->foreign_address());
280 MaybeObject* Heap::AllocateRawMap() {
282 isolate_->counters()->objs_since_last_full()->Increment();
283 isolate_->counters()->objs_since_last_young()->Increment();
285 MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
286 if (result->IsFailure()) old_gen_exhausted_ = true;
288 if (!result->IsFailure()) {
289 // Maps have their own alignment.
290 CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
291 static_cast<intptr_t>(kHeapObjectTag));
298 MaybeObject* Heap::AllocateRawCell() {
300 isolate_->counters()->objs_since_last_full()->Increment();
301 isolate_->counters()->objs_since_last_young()->Increment();
303 MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
304 if (result->IsFailure()) old_gen_exhausted_ = true;
309 bool Heap::InNewSpace(Object* object) {
310 bool result = new_space_.Contains(object);
311 ASSERT(!result || // Either not in new space
312 gc_state_ != NOT_IN_GC || // ... or in the middle of GC
313 InToSpace(object)); // ... or in to-space (where we allocate).
318 bool Heap::InNewSpace(Address addr) {
319 return new_space_.Contains(addr);
323 bool Heap::InFromSpace(Object* object) {
324 return new_space_.FromSpaceContains(object);
328 bool Heap::InToSpace(Object* object) {
329 return new_space_.ToSpaceContains(object);
333 bool Heap::OldGenerationAllocationLimitReached() {
334 if (!incremental_marking()->IsStopped()) return false;
335 return OldGenerationSpaceAvailable() < 0;
339 bool Heap::ShouldBePromoted(Address old_address, int object_size) {
340 // An object should be promoted if:
341 // - the object has survived a scavenge operation or
342 // - to space is already 25% full.
343 NewSpacePage* page = NewSpacePage::FromAddress(old_address);
344 Address age_mark = new_space_.age_mark();
345 bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
346 (!page->ContainsLimit(age_mark) || old_address < age_mark);
347 return below_mark || (new_space_.Size() + object_size) >=
348 (new_space_.EffectiveCapacity() >> 2);
352 void Heap::RecordWrite(Address address, int offset) {
353 if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
357 void Heap::RecordWrites(Address address, int start, int len) {
358 if (!InNewSpace(address)) {
359 for (int i = 0; i < len; i++) {
360 store_buffer_.Mark(address + start + i * kPointerSize);
366 OldSpace* Heap::TargetSpace(HeapObject* object) {
367 InstanceType type = object->map()->instance_type();
368 AllocationSpace space = TargetSpaceId(type);
369 return (space == OLD_POINTER_SPACE)
375 AllocationSpace Heap::TargetSpaceId(InstanceType type) {
376 // Heap numbers and sequential strings are promoted to old data space, all
377 // other object types are promoted to old pointer space. We do not use
378 // object->IsHeapNumber() and object->IsSeqString() because we already
379 // know that object has the heap object tag.
381 // These objects are never allocated in new space.
382 ASSERT(type != MAP_TYPE);
383 ASSERT(type != CODE_TYPE);
384 ASSERT(type != ODDBALL_TYPE);
385 ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);
387 if (type < FIRST_NONSTRING_TYPE) {
388 // There are four string representations: sequential strings, external
389 // strings, cons strings, and sliced strings.
390 // Only the latter two contain non-map-word pointers to heap objects.
391 return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
395 return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
400 void Heap::CopyBlock(Address dst, Address src, int byte_size) {
401 CopyWords(reinterpret_cast<Object**>(dst),
402 reinterpret_cast<Object**>(src),
403 byte_size / kPointerSize);
407 void Heap::MoveBlock(Address dst, Address src, int byte_size) {
408 ASSERT(IsAligned(byte_size, kPointerSize));
410 int size_in_words = byte_size / kPointerSize;
412 if ((dst < src) || (dst >= (src + byte_size))) {
413 Object** src_slot = reinterpret_cast<Object**>(src);
414 Object** dst_slot = reinterpret_cast<Object**>(dst);
415 Object** end_slot = src_slot + size_in_words;
417 while (src_slot != end_slot) {
418 *dst_slot++ = *src_slot++;
421 memmove(dst, src, byte_size);
426 void Heap::ScavengePointer(HeapObject** p) {
427 ScavengeObject(p, *p);
431 void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
432 ASSERT(HEAP->InFromSpace(object));
434 // We use the first word (where the map pointer usually is) of a heap
435 // object to record the forwarding pointer. A forwarding pointer can
436 // point to an old space, the code space, or the to space of the new
438 MapWord first_word = object->map_word();
440 // If the first word is a forwarding address, the object has already been
442 if (first_word.IsForwardingAddress()) {
443 HeapObject* dest = first_word.ToForwardingAddress();
444 ASSERT(HEAP->InFromSpace(*p));
449 // Call the slow part of scavenge object.
450 return ScavengeObjectSlow(p, object);
454 bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
455 const char* collector_reason = NULL;
456 GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
457 return CollectGarbage(space, collector, gc_reason, collector_reason);
461 MaybeObject* Heap::PrepareForCompare(String* str) {
462 // Always flatten small strings and force flattening of long strings
463 // after we have accumulated a certain amount we failed to flatten.
464 static const int kMaxAlwaysFlattenLength = 32;
465 static const int kFlattenLongThreshold = 16*KB;
467 const int length = str->length();
468 MaybeObject* obj = str->TryFlatten();
469 if (length <= kMaxAlwaysFlattenLength ||
470 unflattened_strings_length_ >= kFlattenLongThreshold) {
473 if (obj->IsFailure()) {
474 unflattened_strings_length_ += length;
480 intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
481 intptr_t change_in_bytes) {
482 ASSERT(HasBeenSetUp());
483 intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
484 if (change_in_bytes >= 0) {
486 if (amount > amount_of_external_allocated_memory_) {
487 amount_of_external_allocated_memory_ = amount;
489 intptr_t amount_since_last_global_gc =
490 amount_of_external_allocated_memory_ -
491 amount_of_external_allocated_memory_at_last_global_gc_;
492 if (amount_since_last_global_gc > external_allocation_limit_) {
493 CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
498 amount_of_external_allocated_memory_ = amount;
501 ASSERT(amount_of_external_allocated_memory_ >= 0);
502 return amount_of_external_allocated_memory_;
506 void Heap::SetLastScriptId(Object* last_script_id) {
507 roots_[kLastScriptIdRootIndex] = last_script_id;
511 Isolate* Heap::isolate() {
512 return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
513 reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
// In debug builds with --gc-greedy, force a GC check before every
// allocation attempt; a no-op in release builds.  (The two definitions
// are mutually exclusive via the DEBUG guard.)
#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif
// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.
//
// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.
#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space(),                   \
                                    "allocation failure");                \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)
// Wraps FUNCTION_CALL in CALL_AND_RETRY and converts the raw result
// into a Handle<TYPE>; yields an empty handle on unrecoverable failure.
// (CALL_AND_RETRY takes four arguments; the FUNCTION_CALL line was
// missing in the truncated source.)
#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                  \
  CALL_AND_RETRY(ISOLATE,                                                 \
                 FUNCTION_CALL,                                           \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),    \
                 return Handle<TYPE>())
// Variant of CALL_HEAP_FUNCTION for calls evaluated only for their side
// effects: retries FUNCTION_CALL under GC pressure and returns void.
#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)
579 inline bool Heap::allow_allocation(bool new_state) {
580 bool old = allocation_allowed_;
581 allocation_allowed_ = new_state;
588 void ExternalStringTable::AddString(String* string) {
589 ASSERT(string->IsExternalString());
590 if (heap_->InNewSpace(string)) {
591 new_space_strings_.Add(string);
593 old_space_strings_.Add(string);
598 void ExternalStringTable::AddObject(HeapObject* object) {
599 ASSERT(object->map()->has_external_resource());
600 if (heap_->InNewSpace(object)) {
601 new_space_strings_.Add(object);
603 old_space_strings_.Add(object);
608 void ExternalStringTable::Iterate(ObjectVisitor* v) {
609 if (!new_space_strings_.is_empty()) {
610 Object** start = &new_space_strings_[0];
611 v->VisitPointers(start, start + new_space_strings_.length());
613 if (!old_space_strings_.is_empty()) {
614 Object** start = &old_space_strings_[0];
615 v->VisitPointers(start, start + old_space_strings_.length());
620 // Verify() is inline to avoid ifdef-s around its calls in release
622 void ExternalStringTable::Verify() {
624 for (int i = 0; i < new_space_strings_.length(); ++i) {
625 ASSERT(heap_->InNewSpace(new_space_strings_[i]));
626 ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
628 for (int i = 0; i < old_space_strings_.length(); ++i) {
629 ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
630 ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
636 void ExternalStringTable::AddOldObject(HeapObject* object) {
637 ASSERT(object->IsExternalString() || object->map()->has_external_resource());
638 ASSERT(!heap_->InNewSpace(object));
639 old_space_strings_.Add(object);
643 void ExternalStringTable::ShrinkNewObjects(int position) {
644 new_space_strings_.Rewind(position);
645 if (FLAG_verify_heap) {
651 void Heap::ClearInstanceofCache() {
652 set_instanceof_cache_function(the_hole_value());
656 Object* Heap::ToBoolean(bool condition) {
657 return condition ? true_value() : false_value();
661 void Heap::CompletelyClearInstanceofCache() {
662 set_instanceof_cache_map(the_hole_value());
663 set_instanceof_cache_function(the_hole_value());
667 MaybeObject* TranscendentalCache::Get(Type type, double input) {
668 SubCache* cache = caches_[type];
670 caches_[type] = cache = new SubCache(type);
672 return cache->Get(input);
676 Address TranscendentalCache::cache_array_address() {
677 return reinterpret_cast<Address>(caches_);
681 double TranscendentalCache::SubCache::Calculate(double input) {
690 return fast_cos(input);
694 return fast_log(input);
696 return fast_sin(input);
698 return fast_tan(input);
700 return 0.0; // Never happens.
705 MaybeObject* TranscendentalCache::SubCache::Get(double input) {
709 Element e = elements_[hash];
710 if (e.in[0] == c.integers[0] &&
711 e.in[1] == c.integers[1]) {
712 ASSERT(e.output != NULL);
713 isolate_->counters()->transcendental_cache_hit()->Increment();
716 double answer = Calculate(input);
717 isolate_->counters()->transcendental_cache_miss()->Increment();
719 { MaybeObject* maybe_heap_number =
720 isolate_->heap()->AllocateHeapNumber(answer);
721 if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
723 elements_[hash].in[0] = c.integers[0];
724 elements_[hash].in[1] = c.integers[1];
725 elements_[hash].output = heap_number;
730 AlwaysAllocateScope::AlwaysAllocateScope() {
731 // We shouldn't hit any nested scopes, because that requires
732 // non-handle code to call handle code. The code still works but
733 // performance will degrade, so we want to catch this situation
735 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
736 HEAP->always_allocate_scope_depth_++;
740 AlwaysAllocateScope::~AlwaysAllocateScope() {
741 HEAP->always_allocate_scope_depth_--;
742 ASSERT(HEAP->always_allocate_scope_depth_ == 0);
746 LinearAllocationScope::LinearAllocationScope() {
747 HEAP->linear_allocation_scope_depth_++;
751 LinearAllocationScope::~LinearAllocationScope() {
752 HEAP->linear_allocation_scope_depth_--;
753 ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
#ifdef DEBUG
// Debug-only visitor: checks every heap pointer in [start, end) refers
// to an object inside the heap with a valid map.
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      ASSERT(HEAP->Contains(object));
      ASSERT(object->map()->IsMap());
    }
  }
}
#endif
770 double GCTracer::SizeOfHeapObjects() {
771 return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
776 DisallowAllocationFailure::DisallowAllocationFailure() {
777 old_state_ = HEAP->disallow_allocation_failure_;
778 HEAP->disallow_allocation_failure_ = true;
782 DisallowAllocationFailure::~DisallowAllocationFailure() {
783 HEAP->disallow_allocation_failure_ = old_state_;
789 AssertNoAllocation::AssertNoAllocation() {
790 old_state_ = HEAP->allow_allocation(false);
794 AssertNoAllocation::~AssertNoAllocation() {
795 HEAP->allow_allocation(old_state_);
799 DisableAssertNoAllocation::DisableAssertNoAllocation() {
800 old_state_ = HEAP->allow_allocation(true);
804 DisableAssertNoAllocation::~DisableAssertNoAllocation() {
805 HEAP->allow_allocation(old_state_);
810 AssertNoAllocation::AssertNoAllocation() { }
811 AssertNoAllocation::~AssertNoAllocation() { }
812 DisableAssertNoAllocation::DisableAssertNoAllocation() { }
813 DisableAssertNoAllocation::~DisableAssertNoAllocation() { }
818 } } // namespace v8::internal
820 #endif // V8_HEAP_INL_H_