// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include <cmath>

#include "src/heap.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/objects.h"
#include "src/platform.h"
#include "src/store-buffer.h"
#include "src/store-buffer-inl.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}
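
// Layout note (inferred from insert() above): each queue entry occupies two
// machine words pushed through a descending rear_ pointer, the target
// pointer first and then the size. The guard_ check keeps the
// downward-growing queue from running into its own unscavenged head when
// both share a page; in that case RelocateQueueHead() moves the head out of
// the way and the new entry goes onto the emergency stack instead.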


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


template<>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str,
    uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}


AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


AllocationResult Heap::AllocateRaw(int size_in_bytes,
                                   AllocationSpace space,
                                   AllocationSpace retry_space) {
  ASSERT(AllowHandleAllocation::IsAllowed());
  ASSERT(AllowHeapAllocation::IsAllowed());
  ASSERT(gc_state_ == NOT_IN_GC);
  HeapProfiler* profiler = isolate_->heap_profiler();
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() &&
        allocation.IsRetry() &&
        retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (profiler->is_tracking_allocations() && allocation.To(&object)) {
        profiler->AllocationEvent(object->address(), size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    allocation = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.IsRetry()) old_gen_exhausted_ = true;
  if (profiler->is_tracking_allocations() && allocation.To(&object)) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }
  return allocation;
}
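
// A typical call site follows the pattern already used by the string
// allocators above: attempt the allocation and, on failure, propagate the
// retry so a caller further up can trigger a GC. Sketch:
//
//   HeapObject* result;
//   AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
//   if (!allocation.To(&result)) return allocation;  // Retry in 'space'.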


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}
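
// The store buffer marks recorded above track old-generation slots that may
// point into new space, so a scavenge can update those pointers without
// scanning the whole old generation. Writes into new-space objects need no
// record because new space is scanned in full on every scavenge.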


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != CELL_TYPE);
  ASSERT(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    ASSERT(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}
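
// For example, by the rules above a cons or sliced string is an indirect
// representation holding pointers to other strings, so it is promoted to
// OLD_POINTER_SPACE, while a sequential or external string carries no traced
// pointers outside its map word and goes to OLD_DATA_SPACE.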


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate; they are skipped by
  //    incremental marking explicitly to prevent an invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src &&
          (dst == TargetSpaceId(type) || obj->IsFiller() ||
          (obj->IsExternalString() && ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}
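
// Note on MoveBlock() above: when dst lies entirely outside the source
// range, an ascending word-by-word copy can never clobber a source word
// before it is read, so the simple loop is safe. Otherwise dst falls inside
// [src, src + byte_size) and the copy is delegated to MemMove, which handles
// overlapping ranges correctly.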


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address,
                                last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is
  // another object of at least word size (the header map word) following it,
  // so it suffices to compare ptr and top here. Note that technically we do
  // not have to compare with the current top pointer of the from space page
  // during GC, since we always install filler objects above the top pointer
  // of a from space page when performing a garbage collection. However,
  // always performing the test makes it possible to have a single, unified
  // version of FindAllocationMemento that is used both by the GC and the
  // mutator.
  Address top = NewSpaceTop();
  ASSERT(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}
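
// Memory layout implied by the checks above (sketch): a memento, when
// present, sits immediately after the object it describes,
//
//   | object payload ... | memento map word | allocation site pointer |
//   ^ object_address     ^ memento_address
//
// The map-word comparison identifies a candidate, and the final comparison
// against the allocation top rejects a stale bit pattern sitting exactly at
// top that merely looks like a memento map.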


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  ASSERT(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type())) return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space,
                          const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}
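
// The arithmetic above recovers the enclosing Isolate without offsetof:
// pretending an Isolate lives at address 4 and calling heap() on it yields
// 4 + offset-of-the-heap-field, so subtracting that value and adding the
// fake base 4 back leaves 'this' minus the field offset, i.e. the address
// of the Isolate that embeds this Heap.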


// Calls the FUNCTION_CALL function and retries it (at most twice, for three
// attempts in total) to guarantee that any allocations performed during the
// call will succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __allocation__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                      \
  if (__allocation__.To(&__object__)) {                                        \
    ASSERT(__object__ != (ISOLATE)->heap()->exception());                      \
    RETURN_VALUE;                                                              \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)     \
  do {                                                                         \
    AllocationResult __allocation__ = FUNCTION_CALL;                           \
    Object* __object__ = NULL;                                                 \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                          \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),             \
                                      "allocation failure");                   \
    __allocation__ = FUNCTION_CALL;                                            \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                          \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();         \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");           \
    {                                                                          \
      AlwaysAllocateScope __scope__(ISOLATE);                                  \
      __allocation__ = FUNCTION_CALL;                                          \
    }                                                                          \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                          \
    /* TODO(1181417): Fix this. */                                             \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);  \
    RETURN_EMPTY;                                                              \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                             \
     ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                   \
  CALL_AND_RETRY(                                                          \
      ISOLATE,                                                             \
      FUNCTION_CALL,                                                       \
      RETURN_VALUE,                                                        \
      RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                              \
                        FUNCTION_CALL,                                        \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)  \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
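
// Usage sketch (hypothetical helper; the allocator call mirrors how
// factory-style code typically wraps raw heap allocators and is not defined
// in this file): CALL_HEAP_FUNCTION runs the GC-and-retry protocol above and
// returns a Handle on success.
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }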


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() {
  heap_->gc_callbacks_depth_--;
}


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_