// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/msan.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
  }

  if ((rear_ - 2) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}
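
// Queue discipline, spelled out (derived from the code above, not a separate
// contract): the promotion queue grows downwards through to-space two words
// at a time, so after a successful insert the memory ascending from rear_
// holds
//
//   rear_[0] == size                               // pushed last
//   rear_[1] == reinterpret_cast<intptr_t>(target)
//
// Once pushing another entry would take rear_ below limit_,
// RelocateQueueHead() moves the head aside and subsequent entries spill into
// emergency_stack_, which the NULL check at the top then services first.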


template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}
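
// Size bookkeeping, for intuition: a SeqOneByteString stores one byte per
// character after its header, so (assuming SizeFor rounds the total up to
// pointer alignment) a five-character string occupies roughly
//
//   SeqOneByteString::SizeFor(5) == OBJECT_POINTER_ALIGN(kHeaderSize + 5)
//
// The CHECK_GE at the top keeps str.length() within String::kMaxLength so
// this int arithmetic cannot overflow.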


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationSpace retry_space) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRaw(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}
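
// Callers of AllocateRaw must treat the result as fallible and unwrap it
// explicitly; the canonical pattern (used by the string allocators above) is
//
//   HeapObject* result;
//   AllocationResult allocation = AllocateRaw(size, space, retry_space);
//   if (!allocation.To(&result)) return allocation;  // propagate the retry
//
// Only the CALL_AND_RETRY machinery at the bottom of this file turns a
// persistent retry into garbage collection and, ultimately, a fatal OOM.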


void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }

  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                    target->address()));
    }
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}
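
// Folding discipline (a summary of the two overloads above): an object is
// hashed as (offset within its page | space tag << kPageSizeBits), and every
// 32-bit value is fed into the running hash as two 16-bit halves, low half
// first, via the string hash step. Under --verify-predictable this yields a
// digest of the entire allocation sequence that can be compared across runs.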


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}
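
// Read as a promotion policy: an object is promoted only if it lies below
// the age mark, i.e. it was allocated before the last scavenge and has thus
// already survived one. Pages wholly below the age mark qualify outright; on
// the page containing the age mark, only addresses below the mark do.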


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space; all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that the object has the heap object tag.

  // These objects are never allocated in new space.
  DCHECK(type != MAP_TYPE);
  DCHECK(type != CODE_TYPE);
  DCHECK(type != ODDBALL_TYPE);
  DCHECK(type != CELL_TYPE);
  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
               ? OLD_POINTER_SPACE
               : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}
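
// Concrete instances of the classification above: HeapNumber and
// SeqOneByteString carry no outgoing pointers besides the map word and land
// in OLD_DATA_SPACE; ConsString, SlicedString, and JSObject land in
// OLD_POINTER_SPACE; SYMBOL_TYPE is special-cased into OLD_POINTER_SPACE.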


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent an invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                            obj->IsExternalString());
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}
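
// Overlap reasoning for MoveBlock above: when dst < src, a forward word copy
// never reads a slot it has already overwritten, and dst >= src + byte_size
// means the ranges are disjoint, so both branches of the first case are
// safe; the remaining case (dst inside the source range) is handed to
// MemMove, which tolerates arbitrary overlap.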


void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}
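
// Memory layout assumed by the checks above: a memento, when present, sits
// flush against its object,
//
//   [ object ................ ][ map word | allocation site ]
//   ^object_address            ^memento_address
//
// which is why a map-word peek plus a comparison against new-space top is
// enough to decide whether a valid AllocationMemento follows the object.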


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}
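
// The computation above recovers the owning Isolate without storing a back
// pointer: evaluating heap() on a dummy Isolate* at address 4 yields
// 4 + the offset of the heap field inside Isolate, so the whole expression
// reduces to
//
//   this - offsetof(Isolate, heap_)   // sketch; the field name heap_ is an
//                                     // assumption about Isolate's layout
//
// i.e. the address of the Isolate that embeds this Heap. The dummy address 4
// rather than 0 avoids null-pointer semantics.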


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),            \
                                      "allocation failure");                  \
    __allocation__ = FUNCTION_CALL;                                           \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
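
// Illustrative expansion site for CALL_HEAP_FUNCTION (a hypothetical
// Factory-style wrapper, not a definition from this file): the macro tries
// FUNCTION_CALL, retries once after collecting the failing space, then once
// more after a last-resort full GC under AlwaysAllocateScope, and finally
// reports fatal OOM.
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }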


void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(Smi::FromInt(0));
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(Smi::FromInt(0));
  set_instanceof_cache_function(Smi::FromInt(0));
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}
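
// Usage sketch (illustrative; mirrors the CALL_AND_RETRY macro above):
//
//   {
//     AlwaysAllocateScope scope(isolate);
//     AllocationResult allocation = heap->AllocateRaw(size, NEW_SPACE, space);
//     // While the scope is live, a failed new-space allocation falls
//     // through to retry_space instead of returning a retry (see the
//     // always_allocate() branch in AllocateRaw above).
//   }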


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}
}  // namespace v8::internal

#endif  // V8_HEAP_HEAP_INL_H_