// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/cpu-profiler.h"
#include "src/heap/heap.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/heap-profiler.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

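// Each promotion queue entry occupies two words pushed below rear_: first the
// target object pointer, then its size. The queue grows downward through new
// space. Once guard_ is active and the next entry would cross limit_, the
// queue head is relocated and the entry goes onto emergency_stack_ instead.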
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    DCHECK(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    DCHECK(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


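// Turns on guard_ once the page new space is currently allocating on is the
// same page that holds the queue head, so that insert() above starts checking
// rear_ against limit_ before writing new entries.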
void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
           heap_->new_space()->active_space()->current_page()->address() ==
               GetHeadPage()->address();
}


template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  {
    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


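// Allocates size_in_bytes in the requested space. If a new-space allocation
// fails while an AlwaysAllocateScope is active, the request is retried in
// retry_space. A failed old-generation allocation sets old_gen_exhausted_ so
// the next GC can react to the pressure.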
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationSpace retry_space) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && allocation.IsRetry() && retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRaw(size_in_bytes);
    } else {
      // Large code objects are allocated in large object space.
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    DCHECK(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}


void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }

  if (isolate_->logger()->is_logging_code_events() ||
      isolate_->cpu_profiler()->is_profiling()) {
    if (target->IsSharedFunctionInfo()) {
      PROFILE(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                    target->address()));
    }
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if ((FLAG_dump_allocations_digest_at_alloc > 0) &&
        (--dump_allocations_hash_countdown_ == 0)) {
      dump_allocations_hash_countdown_ = FLAG_dump_allocations_digest_at_alloc;
      PrintAlloctionsHash();
    }
  }
}


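// For --verify-predictable, an object is hashed by its offset within its page
// combined with the identity of its space, so the digest does not depend on
// the absolute addresses the OS happens to hand out.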
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::PrintAlloctionsHash() {
  uint32_t hash = StringHasher::GetHashCore(raw_allocations_hash_);
  PrintF("\n### Allocations = %u, hash = 0x%08x\n", allocations_count_, hash);
}


void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


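// Promotes objects that lie below the new-space age mark, i.e. objects that
// were already allocated when the age mark was last set and have therefore
// survived a previous scavenge.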
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  DCHECK(type != MAP_TYPE);
  DCHECK(type != CODE_TYPE);
  DCHECK(type != ODDBALL_TYPE);
  DCHECK(type != CELL_TYPE);
  DCHECK(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
               ? OLD_POINTER_SPACE
               : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent an invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
                            (obj->IsExternalString() &&
                             ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


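// Moves byte_size bytes from src to dst. When the destination lies before the
// source or entirely past it, a forward word-by-word copy cannot clobber
// unread source words, so the words are copied directly; otherwise MemMove is
// used, which handles overlapping ranges.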
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) { ScavengeObject(p, *p); }


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always
  // performing the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  DCHECK(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  DCHECK(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    DCHECK(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge
  DCHECK(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


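// Recovers the owning Isolate from a Heap*. The Heap is embedded directly in
// the Isolate, so the Isolate address is this minus the offset of the heap
// field; that offset is obtained from a dummy Isolate* placed at address 4
// (the 4 is added back at the end).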
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE) \
  if (__allocation__.To(&__object__)) {                   \
    DCHECK(__object__ != (ISOLATE)->heap()->exception()); \
    RETURN_VALUE;                                         \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)    \
  do {                                                                        \
    AllocationResult __allocation__ = FUNCTION_CALL;                          \
    Object* __object__ = NULL;                                                \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),            \
                                      "allocation failure");                  \
    __allocation__ = FUNCTION_CALL;                                           \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();        \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");          \
    {                                                                         \
      AlwaysAllocateScope __scope__(ISOLATE);                                 \
      __allocation__ = FUNCTION_CALL;                                         \
    }                                                                         \
    RETURN_OBJECT_UNLESS_RETRY(ISOLATE, RETURN_VALUE)                         \
    /* TODO(1181417): Fix this. */                                            \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true); \
    RETURN_EMPTY;                                                             \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, RETURN_VALUE, \
                              RETURN_EMPTY)                         \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL,                               \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)


void ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  DCHECK(heap_->always_allocate_scope_depth_ == 0);
}


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() { heap_->gc_callbacks_depth_--; }


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}
}  // namespace v8::internal

#endif  // V8_HEAP_HEAP_INL_H_