// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include <cmath>

#include "heap.h"
#include "heap-profiler.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

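// Added note: PromotionQueue::insert pushes a (target, size) pair as two
// words at rear_, which grows downwards through the to-space pages holding
// the queue. Once the queue head shares a page with the current allocation
// page (guard_ is set) and the next entry would cross limit_, the head is
// relocated and further entries spill into emergency_stack_.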
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


template<>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  // ASCII only check.
  return chars == str.length();
}


template<>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(
        Vector<const uint8_t>::cast(str), hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template<typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(
    T t, int chars, uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str,
    uint32_t hash_field) {
  if (str.length() > String::kMaxLength) {
    return isolate()->ThrowInvalidStringLength();
  }
  // Compute map and object size.
  Map* map = ascii_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqOneByteString::kHeaderSize,
              str.start(), str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  if (str.length() > String::kMaxLength) {
    return isolate()->ThrowInvalidStringLength();
  }
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());
  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);

  // Allocate string.
  HeapObject* result;
  { AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  OS::MemCopy(answer->address() + SeqTwoByteString::kHeaderSize,
              str.start(), str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyConstantPoolArray(ConstantPoolArray* src) {
  if (src->length() == 0) return src;
  return CopyConstantPoolArrayWithMap(src, src->map());
}


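// Added note: AllocateRaw is the central allocation entry point. A NEW_SPACE
// request that fails is redirected to retry_space when always_allocate() is
// in effect; otherwise the request is dispatched to the owning space. A
// failed old-generation allocation additionally sets old_gen_exhausted_.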
AllocationResult Heap::AllocateRaw(int size_in_bytes,
                                   AllocationSpace space,
                                   AllocationSpace retry_space) {
  ASSERT(AllowHandleAllocation::IsAllowed());
  ASSERT(AllowHeapAllocation::IsAllowed());
  ASSERT(gc_state_ == NOT_IN_GC);
  HeapProfiler* profiler = isolate_->heap_profiler();
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      AllowAllocationFailure::IsAllowed(isolate_) &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  HeapObject* object;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    allocation = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() &&
        allocation.IsRetry() &&
        retry_space != NEW_SPACE) {
      space = retry_space;
    } else {
      if (profiler->is_tracking_allocations() && allocation.To(&object)) {
        profiler->AllocationEvent(object->address(), size_in_bytes);
      }
      return allocation;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    allocation = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    allocation = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    allocation = cell_space_->AllocateRaw(size_in_bytes);
  } else if (PROPERTY_CELL_SPACE == space) {
    allocation = property_cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    allocation = map_space_->AllocateRaw(size_in_bytes);
  }
  if (allocation.IsRetry()) old_gen_exhausted_ = true;
  if (profiler->is_tracking_allocations() && allocation.To(&object)) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }
  return allocation;
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) {
  return new_space_.Contains(address);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldPointerSpace(Address address) {
  return old_pointer_space_->Contains(address);
}


bool Heap::InOldPointerSpace(Object* object) {
  return InOldPointerSpace(reinterpret_cast<Address>(object));
}


bool Heap::InOldDataSpace(Address address) {
  return old_data_space_->Contains(address);
}


bool Heap::InOldDataSpace(Object* object) {
  return InOldDataSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != CELL_TYPE);
  ASSERT(type != PROPERTY_CELL_TYPE);

  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
    ASSERT(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to one of the old spaces
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space, old-data-space and old-pointer-space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  // 5) Short external strings can end up in old pointer space when a cons
  //    string in old pointer space is made external (String::MakeExternal).
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == TargetSpaceId(type);
    case OLD_POINTER_SPACE:
      return dst == src &&
          (dst == TargetSpaceId(type) || obj->IsFiller() ||
          (obj->IsExternalString() && ExternalString::cast(obj)->is_short()));
    case OLD_DATA_SPACE:
      return dst == src && dst == TargetSpaceId(type);
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case CELL_SPACE:
    case PROPERTY_CELL_SPACE:
    case LO_SPACE:
      return false;
    case INVALID_SPACE:
      break;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    OS::MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


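// Added note: when allocation-site tracking is on, an AllocationMemento may
// be allocated directly behind the object it describes, i.e. its map word
// sits at object->address() + object->Size(). FindAllocationMemento probes
// that slot and only trusts the candidate if the map word matches
// allocation_memento_map() and the slot is not the current new-space top.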
AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address,
                                last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  if (candidate->map() != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  ASSERT(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


void Heap::UpdateAllocationSiteFeedback(HeapObject* object,
                                        ScratchpadSlotMode mode) {
  Heap* heap = object->GetHeap();
  ASSERT(heap->InFromSpace(object));

  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type())) return;

  AllocationMemento* memento = heap->FindAllocationMemento(object);
  if (memento == NULL) return;

  if (memento->GetAllocationSite()->IncrementMementoFoundCount()) {
    heap->AddAllocationSiteToScratchpad(memento->GetAllocationSite(), mode);
  }
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(object->GetIsolate()->heap()->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(object->GetIsolate()->heap()->InFromSpace(*p));
    *p = dest;
    return;
  }

  UpdateAllocationSiteFeedback(object, IGNORE_SCRATCHPAD_SLOT);

  // AllocationMementos are unrooted and shouldn't survive a scavenge.
  ASSERT(object->map() != object->GetHeap()->allocation_memento_map());
  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space,
                          const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


int64_t Heap::AdjustAmountOfExternalAllocatedMemory(
    int64_t change_in_bytes) {
  ASSERT(HasBeenSetUp());
  int64_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes > 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an overflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
    int64_t amount_since_last_global_gc = PromotedExternalMemorySize();
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    } else {
      // Give up and reset the counters in case of an underflow.
      amount_of_external_allocated_memory_ = 0;
      amount_of_external_allocated_memory_at_last_global_gc_ = 0;
    }
  }
  if (FLAG_trace_external_memory) {
    PrintPID("%8.0f ms: ", isolate()->time_millis_since_init());
    PrintF("Adjust amount of external memory: delta=%6" V8_PTR_PREFIX "d KB, "
           "amount=%6" V8_PTR_PREFIX "d KB, since_gc=%6" V8_PTR_PREFIX "d KB, "
           "isolate=0x%08" V8PRIxPTR ".\n",
           static_cast<intptr_t>(change_in_bytes / KB),
           static_cast<intptr_t>(amount_of_external_allocated_memory_ / KB),
           static_cast<intptr_t>(PromotedExternalMemorySize() / KB),
           reinterpret_cast<intptr_t>(isolate()));
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


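// Added note: Heap is a field embedded in Isolate, so the owning Isolate can
// be recovered by subtracting the offset of that field from this. The offset
// is obtained without offsetof by asking a dummy Isolate* at address 4
// (non-null, to keep the pointer arithmetic well-behaved) where its heap()
// lives, then compensating for the 4 again.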
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY)    \
  if (!__allocation__.IsRetry()) {                                             \
    __object__ = __allocation__.ToObjectChecked();                             \
    if (__object__ == (ISOLATE)->heap()->exception()) { RETURN_EMPTY; }        \
    RETURN_VALUE;                                                              \
  }

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)     \
  do {                                                                         \
    AllocationResult __allocation__ = FUNCTION_CALL;                           \
    Object* __object__ = NULL;                                                 \
    RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY)        \
    (ISOLATE)->heap()->CollectGarbage(__allocation__.RetrySpace(),             \
                                      "allocation failure");                   \
    __allocation__ = FUNCTION_CALL;                                            \
    RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY)        \
    (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();         \
    (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");           \
    {                                                                          \
      AlwaysAllocateScope __scope__(ISOLATE);                                  \
      __allocation__ = FUNCTION_CALL;                                          \
    }                                                                          \
    RETURN_OBJECT_UNLESS_EXCEPTION(ISOLATE, RETURN_VALUE, RETURN_EMPTY)        \
      /* TODO(1181417): Fix this. */                                           \
    v8::internal::Heap::FatalProcessOutOfMemory("CALL_AND_RETRY_LAST", true);  \
    RETURN_EMPTY;                                                              \
  } while (false)

#define CALL_AND_RETRY_OR_DIE(                                             \
     ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)                   \
  CALL_AND_RETRY(                                                          \
      ISOLATE,                                                             \
      FUNCTION_CALL,                                                       \
      RETURN_VALUE,                                                        \
      RETURN_EMPTY)

#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                      \
  CALL_AND_RETRY_OR_DIE(ISOLATE,                                              \
                        FUNCTION_CALL,                                        \
                        return Handle<TYPE>(TYPE::cast(__object__), ISOLATE), \
                        return Handle<TYPE>())                                \


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL)  \
  CALL_AND_RETRY_OR_DIE(ISOLATE, FUNCTION_CALL, return, return)
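
// Illustrative sketch only (not part of the original file, names are made up):
// handle-returning code typically wraps a raw, retryable allocation such as
// Heap::AllocateFixedArray in CALL_HEAP_FUNCTION, roughly like this:
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int length) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(length),
//                        FixedArray);
//   }
//
// The macro returns a handle on success, retries the call after a targeted GC
// and again after a last-resort full GC under AlwaysAllocateScope, and
// otherwise aborts via FatalProcessOutOfMemory.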


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    ASSERT(heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    ASSERT(!heap_->InNewSpace(obj));
    ASSERT(obj != heap_->the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()), daf_(isolate) {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
  heap_->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_depth_--;
  ASSERT(heap_->always_allocate_scope_depth_ == 0);
}
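
// Illustrative sketch only (not part of the original file): AlwaysAllocateScope
// is a plain RAII guard. While one is live, a failed new-space allocation in
// Heap::AllocateRaw is redirected to its retry space instead of returning a
// retry result, e.g.
//
//   {
//     AlwaysAllocateScope always_allocating(isolate);
//     // allocations here must not be observed to fail
//   }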


#ifdef VERIFY_HEAP
NoWeakObjectVerificationScope::NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_++;
}


NoWeakObjectVerificationScope::~NoWeakObjectVerificationScope() {
  Isolate* isolate = Isolate::Current();
  isolate->heap()->no_weak_object_verification_scope_depth_--;
}
#endif


GCCallbacksScope::GCCallbacksScope(Heap* heap) : heap_(heap) {
  heap_->gc_callbacks_depth_++;
}


GCCallbacksScope::~GCCallbacksScope() {
  heap_->gc_callbacks_depth_--;
}


bool GCCallbacksScope::CheckReenter() {
  return heap_->gc_callbacks_depth_ == 1;
}


void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(heap_->SizeOfObjects())) / MB;
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_