[V8] Introduce a QML compilation mode
profile/ivi/qtjsbackend.git: src/3rdparty/v8/src/heap-inl.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "platform.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

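// The promotion queue lives at the high end of to-space and grows
// downwards towards the allocation top; each entry is two intptr_t
// words (target, size).  When the queue is about to run into freshly
// allocated objects (signalled by guard_), the head is relocated and
// further entries go to the emergency stack instead.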
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->area_end());
    ActivateGuardIfOnTheSamePage();
  }

  if (guard_) {
    ASSERT(GetHeadPage() ==
           Page::FromAllocationTop(reinterpret_cast<Address>(limit_)));

    if ((rear_ - 2) < limit_) {
      RelocateQueueHead();
      emergency_stack_->Add(Entry(target, size));
      return;
    }
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


void PromotionQueue::ActivateGuardIfOnTheSamePage() {
  guard_ = guard_ ||
      heap_->new_space()->active_space()->current_page()->address() ==
      GetHeadPage()->address();
}


MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  if (String::IsAscii(str.start(), str.length())) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromAscii(str, pretenure);
  }
  // Non-ASCII and we need to decode.
  return AllocateStringFromUtf8Slow(str, pretenure);
}


MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                  int chars,
                                  uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}


MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
                                       uint32_t hash_field) {
  if (str.length() > SeqAsciiString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = ascii_symbol_map();
  int size = SeqAsciiString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  // String maps are all immortal immovable objects.
  reinterpret_cast<HeapObject*>(result)->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  SeqString::cast(answer)->set_symbol_id(0);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
         str.start(), str.length());

  return answer;
}


MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
                                         uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = symbol_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  SeqString::cast(answer)->set_symbol_id(0);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
         str.start(), str.length() * kUC16Size);

  return answer;
}


MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


MaybeObject* Heap::NumberFromInt32(
    int32_t value, PretenureFlag pretenure) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value), pretenure);
}


MaybeObject* Heap::NumberFromUint32(
    uint32_t value, PretenureFlag pretenure) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value), pretenure);
}


void Heap::FinalizeExternalString(HeapObject* string) {
  ASSERT(string->IsExternalString() || string->map()->has_external_resource());

  if (string->IsExternalString()) {
    v8::String::ExternalStringResourceBase** resource_addr =
        reinterpret_cast<v8::String::ExternalStringResourceBase**>(
            reinterpret_cast<byte*>(string) +
            ExternalString::kResourceOffset -
            kHeapObjectTag);

    // Dispose of the C++ object if it has not already been disposed.
    if (*resource_addr != NULL) {
      (*resource_addr)->Dispose();
      *resource_addr = NULL;
    }
  } else {
    JSObject* object = JSObject::cast(string);
    Object* value = object->GetExternalResourceObject();
    v8::Object::ExternalResource* resource = NULL;
    if (value->IsSmi()) {
      resource = reinterpret_cast<v8::Object::ExternalResource*>(
          Internals::GetExternalPointerFromSmi(value));
    } else if (value->IsForeign()) {
      resource = reinterpret_cast<v8::Object::ExternalResource*>(
          Foreign::cast(value)->foreign_address());
    }
    if (resource != NULL) {
      resource->Dispose();
    }
  }
}


MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}


MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address addr) {
  return new_space_.Contains(addr);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}

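// Write-barrier bookkeeping: slots in the old generation that may now
// point into new space are recorded in the store buffer.  Slots inside
// new-space objects need no recording, because new space is scanned in
// full during every scavenge.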
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space, all
  // other object types are promoted to old pointer space.  We do not use
  // object->IsHeapNumber() and object->IsSeqString() because we already
  // know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);

  if (type < FIRST_NONSTRING_TYPE) {
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}


void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    memmove(dst, src, byte_size);
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(HEAP->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(HEAP->InFromSpace(*p));
    *p = dest;
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(space, collector, gc_reason, collector_reason);
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings and force flattening of long strings
  // after we have accumulated a certain amount we failed to flatten.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}


intptr_t Heap::AdjustAmountOfExternalAllocatedMemory(
    intptr_t change_in_bytes) {
  ASSERT(HasBeenSetUp());
  intptr_t amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    intptr_t amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags, "external memory allocation limit reached");
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}

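// Recover the Isolate that embeds this Heap by subtracting the byte
// offset of the Isolate's heap_ field from 'this'.  The offset is
// computed from a dummy Isolate pointer of 4 (rather than 0) to avoid
// arithmetic on a null pointer.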
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif

// Calls the FUNCTION_CALL function and retries it up to three times
// to guarantee that any allocations performed during the call will
// succeed if there's enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space(),                   \
                                    "allocation failure");                \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage("last resort gc");        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)       \
  CALL_AND_RETRY(ISOLATE,                                      \
                 FUNCTION_CALL,                                \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),  \
                 return Handle<TYPE>())


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)

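// Illustrative use of CALL_HEAP_FUNCTION (a sketch only; the real call
// sites are the Factory methods elsewhere in V8, and the helper name
// below is hypothetical):
//
//   Handle<FixedArray> NewFixedArrayExample(Isolate* isolate, int size) {
//     CALL_HEAP_FUNCTION(isolate,
//                        isolate->heap()->AllocateFixedArray(size),
//                        FixedArray);
//   }
//
// On success the macro returns a Handle<FixedArray>; on allocation
// failure it collects garbage and retries, aborting the process only
// if memory is truly exhausted.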
#ifdef DEBUG

inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::AddObject(HeapObject* object) {
  ASSERT(object->map()->has_external_resource());
  if (heap_->InNewSpace(object)) {
    new_space_strings_.Add(object);
  } else {
    old_space_strings_.Add(object);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_the_hole_value());
  }
#endif
}


void ExternalStringTable::AddOldObject(HeapObject* object) {
  ASSERT(object->IsExternalString() || object->map()->has_external_resource());
  ASSERT(!heap_->InNewSpace(object));
  old_space_strings_.Add(object);
}


void ExternalStringTable::ShrinkNewObjects(int position) {
  new_space_strings_.Rewind(position);
  if (FLAG_verify_heap) {
    Verify();
  }
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}

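// The transcendental cache memoizes the results of Math functions
// (sin, cos, tan, log, exp, acos, asin, atan): each sub-cache hashes
// the bit pattern of the input double and stores the input alongside
// the boxed HeapNumber result.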
MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return fast_cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return fast_log(input);
    case SIN:
      return fast_sin(input);
    case TAN:
      return fast_tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}


AlwaysAllocateScope::AlwaysAllocateScope() {
  // We shouldn't hit any nested scopes, because that requires
  // non-handle code to call handle code. The code still works but
  // performance will degrade, so we want to catch this situation
  // in debug mode.
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
  HEAP->always_allocate_scope_depth_++;
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  HEAP->always_allocate_scope_depth_--;
  ASSERT(HEAP->always_allocate_scope_depth_ == 0);
}


LinearAllocationScope::LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_++;
}


LinearAllocationScope::~LinearAllocationScope() {
  HEAP->linear_allocation_scope_depth_--;
  ASSERT(HEAP->linear_allocation_scope_depth_ >= 0);
}


#ifdef DEBUG
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      ASSERT(HEAP->Contains(object));
      ASSERT(object->map()->IsMap());
    }
  }
}
#endif


double GCTracer::SizeOfHeapObjects() {
  return (static_cast<double>(HEAP->SizeOfObjects())) / MB;
}


#ifdef DEBUG
DisallowAllocationFailure::DisallowAllocationFailure() {
  old_state_ = HEAP->disallow_allocation_failure_;
  HEAP->disallow_allocation_failure_ = true;
}


DisallowAllocationFailure::~DisallowAllocationFailure() {
  HEAP->disallow_allocation_failure_ = old_state_;
}
#endif


#ifdef DEBUG
AssertNoAllocation::AssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(false);
}


AssertNoAllocation::~AssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}


DisableAssertNoAllocation::DisableAssertNoAllocation() {
  old_state_ = HEAP->allow_allocation(true);
}


DisableAssertNoAllocation::~DisableAssertNoAllocation() {
  HEAP->allow_allocation(old_state_);
}

#else

AssertNoAllocation::AssertNoAllocation() { }
AssertNoAllocation::~AssertNoAllocation() { }
DisableAssertNoAllocation::DisableAssertNoAllocation() { }
DisableAssertNoAllocation::~DisableAssertNoAllocation() { }

#endif


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_