// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_HEAP_INL_H_
#define V8_HEAP_INL_H_

#include "heap.h"
#include "isolate.h"
#include "list-inl.h"
#include "objects.h"
#include "v8-counters.h"
#include "store-buffer.h"
#include "store-buffer-inl.h"

namespace v8 {
namespace internal {

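// The promotion queue is stored at the end of to-space and grows
// downwards: entries are pushed with *(--rear_). When rear_ reaches the
// start of a new-space page, the queue continues at the body limit of
// the preceding page.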
void PromotionQueue::insert(HeapObject* target, int size) {
  if (NewSpacePage::IsAtStart(reinterpret_cast<Address>(rear_))) {
    NewSpacePage* rear_page =
        NewSpacePage::FromAddress(reinterpret_cast<Address>(rear_));
    ASSERT(!rear_page->prev_page()->is_anchor());
    rear_ = reinterpret_cast<intptr_t*>(rear_page->prev_page()->body_limit());
  }
  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
  // Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(HEAP->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


int Heap::MaxObjectSizeInPagedSpace() {
  return Page::kMaxHeapObjectSize;
}


MaybeObject* Heap::AllocateStringFromUtf8(Vector<const char> str,
                                          PretenureFlag pretenure) {
  // Check for ASCII first since this is the common case.
  if (String::IsAscii(str.start(), str.length())) {
    // If the string is ASCII, we do not need to convert the characters
    // since UTF8 is backwards compatible with ASCII.
    return AllocateStringFromAscii(str, pretenure);
  }
  // The string is not ASCII, so it needs full UTF-8 decoding.
  return AllocateStringFromUtf8Slow(str, pretenure);
}


MaybeObject* Heap::AllocateSymbol(Vector<const char> str,
                                  int chars,
                                  uint32_t hash_field) {
  unibrow::Utf8InputBuffer<> buffer(str.start(),
                                    static_cast<unsigned>(str.length()));
  return AllocateInternalSymbol(&buffer, chars, hash_field);
}


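// Symbols are long-lived, so they are allocated directly in old data
// space (or in large object space when they exceed the paged-space
// object size limit), never in new space.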
MaybeObject* Heap::AllocateAsciiSymbol(Vector<const char> str,
                                       uint32_t hash_field) {
  if (str.length() > SeqAsciiString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = ascii_symbol_map();
  int size = SeqAsciiString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  SeqString::cast(answer)->set_symbol_id(0);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqAsciiString::kHeaderSize,
         str.start(), str.length());

  return answer;
}


MaybeObject* Heap::AllocateTwoByteSymbol(Vector<const uc16> str,
                                         uint32_t hash_field) {
  if (str.length() > SeqTwoByteString::kMaxLength) {
    return Failure::OutOfMemoryException();
  }
  // Compute map and object size.
  Map* map = symbol_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  Object* result;
  { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
                   ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
                   : old_data_space_->AllocateRaw(size);
    if (!maybe_result->ToObject(&result)) return maybe_result;
  }

  reinterpret_cast<HeapObject*>(result)->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);
  SeqString::cast(answer)->set_symbol_id(0);

  ASSERT_EQ(size, answer->Size());

  // Fill in the characters.
  memcpy(answer->address() + SeqTwoByteString::kHeaderSize,
         str.start(), str.length() * kUC16Size);

  return answer;
}


MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
  return CopyFixedArrayWithMap(src, src->map());
}


MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


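// Central raw allocation routine: allocates in `space` first and, when
// new-space allocation fails while allocation must not fail
// (always_allocate()), falls back to `retry_space` in the old
// generation. A failed old-generation allocation marks the old
// generation as exhausted.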
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
                               AllocationSpace space,
                               AllocationSpace retry_space) {
  ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
  ASSERT(space != NEW_SPACE ||
         retry_space == OLD_POINTER_SPACE ||
         retry_space == OLD_DATA_SPACE ||
         retry_space == LO_SPACE);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 &&
      !disallow_allocation_failure_ &&
      Heap::allocation_timeout_-- <= 0) {
    return Failure::RetryAfterGC(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result;
  if (NEW_SPACE == space) {
    result = new_space_.AllocateRaw(size_in_bytes);
    if (always_allocate() && result->IsFailure()) {
      space = retry_space;
    } else {
      return result;
    }
  }

  if (OLD_POINTER_SPACE == space) {
    result = old_pointer_space_->AllocateRaw(size_in_bytes);
  } else if (OLD_DATA_SPACE == space) {
    result = old_data_space_->AllocateRaw(size_in_bytes);
  } else if (CODE_SPACE == space) {
    result = code_space_->AllocateRaw(size_in_bytes);
  } else if (LO_SPACE == space) {
    result = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (CELL_SPACE == space) {
    result = cell_space_->AllocateRaw(size_in_bytes);
  } else {
    ASSERT(MAP_SPACE == space);
    result = map_space_->AllocateRaw(size_in_bytes);
  }
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


MaybeObject* Heap::NumberFromInt32(int32_t value) {
  if (Smi::IsValid(value)) return Smi::FromInt(value);
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastI2D(value));
}


MaybeObject* Heap::NumberFromUint32(uint32_t value) {
  if (static_cast<int32_t>(value) >= 0 &&
      Smi::IsValid(static_cast<int32_t>(value))) {
    return Smi::FromInt(static_cast<int32_t>(value));
  }
  // Bypass NumberFromDouble to avoid various redundant checks.
  return AllocateHeapNumber(FastUI2D(value));
}


void Heap::FinalizeExternalString(String* string) {
  ASSERT(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) +
          ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
  }

  // Clear the resource pointer in the string.
  *resource_addr = NULL;
}


MaybeObject* Heap::AllocateRawMap() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = map_space_->AllocateRaw(Map::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
#ifdef DEBUG
  if (!result->IsFailure()) {
    // Maps have their own alignment.
    CHECK((reinterpret_cast<intptr_t>(result) & kMapAlignmentMask) ==
          static_cast<intptr_t>(kHeapObjectTag));
  }
#endif
  return result;
}


MaybeObject* Heap::AllocateRawCell() {
#ifdef DEBUG
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif
  MaybeObject* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize);
  if (result->IsFailure()) old_gen_exhausted_ = true;
  return result;
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  ASSERT(!result ||                  // Either not in new space
         gc_state_ != NOT_IN_GC ||   // ... or in the middle of GC
         InToSpace(object));         // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address addr) {
  return new_space_.Contains(addr);
}


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


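// The old-generation allocation limit is only enforced while incremental
// marking is stopped; once marking is in progress, the check is deferred
// to the end of the current cycle.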
bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  // An object should be promoted if:
  // - the object has survived a scavenge operation or
  // - to space is already 25% full.
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
      (!page->ContainsLimit(age_mark) || old_address < age_mark);
  return below_mark || (new_space_.Size() + object_size) >=
                        (new_space_.EffectiveCapacity() >> 2);
}


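// Record writes of pointer slots in the store buffer so that the
// scavenger can later find old-to-new references. Writes into new space
// itself need no record.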
void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


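// Returns the old space an object would be promoted into, based solely
// on its instance type; see TargetSpaceId below.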
OldSpace* Heap::TargetSpace(HeapObject* object) {
  InstanceType type = object->map()->instance_type();
  AllocationSpace space = TargetSpaceId(type);
  return (space == OLD_POINTER_SPACE)
      ? old_pointer_space_
      : old_data_space_;
}


AllocationSpace Heap::TargetSpaceId(InstanceType type) {
  // Heap numbers and sequential strings are promoted to old data space;
  // all other object types are promoted to old pointer space.  We do not
  // use object->IsHeapNumber() and object->IsSeqString() because we
  // already know that object has the heap object tag.

  // These objects are never allocated in new space.
  ASSERT(type != MAP_TYPE);
  ASSERT(type != CODE_TYPE);
  ASSERT(type != ODDBALL_TYPE);
  ASSERT(type != JS_GLOBAL_PROPERTY_CELL_TYPE);

  if (type < FIRST_NONSTRING_TYPE) {
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the latter two contain non-map-word pointers to heap objects.
    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
        ? OLD_POINTER_SPACE
        : OLD_DATA_SPACE;
  } else {
    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
  }
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst),
            reinterpret_cast<Object**>(src),
            byte_size / kPointerSize);
}


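// Like CopyBlock, but tolerates overlapping ranges: a word-by-word
// forward copy is safe when the destination starts below the source or
// past its end; otherwise fall back to memmove.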
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  ASSERT(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    memmove(dst, src, byte_size);
  }
}


void Heap::ScavengePointer(HeapObject** p) {
  ScavengeObject(p, *p);
}


void Heap::ScavengeObject(HeapObject** p, HeapObject* object) {
  ASSERT(HEAP->InFromSpace(object));

  // We use the first word (where the map pointer usually is) of a heap
  // object to record the forwarding pointer.  A forwarding pointer can
  // point to an old space, the code space, or the to space of the new
  // generation.
  MapWord first_word = object->map_word();

  // If the first word is a forwarding address, the object has already been
  // copied.
  if (first_word.IsForwardingAddress()) {
    HeapObject* dest = first_word.ToForwardingAddress();
    ASSERT(HEAP->InFromSpace(*p));
    *p = dest;
    return;
  }

  // Call the slow part of scavenge object.
  return ScavengeObjectSlow(p, object);
}


bool Heap::CollectGarbage(AllocationSpace space) {
  return CollectGarbage(space, SelectGarbageCollector(space));
}


MaybeObject* Heap::PrepareForCompare(String* str) {
  // Always flatten small strings, and force flattening of long strings
  // once the accumulated length of strings we failed to flatten exceeds
  // a threshold.
  static const int kMaxAlwaysFlattenLength = 32;
  static const int kFlattenLongThreshold = 16*KB;

  const int length = str->length();
  MaybeObject* obj = str->TryFlatten();
  if (length <= kMaxAlwaysFlattenLength ||
      unflattened_strings_length_ >= kFlattenLongThreshold) {
    return obj;
  }
  if (obj->IsFailure()) {
    unflattened_strings_length_ += length;
  }
  return str;
}


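// Tracks memory allocated outside the V8 heap but kept alive by heap
// objects. A large enough increase since the last global GC triggers a
// full collection so that dead wrappers release their external memory.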
int Heap::AdjustAmountOfExternalAllocatedMemory(int change_in_bytes) {
  ASSERT(HasBeenSetup());
  int amount = amount_of_external_allocated_memory_ + change_in_bytes;
  if (change_in_bytes >= 0) {
    // Avoid overflow.
    if (amount > amount_of_external_allocated_memory_) {
      amount_of_external_allocated_memory_ = amount;
    }
    int amount_since_last_global_gc =
        amount_of_external_allocated_memory_ -
        amount_of_external_allocated_memory_at_last_global_gc_;
    if (amount_since_last_global_gc > external_allocation_limit_) {
      CollectAllGarbage(kNoGCFlags);
    }
  } else {
    // Avoid underflow.
    if (amount >= 0) {
      amount_of_external_allocated_memory_ = amount;
    }
  }
  ASSERT(amount_of_external_allocated_memory_ >= 0);
  return amount_of_external_allocated_memory_;
}


void Heap::SetLastScriptId(Object* last_script_id) {
  roots_[kLastScriptIdRootIndex] = last_script_id;
}


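// Recovers the owning Isolate from a Heap pointer without storing a back
// link: reinterpret_cast<Isolate*>(4)->heap() computes the fixed byte
// offset of the heap_ field inside Isolate, which is then subtracted
// from `this`. The dummy value 4 cancels out of the arithmetic.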
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(4)->heap()) + 4);
}


#ifdef DEBUG
#define GC_GREEDY_CHECK() \
  if (FLAG_gc_greedy) HEAP->GarbageCollectionGreedyCheck()
#else
#define GC_GREEDY_CHECK() { }
#endif


// Calls the FUNCTION_CALL function and retries it up to two times (with
// increasingly aggressive garbage collections in between) to guarantee
// that any allocations performed during the call will succeed if there's
// enough memory.

// Warning: Do not use the identifiers __object__, __maybe_object__ or
// __scope__ in a call to this macro.

#define CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, RETURN_VALUE, RETURN_EMPTY)\
  do {                                                                    \
    GC_GREEDY_CHECK();                                                    \
    MaybeObject* __maybe_object__ = FUNCTION_CALL;                        \
    Object* __object__ = NULL;                                            \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->heap()->CollectGarbage(Failure::cast(__maybe_object__)->     \
                                    allocation_space());                  \
    __maybe_object__ = FUNCTION_CALL;                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory()) {                              \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
    }                                                                     \
    if (!__maybe_object__->IsRetryAfterGC()) RETURN_EMPTY;                \
    ISOLATE->counters()->gc_last_resort_from_handles()->Increment();      \
    ISOLATE->heap()->CollectAllAvailableGarbage();                        \
    {                                                                     \
      AlwaysAllocateScope __scope__;                                      \
      __maybe_object__ = FUNCTION_CALL;                                   \
    }                                                                     \
    if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;            \
    if (__maybe_object__->IsOutOfMemory() ||                              \
        __maybe_object__->IsRetryAfterGC()) {                             \
      /* TODO(1181417): Fix this. */                                      \
      v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
    }                                                                     \
    RETURN_EMPTY;                                                         \
  } while (false)


#define CALL_HEAP_FUNCTION(ISOLATE, FUNCTION_CALL, TYPE)                \
  CALL_AND_RETRY(ISOLATE,                                               \
                 FUNCTION_CALL,                                         \
                 return Handle<TYPE>(TYPE::cast(__object__), ISOLATE),  \
                 return Handle<TYPE>())
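// Illustrative use (hypothetical call site): the macro forms the entire
// body of a handle-returning wrapper, so the caller never sees raw
// MaybeObject failures, e.g.
//
//   Handle<String> NewAsciiSymbol(Isolate* isolate,
//                                 Vector<const char> str,
//                                 uint32_t hash_field) {
//     CALL_HEAP_FUNCTION(
//         isolate,
//         isolate->heap()->AllocateAsciiSymbol(str, hash_field),
//         String);
//   }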


#define CALL_HEAP_FUNCTION_VOID(ISOLATE, FUNCTION_CALL) \
  CALL_AND_RETRY(ISOLATE, FUNCTION_CALL, return, return)


#ifdef DEBUG

inline bool Heap::allow_allocation(bool new_state) {
  bool old = allocation_allowed_;
  allocation_allowed_ = new_state;
  return old;
}

#endif


void ExternalStringTable::AddString(String* string) {
  ASSERT(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid #ifdefs around its calls in release
// mode.
void ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    ASSERT(heap_->InNewSpace(new_space_strings_[i]));
    ASSERT(new_space_strings_[i] != HEAP->raw_unchecked_null_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
    ASSERT(old_space_strings_[i] != HEAP->raw_unchecked_null_value());
  }
#endif
}


void ExternalStringTable::AddOldString(String* string) {
  ASSERT(string->IsExternalString());
  ASSERT(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


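// Truncates the list of new-space strings to `position` entries,
// verifying the table afterwards when heap verification is enabled.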
void ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
  if (FLAG_verify_heap) {
    Verify();
  }
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(the_hole_value());
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(the_hole_value());
  set_instanceof_cache_function(the_hole_value());
}


MaybeObject* TranscendentalCache::Get(Type type, double input) {
  SubCache* cache = caches_[type];
  if (cache == NULL) {
    caches_[type] = cache = new SubCache(type);
  }
  return cache->Get(input);
}


Address TranscendentalCache::cache_array_address() {
  return reinterpret_cast<Address>(caches_);
}


double TranscendentalCache::SubCache::Calculate(double input) {
  switch (type_) {
    case ACOS:
      return acos(input);
    case ASIN:
      return asin(input);
    case ATAN:
      return atan(input);
    case COS:
      return cos(input);
    case EXP:
      return exp(input);
    case LOG:
      return log(input);
    case SIN:
      return sin(input);
    case TAN:
      return tan(input);
    default:
      return 0.0;  // Never happens.
  }
}


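// Looks up `input` in a direct-mapped cache keyed on the raw 64-bit
// pattern of the double (split into two 32-bit halves by the Converter
// union). On a miss, computes the value, boxes it as a HeapNumber and
// caches it; allocation failure propagates to the caller.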
MaybeObject* TranscendentalCache::SubCache::Get(double input) {
  Converter c;
  c.dbl = input;
  int hash = Hash(c);
  Element e = elements_[hash];
  if (e.in[0] == c.integers[0] &&
      e.in[1] == c.integers[1]) {
    ASSERT(e.output != NULL);
    isolate_->counters()->transcendental_cache_hit()->Increment();
    return e.output;
  }
  double answer = Calculate(input);
  isolate_->counters()->transcendental_cache_miss()->Increment();
  Object* heap_number;
  { MaybeObject* maybe_heap_number =
        isolate_->heap()->AllocateHeapNumber(answer);
    if (!maybe_heap_number->ToObject(&heap_number)) return maybe_heap_number;
  }
  elements_[hash].in[0] = c.integers[0];
  elements_[hash].in[1] = c.integers[1];
  elements_[hash].output = heap_number;
  return heap_number;
}


Heap* _inline_get_heap_() {
  return HEAP;
}


} }  // namespace v8::internal

#endif  // V8_HEAP_INL_H_