[profile/ivi/qtjsbackend.git] / src / 3rdparty / v8 / src / heap.cc
1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "accessors.h"
31 #include "api.h"
32 #include "bootstrapper.h"
33 #include "codegen.h"
34 #include "compilation-cache.h"
35 #include "debug.h"
36 #include "deoptimizer.h"
37 #include "global-handles.h"
38 #include "heap-profiler.h"
39 #include "incremental-marking.h"
40 #include "liveobjectlist-inl.h"
41 #include "mark-compact.h"
42 #include "natives.h"
43 #include "objects-visiting.h"
44 #include "objects-visiting-inl.h"
45 #include "runtime-profiler.h"
46 #include "scopeinfo.h"
47 #include "snapshot.h"
48 #include "store-buffer.h"
49 #include "v8threads.h"
50 #include "vm-state-inl.h"
51 #if V8_TARGET_ARCH_ARM && !V8_INTERPRETED_REGEXP
52 #include "regexp-macro-assembler.h"
53 #include "arm/regexp-macro-assembler-arm.h"
54 #endif
55 #if V8_TARGET_ARCH_MIPS && !V8_INTERPRETED_REGEXP
56 #include "regexp-macro-assembler.h"
57 #include "mips/regexp-macro-assembler-mips.h"
58 #endif
59
60 namespace v8 {
61 namespace internal {
62
63
64 static Mutex* gc_initializer_mutex = OS::CreateMutex();
65
66
67 Heap::Heap()
68     : isolate_(NULL),
69 // semispace_size_ should be a power of 2 and old_generation_size_ should be
70 // a multiple of Page::kPageSize.
71 #if defined(ANDROID)
72 #define LUMP_OF_MEMORY (128 * KB)
73       code_range_size_(0),
74 #elif defined(V8_TARGET_ARCH_X64)
75 #define LUMP_OF_MEMORY (2 * MB)
76       code_range_size_(512*MB),
77 #else
78 #define LUMP_OF_MEMORY MB
79       code_range_size_(0),
80 #endif
81       reserved_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
82       max_semispace_size_(8 * Max(LUMP_OF_MEMORY, Page::kPageSize)),
83       initial_semispace_size_(Max(LUMP_OF_MEMORY, Page::kPageSize)),
84       max_old_generation_size_(700ul * LUMP_OF_MEMORY),
85       max_executable_size_(128l * LUMP_OF_MEMORY),
86
87 // Variables set based on semispace_size_ and old_generation_size_ in
88 // ConfigureHeap (survived_since_last_expansion_, external_allocation_limit_).
89 // The reservation will be 4 * reserved_semispace_size_ to ensure that the
90 // young generation can be aligned to its size.
91       survived_since_last_expansion_(0),
92       sweep_generation_(0),
93       always_allocate_scope_depth_(0),
94       linear_allocation_scope_depth_(0),
95       contexts_disposed_(0),
96       scan_on_scavenge_pages_(0),
97       new_space_(this),
98       old_pointer_space_(NULL),
99       old_data_space_(NULL),
100       code_space_(NULL),
101       map_space_(NULL),
102       cell_space_(NULL),
103       lo_space_(NULL),
104       gc_state_(NOT_IN_GC),
105       gc_post_processing_depth_(0),
106       ms_count_(0),
107       gc_count_(0),
108       unflattened_strings_length_(0),
109 #ifdef DEBUG
110       allocation_allowed_(true),
111       allocation_timeout_(0),
112       disallow_allocation_failure_(false),
113       debug_utils_(NULL),
114 #endif  // DEBUG
115       old_gen_promotion_limit_(kMinimumPromotionLimit),
116       old_gen_allocation_limit_(kMinimumAllocationLimit),
117       old_gen_limit_factor_(1),
118       size_of_old_gen_at_last_old_space_gc_(0),
119       external_allocation_limit_(0),
120       amount_of_external_allocated_memory_(0),
121       amount_of_external_allocated_memory_at_last_global_gc_(0),
122       old_gen_exhausted_(false),
123       store_buffer_rebuilder_(store_buffer()),
124       hidden_symbol_(NULL),
125       global_gc_prologue_callback_(NULL),
126       global_gc_epilogue_callback_(NULL),
127       gc_safe_size_of_old_object_(NULL),
128       total_regexp_code_generated_(0),
129       tracer_(NULL),
130       young_survivors_after_last_gc_(0),
131       high_survival_rate_period_length_(0),
132       survival_rate_(0),
133       previous_survival_rate_trend_(Heap::STABLE),
134       survival_rate_trend_(Heap::STABLE),
135       max_gc_pause_(0),
136       max_alive_after_gc_(0),
137       min_in_mutator_(kMaxInt),
138       alive_after_last_gc_(0),
139       last_gc_end_timestamp_(0.0),
140       store_buffer_(this),
141       marking_(this),
142       incremental_marking_(this),
143       number_idle_notifications_(0),
144       last_idle_notification_gc_count_(0),
145       last_idle_notification_gc_count_init_(false),
146       configured_(false),
147       chunks_queued_for_free_(NULL) {
148   // Allow build-time customization of the max semispace size. Building
149   // V8 with snapshots and a non-default max semispace size is much
150   // easier if you can define it as part of the build environment.
151 #if defined(V8_MAX_SEMISPACE_SIZE)
152   max_semispace_size_ = reserved_semispace_size_ = V8_MAX_SEMISPACE_SIZE;
153 #endif
154
155   intptr_t max_virtual = OS::MaxVirtualMemory();
156
157   if (max_virtual > 0) {
158     if (code_range_size_ > 0) {
159       // Reserve no more than 1/8 of the memory for the code range.
160       code_range_size_ = Min(code_range_size_, max_virtual >> 3);
161     }
162   }
163
164   memset(roots_, 0, sizeof(roots_[0]) * kRootListLength);
165   global_contexts_list_ = NULL;
166   mark_compact_collector_.heap_ = this;
167   external_string_table_.heap_ = this;
168 }
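// Illustrative arithmetic, added for clarity (not part of the original
// source): on an x64 build, where LUMP_OF_MEMORY is 2 * MB, and assuming
// Page::kPageSize is no larger than 2 MB, the defaults above work out to
// roughly
//
//   reserved_semispace_size_ = max_semispace_size_ = 8 * 2 MB = 16 MB
//   initial_semispace_size_  = 2 MB
//   max_old_generation_size_ = 700 * 2 MB ~ 1.4 GB
//   max_executable_size_     = 128 * 2 MB = 256 MB
//
// These are only compile-time defaults; V8_MAX_SEMISPACE_SIZE above and the
// regular heap configuration can still adjust them.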
169
170
171 intptr_t Heap::Capacity() {
172   if (!HasBeenSetup()) return 0;
173
174   return new_space_.Capacity() +
175       old_pointer_space_->Capacity() +
176       old_data_space_->Capacity() +
177       code_space_->Capacity() +
178       map_space_->Capacity() +
179       cell_space_->Capacity();
180 }
181
182
183 intptr_t Heap::CommittedMemory() {
184   if (!HasBeenSetup()) return 0;
185
186   return new_space_.CommittedMemory() +
187       old_pointer_space_->CommittedMemory() +
188       old_data_space_->CommittedMemory() +
189       code_space_->CommittedMemory() +
190       map_space_->CommittedMemory() +
191       cell_space_->CommittedMemory() +
192       lo_space_->Size();
193 }
194
195 intptr_t Heap::CommittedMemoryExecutable() {
196   if (!HasBeenSetup()) return 0;
197
198   return isolate()->memory_allocator()->SizeExecutable();
199 }
200
201
202 intptr_t Heap::Available() {
203   if (!HasBeenSetup()) return 0;
204
205   return new_space_.Available() +
206       old_pointer_space_->Available() +
207       old_data_space_->Available() +
208       code_space_->Available() +
209       map_space_->Available() +
210       cell_space_->Available();
211 }
212
213
214 bool Heap::HasBeenSetup() {
215   return old_pointer_space_ != NULL &&
216          old_data_space_ != NULL &&
217          code_space_ != NULL &&
218          map_space_ != NULL &&
219          cell_space_ != NULL &&
220          lo_space_ != NULL;
221 }
222
223
224 int Heap::GcSafeSizeOfOldObject(HeapObject* object) {
225   if (IntrusiveMarking::IsMarked(object)) {
226     return IntrusiveMarking::SizeOfMarkedObject(object);
227   }
228   return object->SizeFromMap(object->map());
229 }
230
231
232 GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space) {
233   // Is global GC requested?
234   if (space != NEW_SPACE || FLAG_gc_global) {
235     isolate_->counters()->gc_compactor_caused_by_request()->Increment();
236     return MARK_COMPACTOR;
237   }
238
239   // Is enough data promoted to justify a global GC?
240   if (OldGenerationPromotionLimitReached()) {
241     isolate_->counters()->gc_compactor_caused_by_promoted_data()->Increment();
242     return MARK_COMPACTOR;
243   }
244
245   // Have allocation in OLD and LO failed?
246   if (old_gen_exhausted_) {
247     isolate_->counters()->
248         gc_compactor_caused_by_oldspace_exhaustion()->Increment();
249     return MARK_COMPACTOR;
250   }
251
252   // Is there enough space left in OLD to guarantee that a scavenge can
253   // succeed?
254   //
255   // Note that MemoryAllocator->MaxAvailable() undercounts the memory available
256   // for object promotion. It counts only the bytes that the memory
257   // allocator has not yet allocated from the OS and assigned to any space,
258   // and does not count available bytes already in the old space or code
259   // space.  Undercounting is safe---we may get an unrequested full GC when
260   // a scavenge would have succeeded.
261   if (isolate_->memory_allocator()->MaxAvailable() <= new_space_.Size()) {
262     isolate_->counters()->
263         gc_compactor_caused_by_oldspace_exhaustion()->Increment();
264     return MARK_COMPACTOR;
265   }
266
267   // Default
268   return SCAVENGER;
269 }
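// A sketch of how this selection is typically reached (the single-argument
// CollectGarbage() overload is defined outside this file, so treat the exact
// wiring shown here as an assumption):
//
//   bool Heap::CollectGarbage(AllocationSpace space) {
//     return CollectGarbage(space, SelectGarbageCollector(space));
//   }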
270
271
272 // TODO(1238405): Combine the infrastructure for --heap-stats and
273 // --log-gc to avoid the complicated preprocessor and flag testing.
274 void Heap::ReportStatisticsBeforeGC() {
275   // Heap::ReportHeapStatistics will also log NewSpace statistics when
276   // --log-gc is set.  The following logic is used to avoid
277   // double logging.
278 #ifdef DEBUG
279   if (FLAG_heap_stats || FLAG_log_gc) new_space_.CollectStatistics();
280   if (FLAG_heap_stats) {
281     ReportHeapStatistics("Before GC");
282   } else if (FLAG_log_gc) {
283     new_space_.ReportStatistics();
284   }
285   if (FLAG_heap_stats || FLAG_log_gc) new_space_.ClearHistograms();
286 #else
287   if (FLAG_log_gc) {
288     new_space_.CollectStatistics();
289     new_space_.ReportStatistics();
290     new_space_.ClearHistograms();
291   }
292 #endif  // DEBUG
293 }
294
295
296 void Heap::PrintShortHeapStatistics() {
297   if (!FLAG_trace_gc_verbose) return;
298   PrintF("Memory allocator,   used: %8" V8_PTR_PREFIX "d"
299              ", available: %8" V8_PTR_PREFIX "d\n",
300          isolate_->memory_allocator()->Size(),
301          isolate_->memory_allocator()->Available());
302   PrintF("New space,          used: %8" V8_PTR_PREFIX "d"
303              ", available: %8" V8_PTR_PREFIX "d\n",
304          Heap::new_space_.Size(),
305          new_space_.Available());
306   PrintF("Old pointers,       used: %8" V8_PTR_PREFIX "d"
307              ", available: %8" V8_PTR_PREFIX "d"
308              ", waste: %8" V8_PTR_PREFIX "d\n",
309          old_pointer_space_->Size(),
310          old_pointer_space_->Available(),
311          old_pointer_space_->Waste());
312   PrintF("Old data space,     used: %8" V8_PTR_PREFIX "d"
313              ", available: %8" V8_PTR_PREFIX "d"
314              ", waste: %8" V8_PTR_PREFIX "d\n",
315          old_data_space_->Size(),
316          old_data_space_->Available(),
317          old_data_space_->Waste());
318   PrintF("Code space,         used: %8" V8_PTR_PREFIX "d"
319              ", available: %8" V8_PTR_PREFIX "d"
320              ", waste: %8" V8_PTR_PREFIX "d\n",
321          code_space_->Size(),
322          code_space_->Available(),
323          code_space_->Waste());
324   PrintF("Map space,          used: %8" V8_PTR_PREFIX "d"
325              ", available: %8" V8_PTR_PREFIX "d"
326              ", waste: %8" V8_PTR_PREFIX "d\n",
327          map_space_->Size(),
328          map_space_->Available(),
329          map_space_->Waste());
330   PrintF("Cell space,         used: %8" V8_PTR_PREFIX "d"
331              ", available: %8" V8_PTR_PREFIX "d"
332              ", waste: %8" V8_PTR_PREFIX "d\n",
333          cell_space_->Size(),
334          cell_space_->Available(),
335          cell_space_->Waste());
336   PrintF("Large object space, used: %8" V8_PTR_PREFIX "d"
337              ", available: %8" V8_PTR_PREFIX "d\n",
338          lo_space_->Size(),
339          lo_space_->Available());
340 }
341
342
343 // TODO(1238405): Combine the infrastructure for --heap-stats and
344 // --log-gc to avoid the complicated preprocessor and flag testing.
345 void Heap::ReportStatisticsAfterGC() {
346   // As with the statistics before GC, we use some complicated logic to ensure
347   // that NewSpace statistics are logged exactly once when --log-gc is turned on.
348 #if defined(DEBUG)
349   if (FLAG_heap_stats) {
350     new_space_.CollectStatistics();
351     ReportHeapStatistics("After GC");
352   } else if (FLAG_log_gc) {
353     new_space_.ReportStatistics();
354   }
355 #else
356   if (FLAG_log_gc) new_space_.ReportStatistics();
357 #endif  // DEBUG
358 }
359
360
361 void Heap::GarbageCollectionPrologue() {
362   isolate_->transcendental_cache()->Clear();
363   ClearJSFunctionResultCaches();
364   gc_count_++;
365   unflattened_strings_length_ = 0;
366 #ifdef DEBUG
367   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
368   allow_allocation(false);
369
370   if (FLAG_verify_heap) {
371     Verify();
372   }
373
374   if (FLAG_gc_verbose) Print();
375 #endif  // DEBUG
376
377 #if defined(DEBUG)
378   ReportStatisticsBeforeGC();
379 #endif  // DEBUG
380
381   LiveObjectList::GCPrologue();
382   store_buffer()->GCPrologue();
383 }
384
385 intptr_t Heap::SizeOfObjects() {
386   intptr_t total = 0;
387   AllSpaces spaces;
388   for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
389     total += space->SizeOfObjects();
390   }
391   return total;
392 }
393
394 void Heap::GarbageCollectionEpilogue() {
395   store_buffer()->GCEpilogue();
396   LiveObjectList::GCEpilogue();
397 #ifdef DEBUG
398   allow_allocation(true);
399   ZapFromSpace();
400
401   if (FLAG_verify_heap) {
402     Verify();
403   }
404
405   if (FLAG_print_global_handles) isolate_->global_handles()->Print();
406   if (FLAG_print_handles) PrintHandles();
407   if (FLAG_gc_verbose) Print();
408   if (FLAG_code_stats) ReportCodeStatistics("After GC");
409 #endif
410
411   isolate_->counters()->alive_after_last_gc()->Set(
412       static_cast<int>(SizeOfObjects()));
413
414   isolate_->counters()->symbol_table_capacity()->Set(
415       symbol_table()->Capacity());
416   isolate_->counters()->number_of_symbols()->Set(
417       symbol_table()->NumberOfElements());
418 #if defined(DEBUG)
419   ReportStatisticsAfterGC();
420 #endif  // DEBUG
421 #ifdef ENABLE_DEBUGGER_SUPPORT
422   isolate_->debug()->AfterGarbageCollection();
423 #endif  // ENABLE_DEBUGGER_SUPPORT
424 }
425
426
427 void Heap::CollectAllGarbage(int flags) {
428   // Since we are ignoring the return value, the exact choice of space does
429   // not matter, so long as we do not specify NEW_SPACE, which would not
430   // cause a full GC.
431   mark_compact_collector_.SetFlags(flags);
432   CollectGarbage(OLD_POINTER_SPACE);
433   mark_compact_collector_.SetFlags(kNoGCFlags);
434 }
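// Hypothetical usage (a sketch, not taken from this file): callers pass the
// GC flag constants defined on Heap, e.g.
//
//   heap->CollectAllGarbage(Heap::kNoGCFlags);
//   heap->CollectAllGarbage(Heap::kMakeHeapIterableMask);
//
// using the same kNoGCFlags / kMakeHeapIterableMask values that
// CollectAllAvailableGarbage() below passes to the mark-compact collector.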
435
436
437 void Heap::CollectAllAvailableGarbage() {
438   // Since we are ignoring the return value, the exact choice of space does
439   // not matter, so long as we do not specify NEW_SPACE, which would not
440   // cause a full GC.
441   // A major GC invokes weak handle callbacks on weakly reachable handles,
442   // but won't collect weakly reachable objects until the next major GC.
443   // Therefore, if we collect aggressively and a weak handle callback has
444   // been invoked, we rerun the major GC to release objects which have
445   // become garbage.
446   // Note: as weak callbacks can execute arbitrary code, we cannot hope
447   // that there will eventually be no weak callback invocations.
448   // Therefore stop recollecting after several attempts.
449   mark_compact_collector()->SetFlags(kMakeHeapIterableMask);
450   const int kMaxNumberOfAttempts = 7;
451   for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
452     if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR)) {
453       break;
454     }
455   }
456   mark_compact_collector()->SetFlags(kNoGCFlags);
457 }
458
459
460 bool Heap::CollectGarbage(AllocationSpace space, GarbageCollector collector) {
461   // The VM is in the GC state until exiting this function.
462   VMState state(isolate_, GC);
463
464 #ifdef DEBUG
465   // Reset the allocation timeout to the GC interval, but make sure to
466   // allow at least a few allocations after a collection. The reason
467   // for this is that we have a lot of allocation sequences and we
468   // assume that a garbage collection will allow the subsequent
469   // allocation attempts to go through.
470   allocation_timeout_ = Max(6, FLAG_gc_interval);
471 #endif
472
473   if (collector == SCAVENGER && !incremental_marking()->IsStopped()) {
474     if (FLAG_trace_incremental_marking) {
475       PrintF("[IncrementalMarking] Scavenge during marking.\n");
476     }
477   }
478
479   if (collector == MARK_COMPACTOR &&
480       !mark_compact_collector()->PreciseSweepingRequired() &&
481       !incremental_marking()->IsStopped() &&
482       !incremental_marking()->should_hurry() &&
483       FLAG_incremental_marking_steps) {
484     if (FLAG_trace_incremental_marking) {
485       PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
486     }
487     collector = SCAVENGER;
488   }
489
490   bool next_gc_likely_to_collect_more = false;
491
492   { GCTracer tracer(this);
493     GarbageCollectionPrologue();
494     // The GC count was incremented in the prologue.  Tell the tracer about
495     // it.
496     tracer.set_gc_count(gc_count_);
497
498     // Tell the tracer which collector we've selected.
499     tracer.set_collector(collector);
500
501     HistogramTimer* rate = (collector == SCAVENGER)
502         ? isolate_->counters()->gc_scavenger()
503         : isolate_->counters()->gc_compactor();
504     rate->Start();
505     next_gc_likely_to_collect_more =
506         PerformGarbageCollection(collector, &tracer);
507     rate->Stop();
508
509     GarbageCollectionEpilogue();
510   }
511
512   ASSERT(collector == SCAVENGER || incremental_marking()->IsStopped());
513   if (incremental_marking()->IsStopped()) {
514     if (incremental_marking()->WorthActivating() && NextGCIsLikelyToBeFull()) {
515       incremental_marking()->Start();
516     }
517   }
518
519   return next_gc_likely_to_collect_more;
520 }
521
522
523 void Heap::PerformScavenge() {
524   GCTracer tracer(this);
525   if (incremental_marking()->IsStopped()) {
526     PerformGarbageCollection(SCAVENGER, &tracer);
527   } else {
528     PerformGarbageCollection(MARK_COMPACTOR, &tracer);
529   }
530 }
531
532
533 #ifdef DEBUG
534 // Helper class for verifying the symbol table.
535 class SymbolTableVerifier : public ObjectVisitor {
536  public:
537   void VisitPointers(Object** start, Object** end) {
538     // Visit all HeapObject pointers in [start, end).
539     for (Object** p = start; p < end; p++) {
540       if ((*p)->IsHeapObject()) {
541         // Check that the symbol is actually a symbol.
542         ASSERT((*p)->IsNull() || (*p)->IsUndefined() || (*p)->IsSymbol());
543       }
544     }
545   }
546 };
547 #endif  // DEBUG
548
549
550 static void VerifySymbolTable() {
551 #ifdef DEBUG
552   SymbolTableVerifier verifier;
553   HEAP->symbol_table()->IterateElements(&verifier);
554 #endif  // DEBUG
555 }
556
557
558 void Heap::ReserveSpace(
559     int new_space_size,
560     int pointer_space_size,
561     int data_space_size,
562     int code_space_size,
563     int map_space_size,
564     int cell_space_size,
565     int large_object_size) {
566   NewSpace* new_space = Heap::new_space();
567   PagedSpace* old_pointer_space = Heap::old_pointer_space();
568   PagedSpace* old_data_space = Heap::old_data_space();
569   PagedSpace* code_space = Heap::code_space();
570   PagedSpace* map_space = Heap::map_space();
571   PagedSpace* cell_space = Heap::cell_space();
572   LargeObjectSpace* lo_space = Heap::lo_space();
573   bool gc_performed = true;
574   while (gc_performed) {
575     gc_performed = false;
576     if (!new_space->ReserveSpace(new_space_size)) {
577       Heap::CollectGarbage(NEW_SPACE);
578       gc_performed = true;
579     }
580     if (!old_pointer_space->ReserveSpace(pointer_space_size)) {
581       Heap::CollectGarbage(OLD_POINTER_SPACE);
582       gc_performed = true;
583     }
584     if (!(old_data_space->ReserveSpace(data_space_size))) {
585       Heap::CollectGarbage(OLD_DATA_SPACE);
586       gc_performed = true;
587     }
588     if (!(code_space->ReserveSpace(code_space_size))) {
589       Heap::CollectGarbage(CODE_SPACE);
590       gc_performed = true;
591     }
592     if (!(map_space->ReserveSpace(map_space_size))) {
593       Heap::CollectGarbage(MAP_SPACE);
594       gc_performed = true;
595     }
596     if (!(cell_space->ReserveSpace(cell_space_size))) {
597       Heap::CollectGarbage(CELL_SPACE);
598       gc_performed = true;
599     }
600     // We add a slack-factor of 2 in order to have space for a series of
601     // large-object allocations that are only just larger than the page size.
602     large_object_size *= 2;
603     // The ReserveSpace method on the large object space checks how much
604     // we can expand the old generation.  This includes expansion caused by
605     // allocation in the other spaces.
606     large_object_size += cell_space_size + map_space_size + code_space_size +
607         data_space_size + pointer_space_size;
608     if (!(lo_space->ReserveSpace(large_object_size))) {
609       Heap::CollectGarbage(LO_SPACE);
610       gc_performed = true;
611     }
612   }
613 }
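// Illustrative arithmetic for the large object reservation above (the
// numbers are hypothetical): with large_object_size = 1 MB and 512 KB
// requested for each of the five paged spaces, the reservation asked of
// lo_space is
//
//   2 * 1 MB + 5 * 512 KB = 4.5 MB
//
// i.e. the doubled slack plus the old generation expansion that could be
// caused by allocation in the other spaces.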
614
615
616 void Heap::EnsureFromSpaceIsCommitted() {
617   if (new_space_.CommitFromSpaceIfNeeded()) return;
618
619   // Committing memory to from space failed.
620   // Try shrinking and try again.
621   Shrink();
622   if (new_space_.CommitFromSpaceIfNeeded()) return;
623
624   // Committing memory to from space failed again.
625   // Memory is exhausted and we will die.
626   V8::FatalProcessOutOfMemory("Committing semi space failed.");
627 }
628
629
630 void Heap::ClearJSFunctionResultCaches() {
631   if (isolate_->bootstrapper()->IsActive()) return;
632
633   Object* context = global_contexts_list_;
634   while (!context->IsUndefined()) {
635     // Get the caches for this context:
636     FixedArray* caches =
637       Context::cast(context)->jsfunction_result_caches();
638     // Clear the caches:
639     int length = caches->length();
640     for (int i = 0; i < length; i++) {
641       JSFunctionResultCache::cast(caches->get(i))->Clear();
642     }
643     // Get the next context:
644     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
645   }
646 }
647
648
649
650 void Heap::ClearNormalizedMapCaches() {
651   if (isolate_->bootstrapper()->IsActive() &&
652       !incremental_marking()->IsMarking()) {
653     return;
654   }
655
656   Object* context = global_contexts_list_;
657   while (!context->IsUndefined()) {
658     Context::cast(context)->normalized_map_cache()->Clear();
659     context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK);
660   }
661 }
662
663
664 void Heap::UpdateSurvivalRateTrend(int start_new_space_size) {
665   double survival_rate =
666       (static_cast<double>(young_survivors_after_last_gc_) * 100) /
667       start_new_space_size;
668
669   if (survival_rate > kYoungSurvivalRateThreshold) {
670     high_survival_rate_period_length_++;
671   } else {
672     high_survival_rate_period_length_ = 0;
673   }
674
675   double survival_rate_diff = survival_rate_ - survival_rate;
676
677   if (survival_rate_diff > kYoungSurvivalRateAllowedDeviation) {
678     set_survival_rate_trend(DECREASING);
679   } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) {
680     set_survival_rate_trend(INCREASING);
681   } else {
682     set_survival_rate_trend(STABLE);
683   }
684
685   survival_rate_ = survival_rate;
686 }
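// Worked example with hypothetical numbers: if 1.5 MB out of a 2 MB new
// space survived the last scavenge, survival_rate is 75.  Whether the trend
// is then reported as INCREASING, DECREASING or STABLE depends on how far
// that value differs from the previous rate, measured against
// kYoungSurvivalRateAllowedDeviation.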
687
688 bool Heap::PerformGarbageCollection(GarbageCollector collector,
689                                     GCTracer* tracer) {
690   bool next_gc_likely_to_collect_more = false;
691
692   if (collector != SCAVENGER) {
693     PROFILE(isolate_, CodeMovingGCEvent());
694   }
695
696   if (FLAG_verify_heap) {
697     VerifySymbolTable();
698   }
699   if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) {
700     ASSERT(!allocation_allowed_);
701     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
702     global_gc_prologue_callback_();
703   }
704
705   GCType gc_type =
706       collector == MARK_COMPACTOR ? kGCTypeMarkSweepCompact : kGCTypeScavenge;
707
708   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
709     if (gc_type & gc_prologue_callbacks_[i].gc_type) {
710       gc_prologue_callbacks_[i].callback(gc_type, kNoGCCallbackFlags);
711     }
712   }
713
714   EnsureFromSpaceIsCommitted();
715
716   int start_new_space_size = Heap::new_space()->SizeAsInt();
717
718   if (IsHighSurvivalRate()) {
719     // We speed up the incremental marker if it is running so that it
720     // does not fall behind the rate of promotion, which would cause a
721     // constantly growing old space.
722     incremental_marking()->NotifyOfHighPromotionRate();
723   }
724
725   if (collector == MARK_COMPACTOR) {
726     // Perform mark-sweep with optional compaction.
727     MarkCompact(tracer);
728     sweep_generation_++;
729     bool high_survival_rate_during_scavenges = IsHighSurvivalRate() &&
730         IsStableOrIncreasingSurvivalTrend();
731
732     UpdateSurvivalRateTrend(start_new_space_size);
733
734     size_of_old_gen_at_last_old_space_gc_ = PromotedSpaceSize();
735
736     if (high_survival_rate_during_scavenges &&
737         IsStableOrIncreasingSurvivalTrend()) {
738       // Stable high survival rates of young objects both during partial and
739       // full collections indicate that the mutator is either building or
740       // modifying a structure with a long lifetime.
741       // In this case we aggressively raise the old generation memory limits
742       // to postpone the subsequent mark-sweep collection and thus trade
743       // memory space for mutation speed.
744       old_gen_limit_factor_ = 2;
745     } else {
746       old_gen_limit_factor_ = 1;
747     }
748
749     old_gen_promotion_limit_ =
750         OldGenPromotionLimit(size_of_old_gen_at_last_old_space_gc_);
751     old_gen_allocation_limit_ =
752         OldGenAllocationLimit(size_of_old_gen_at_last_old_space_gc_);
753
754     old_gen_exhausted_ = false;
755   } else {
756     tracer_ = tracer;
757     Scavenge();
758     tracer_ = NULL;
759
760     UpdateSurvivalRateTrend(start_new_space_size);
761   }
762
763   isolate_->counters()->objs_since_last_young()->Set(0);
764
765   gc_post_processing_depth_++;
766   { DisableAssertNoAllocation allow_allocation;
767     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
768     next_gc_likely_to_collect_more =
769         isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
770   }
771   gc_post_processing_depth_--;
772
773   // Update relocatables.
774   Relocatable::PostGarbageCollectionProcessing();
775
776   if (collector == MARK_COMPACTOR) {
777     // Register the amount of external allocated memory.
778     amount_of_external_allocated_memory_at_last_global_gc_ =
779         amount_of_external_allocated_memory_;
780   }
781
782   GCCallbackFlags callback_flags = kNoGCCallbackFlags;
783   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
784     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
785       gc_epilogue_callbacks_[i].callback(gc_type, callback_flags);
786     }
787   }
788
789   if (collector == MARK_COMPACTOR && global_gc_epilogue_callback_) {
790     ASSERT(!allocation_allowed_);
791     GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
792     global_gc_epilogue_callback_();
793   }
794   if (FLAG_verify_heap) {
795     VerifySymbolTable();
796   }
797
798   return next_gc_likely_to_collect_more;
799 }
800
801
802 void Heap::MarkCompact(GCTracer* tracer) {
803   gc_state_ = MARK_COMPACT;
804   LOG(isolate_, ResourceEvent("markcompact", "begin"));
805
806   mark_compact_collector_.Prepare(tracer);
807
808   ms_count_++;
809   tracer->set_full_gc_count(ms_count_);
810
811   MarkCompactPrologue();
812
813   mark_compact_collector_.CollectGarbage();
814
815   LOG(isolate_, ResourceEvent("markcompact", "end"));
816
817   gc_state_ = NOT_IN_GC;
818
819   isolate_->counters()->objs_since_last_full()->Set(0);
820
821   contexts_disposed_ = 0;
822 }
823
824
825 void Heap::MarkCompactPrologue() {
826   // At any old-generation GC, clear the keyed lookup cache to enable
827   // collection of unused maps.
828   isolate_->keyed_lookup_cache()->Clear();
829   isolate_->context_slot_cache()->Clear();
830   isolate_->descriptor_lookup_cache()->Clear();
831   StringSplitCache::Clear(string_split_cache());
832
833   isolate_->compilation_cache()->MarkCompactPrologue();
834
835   CompletelyClearInstanceofCache();
836
837   // TODO(1605) select heuristic for flushing NumberString cache with
838   // FlushNumberStringCache
839   if (FLAG_cleanup_code_caches_at_gc) {
840     polymorphic_code_cache()->set_cache(undefined_value());
841   }
842
843   ClearNormalizedMapCaches();
844 }
845
846
847 Object* Heap::FindCodeObject(Address a) {
848   return isolate()->inner_pointer_to_code_cache()->
849       GcSafeFindCodeForInnerPointer(a);
850 }
851
852
853 // Helper class for copying HeapObjects
854 class ScavengeVisitor: public ObjectVisitor {
855  public:
856   explicit ScavengeVisitor(Heap* heap) : heap_(heap) {}
857
858   void VisitPointer(Object** p) { ScavengePointer(p); }
859
860   void VisitPointers(Object** start, Object** end) {
861     // Copy all HeapObject pointers in [start, end)
862     for (Object** p = start; p < end; p++) ScavengePointer(p);
863   }
864
865  private:
866   void ScavengePointer(Object** p) {
867     Object* object = *p;
868     if (!heap_->InNewSpace(object)) return;
869     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
870                          reinterpret_cast<HeapObject*>(object));
871   }
872
873   Heap* heap_;
874 };
875
876
877 #ifdef DEBUG
878 // Visitor class to verify pointers in code or data space do not point into
879 // new space.
880 class VerifyNonPointerSpacePointersVisitor: public ObjectVisitor {
881  public:
882   void VisitPointers(Object** start, Object** end) {
883     for (Object** current = start; current < end; current++) {
884       if ((*current)->IsHeapObject()) {
885         ASSERT(!HEAP->InNewSpace(HeapObject::cast(*current)));
886       }
887     }
888   }
889 };
890
891
892 static void VerifyNonPointerSpacePointers() {
893   // Verify that there are no pointers to new space in spaces where we
894   // do not expect them.
895   VerifyNonPointerSpacePointersVisitor v;
896   HeapObjectIterator code_it(HEAP->code_space());
897   for (HeapObject* object = code_it.Next();
898        object != NULL; object = code_it.Next())
899     object->Iterate(&v);
900
901   // The old data space is normally swept conservatively, in which case the
902   // iterator does not work, so we normally skip the next bit.
903   if (!HEAP->old_data_space()->was_swept_conservatively()) {
904     HeapObjectIterator data_it(HEAP->old_data_space());
905     for (HeapObject* object = data_it.Next();
906          object != NULL; object = data_it.Next())
907       object->Iterate(&v);
908   }
909 }
910 #endif
911
912
913 void Heap::CheckNewSpaceExpansionCriteria() {
914   if (new_space_.Capacity() < new_space_.MaximumCapacity() &&
915       survived_since_last_expansion_ > new_space_.Capacity()) {
916     // Grow the size of new space if there is room to grow and enough
917     // data has survived scavenge since the last expansion.
918     new_space_.Grow();
919     survived_since_last_expansion_ = 0;
920   }
921 }
922
923
924 static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
925   return heap->InNewSpace(*p) &&
926       !HeapObject::cast(*p)->map_word().IsForwardingAddress();
927 }
928
929
930 void Heap::ScavengeStoreBufferCallback(
931     Heap* heap,
932     MemoryChunk* page,
933     StoreBufferEvent event) {
934   heap->store_buffer_rebuilder_.Callback(page, event);
935 }
936
937
938 void StoreBufferRebuilder::Callback(MemoryChunk* page, StoreBufferEvent event) {
939   if (event == kStoreBufferStartScanningPagesEvent) {
940     start_of_current_page_ = NULL;
941     current_page_ = NULL;
942   } else if (event == kStoreBufferScanningPageEvent) {
943     if (current_page_ != NULL) {
944       // If this page already overflowed the store buffer during this iteration,
945       if (current_page_->scan_on_scavenge()) {
946         // then wipe out the entries that have been added for it.
947         store_buffer_->SetTop(start_of_current_page_);
948       } else if (store_buffer_->Top() - start_of_current_page_ >=
949                  (store_buffer_->Limit() - store_buffer_->Top()) >> 2) {
950         // Did we find too many pointers in the previous page?  The heuristic is
951         // that no page can take more than 1/5 of the remaining slots in the store
952         // buffer.
953         current_page_->set_scan_on_scavenge(true);
954         store_buffer_->SetTop(start_of_current_page_);
955       } else {
956         // In this case the page we scanned took a reasonable number of slots in
957         // the store buffer.  It has now been rehabilitated and is no longer
958         // marked scan_on_scavenge.
959         ASSERT(!current_page_->scan_on_scavenge());
960       }
961     }
962     start_of_current_page_ = store_buffer_->Top();
963     current_page_ = page;
964   } else if (event == kStoreBufferFullEvent) {
965     // The current page overflowed the store buffer again.  Wipe out its entries
966     // in the store buffer and mark it scan-on-scavenge again.  This may happen
967     // several times while scanning.
968     if (current_page_ == NULL) {
969       // Store Buffer overflowed while scanning promoted objects.  These are not
970       // in any particular page, though they are likely to be clustered by the
971       // allocation routines.
972       store_buffer_->HandleFullness();
973     } else {
974       // Store Buffer overflowed while scanning a particular old space page for
975       // pointers to new space.
976       ASSERT(current_page_ == page);
977       ASSERT(page != NULL);
978       current_page_->set_scan_on_scavenge(true);
979       ASSERT(start_of_current_page_ != store_buffer_->Top());
980       store_buffer_->SetTop(start_of_current_page_);
981     }
982   } else {
983     UNREACHABLE();
984   }
985 }
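// Worked example of the heuristic above (hypothetical numbers): if the store
// buffer had 1000 free slots when scanning of a page started and that page
// added 200 entries, then Top() - start_of_current_page_ == 200 and
// Limit() - Top() == 800, so 200 >= (800 >> 2) holds; the page is switched
// to scan-on-scavenge and its 200 entries are wiped instead of being kept.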
986
987
988 void Heap::Scavenge() {
989 #ifdef DEBUG
990   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
991 #endif
992
993   gc_state_ = SCAVENGE;
994
995   // Implements Cheney's copying algorithm
996   LOG(isolate_, ResourceEvent("scavenge", "begin"));
997
998   // Clear descriptor cache.
999   isolate_->descriptor_lookup_cache()->Clear();
1000
1001   // Used for updating survived_since_last_expansion_ at function end.
1002   intptr_t survived_watermark = PromotedSpaceSize();
1003
1004   CheckNewSpaceExpansionCriteria();
1005
1006   SelectScavengingVisitorsTable();
1007
1008   incremental_marking()->PrepareForScavenge();
1009
1010   old_pointer_space()->AdvanceSweeper(new_space_.Size());
1011   old_data_space()->AdvanceSweeper(new_space_.Size());
1012
1013   // Flip the semispaces.  After flipping, to space is empty, from space has
1014   // live objects.
1015   new_space_.Flip();
1016   new_space_.ResetAllocationInfo();
1017
1018   // We need to sweep newly copied objects which can be either in the
1019   // to space or promoted to the old generation.  For to-space
1020   // objects, we treat the bottom of the to space as a queue.  Newly
1021   // copied and unswept objects lie between a 'front' mark and the
1022   // allocation pointer.
1023   //
1024   // Promoted objects can go into various old-generation spaces, and
1025   // can be allocated internally in the spaces (from the free list).
1026   // We treat the top of the to space as a queue of addresses of
1027   // promoted objects.  The addresses of newly promoted and unswept
1028   // objects lie between a 'front' mark and a 'rear' mark that is
1029   // updated as a side effect of promoting an object.
1030   //
1031   // There is guaranteed to be enough room at the top of the to space
1032   // for the addresses of promoted objects: every object promoted
1033   // frees up its size in bytes from the top of the new space, and
1034   // objects are at least one pointer in size.
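  //
  // A rough picture of the to space while the copy is in progress
  // (illustrative only):
  //
  //   ToSpaceStart()                                           ToSpaceEnd()
  //   | copied, unswept objects |    free    | promoted-object addresses |
  //   ^                         ^            ^                           ^
  //   new_space_front     allocation top     queue rear          queue front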
1035   Address new_space_front = new_space_.ToSpaceStart();
1036   promotion_queue_.Initialize(new_space_.ToSpaceEnd());
1037
1038 #ifdef DEBUG
1039   store_buffer()->Clean();
1040 #endif
1041
1042   ScavengeVisitor scavenge_visitor(this);
1043   // Copy roots.
1044   IterateRoots(&scavenge_visitor, VISIT_ALL_IN_SCAVENGE);
1045
1046   // Copy objects reachable from the old generation.
1047   {
1048     StoreBufferRebuildScope scope(this,
1049                                   store_buffer(),
1050                                   &ScavengeStoreBufferCallback);
1051     store_buffer()->IteratePointersToNewSpace(&ScavengeObject);
1052   }
1053
1054   // Copy objects reachable from cells by scavenging cell values directly.
1055   HeapObjectIterator cell_iterator(cell_space_);
1056   for (HeapObject* cell = cell_iterator.Next();
1057        cell != NULL; cell = cell_iterator.Next()) {
1058     if (cell->IsJSGlobalPropertyCell()) {
1059       Address value_address =
1060           reinterpret_cast<Address>(cell) +
1061           (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag);
1062       scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address));
1063     }
1064   }
1065
1066   // Scavenge objects reachable from the global contexts list directly.
1067   scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
1068
1069   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1070   isolate_->global_handles()->IdentifyNewSpaceWeakIndependentHandles(
1071       &IsUnscavengedHeapObject);
1072   isolate_->global_handles()->IterateNewSpaceWeakIndependentRoots(
1073       &scavenge_visitor);
1074   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
1075
1076
1077   UpdateNewSpaceReferencesInExternalStringTable(
1078       &UpdateNewSpaceReferenceInExternalStringTableEntry);
1079
1080   LiveObjectList::UpdateReferencesForScavengeGC();
1081   isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
1082   incremental_marking()->UpdateMarkingDequeAfterScavenge();
1083
1084   ASSERT(new_space_front == new_space_.top());
1085
1086   // Set age mark.
1087   new_space_.set_age_mark(new_space_.top());
1088
1089   new_space_.LowerInlineAllocationLimit(
1090       new_space_.inline_allocation_limit_step());
1091
1092   // Update how much has survived scavenge.
1093   IncrementYoungSurvivorsCounter(static_cast<int>(
1094       (PromotedSpaceSize() - survived_watermark) + new_space_.Size()));
1095
1096   LOG(isolate_, ResourceEvent("scavenge", "end"));
1097
1098   gc_state_ = NOT_IN_GC;
1099 }
1100
1101
1102 String* Heap::UpdateNewSpaceReferenceInExternalStringTableEntry(Heap* heap,
1103                                                                 Object** p) {
1104   MapWord first_word = HeapObject::cast(*p)->map_word();
1105
1106   if (!first_word.IsForwardingAddress()) {
1107     // Unreachable external string can be finalized.
1108     heap->FinalizeExternalString(String::cast(*p));
1109     return NULL;
1110   }
1111
1112   // String is still reachable.
1113   return String::cast(first_word.ToForwardingAddress());
1114 }
1115
1116
1117 void Heap::UpdateNewSpaceReferencesInExternalStringTable(
1118     ExternalStringTableUpdaterCallback updater_func) {
1119   if (FLAG_verify_heap) {
1120     external_string_table_.Verify();
1121   }
1122
1123   if (external_string_table_.new_space_strings_.is_empty()) return;
1124
1125   Object** start = &external_string_table_.new_space_strings_[0];
1126   Object** end = start + external_string_table_.new_space_strings_.length();
1127   Object** last = start;
1128
1129   for (Object** p = start; p < end; ++p) {
1130     ASSERT(InFromSpace(*p));
1131     String* target = updater_func(this, p);
1132
1133     if (target == NULL) continue;
1134
1135     ASSERT(target->IsExternalString());
1136
1137     if (InNewSpace(target)) {
1138       // String is still in new space.  Update the table entry.
1139       *last = target;
1140       ++last;
1141     } else {
1142       // String got promoted.  Move it to the old string list.
1143       external_string_table_.AddOldString(target);
1144     }
1145   }
1146
1147   ASSERT(last <= end);
1148   external_string_table_.ShrinkNewStrings(static_cast<int>(last - start));
1149 }
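// Note on the loop above, added for clarity: the new-space string table is
// compacted in place.  'last' trails 'p'; strings that stay in new space are
// copied down to 'last', promoted strings move to the old string list, and
// ShrinkNewStrings() drops the now-unused tail of the array.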
1150
1151
1152 void Heap::UpdateReferencesInExternalStringTable(
1153     ExternalStringTableUpdaterCallback updater_func) {
1154
1155   // Update old space string references.
1156   if (external_string_table_.old_space_strings_.length() > 0) {
1157     Object** start = &external_string_table_.old_space_strings_[0];
1158     Object** end = start + external_string_table_.old_space_strings_.length();
1159     for (Object** p = start; p < end; ++p) *p = updater_func(this, p);
1160   }
1161
1162   UpdateNewSpaceReferencesInExternalStringTable(updater_func);
1163 }
1164
1165
1166 static Object* ProcessFunctionWeakReferences(Heap* heap,
1167                                              Object* function,
1168                                              WeakObjectRetainer* retainer) {
1169   Object* undefined = heap->undefined_value();
1170   Object* head = undefined;
1171   JSFunction* tail = NULL;
1172   Object* candidate = function;
1173   while (candidate != undefined) {
1174     // Check whether to keep the candidate in the list.
1175     JSFunction* candidate_function = reinterpret_cast<JSFunction*>(candidate);
1176     Object* retain = retainer->RetainAs(candidate);
1177     if (retain != NULL) {
1178       if (head == undefined) {
1179         // First element in the list.
1180         head = retain;
1181       } else {
1182         // Subsequent elements in the list.
1183         ASSERT(tail != NULL);
1184         tail->set_next_function_link(retain);
1185       }
1186       // Retained function is new tail.
1187       candidate_function = reinterpret_cast<JSFunction*>(retain);
1188       tail = candidate_function;
1189
1190       ASSERT(retain->IsUndefined() || retain->IsJSFunction());
1191
1192       if (retain == undefined) break;
1193     }
1194
1195     // Move to next element in the list.
1196     candidate = candidate_function->next_function_link();
1197   }
1198
1199   // Terminate the list if there are one or more elements.
1200   if (tail != NULL) {
1201     tail->set_next_function_link(undefined);
1202   }
1203
1204   return head;
1205 }
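// Illustrative example of the unlinking above (the function names are
// hypothetical): if the optimized-function list is  f1 -> f2 -> f3  and the
// retainer keeps f1 and f3 but returns NULL for f2, the rebuilt list is
// f1 -> f3, and the tail's next_function_link is reset to undefined.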
1206
1207
1208 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
1209   Object* undefined = undefined_value();
1210   Object* head = undefined;
1211   Context* tail = NULL;
1212   Object* candidate = global_contexts_list_;
1213   while (candidate != undefined) {
1214     // Check whether to keep the candidate in the list.
1215     Context* candidate_context = reinterpret_cast<Context*>(candidate);
1216     Object* retain = retainer->RetainAs(candidate);
1217     if (retain != NULL) {
1218       if (head == undefined) {
1219         // First element in the list.
1220         head = retain;
1221       } else {
1222         // Subsequent elements in the list.
1223         ASSERT(tail != NULL);
1224         tail->set_unchecked(this,
1225                             Context::NEXT_CONTEXT_LINK,
1226                             retain,
1227                             UPDATE_WRITE_BARRIER);
1228       }
1229       // Retained context is new tail.
1230       candidate_context = reinterpret_cast<Context*>(retain);
1231       tail = candidate_context;
1232
1233       if (retain == undefined) break;
1234
1235       // Process the weak list of optimized functions for the context.
1236       Object* function_list_head =
1237           ProcessFunctionWeakReferences(
1238               this,
1239               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
1240               retainer);
1241       candidate_context->set_unchecked(this,
1242                                        Context::OPTIMIZED_FUNCTIONS_LIST,
1243                                        function_list_head,
1244                                        UPDATE_WRITE_BARRIER);
1245     }
1246
1247     // Move to next element in the list.
1248     candidate = candidate_context->get(Context::NEXT_CONTEXT_LINK);
1249   }
1250
1251   // Terminate the list if there are one or more elements.
1252   if (tail != NULL) {
1253     tail->set_unchecked(this,
1254                         Context::NEXT_CONTEXT_LINK,
1255                         Heap::undefined_value(),
1256                         UPDATE_WRITE_BARRIER);
1257   }
1258
1259   // Update the head of the list of contexts.
1260   global_contexts_list_ = head;
1261 }
1262
1263
1264 class NewSpaceScavenger : public StaticNewSpaceVisitor<NewSpaceScavenger> {
1265  public:
1266   static inline void VisitPointer(Heap* heap, Object** p) {
1267     Object* object = *p;
1268     if (!heap->InNewSpace(object)) return;
1269     Heap::ScavengeObject(reinterpret_cast<HeapObject**>(p),
1270                          reinterpret_cast<HeapObject*>(object));
1271   }
1272 };
1273
1274
1275 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
1276                          Address new_space_front) {
1277   do {
1278     SemiSpace::AssertValidRange(new_space_front, new_space_.top());
1279     // The addresses new_space_front and new_space_.top() define a
1280     // queue of unprocessed copied objects.  Process them until the
1281     // queue is empty.
1282     while (new_space_front != new_space_.top()) {
1283       if (!NewSpacePage::IsAtEnd(new_space_front)) {
1284         HeapObject* object = HeapObject::FromAddress(new_space_front);
1285         new_space_front +=
1286           NewSpaceScavenger::IterateBody(object->map(), object);
1287       } else {
1288         new_space_front =
1289             NewSpacePage::FromLimit(new_space_front)->next_page()->body();
1290       }
1291     }
1292
1293     // Promote and process all the to-be-promoted objects.
1294     {
1295       StoreBufferRebuildScope scope(this,
1296                                     store_buffer(),
1297                                     &ScavengeStoreBufferCallback);
1298       while (!promotion_queue()->is_empty()) {
1299         HeapObject* target;
1300         int size;
1301         promotion_queue()->remove(&target, &size);
1302
1303         // The promoted object might already have been partially visited
1304         // during old space pointer iteration.  Thus we search specifically
1305         // for pointers into the from semispace instead of looking for
1306         // pointers into new space.
1307         ASSERT(!target->IsMap());
1308         IterateAndMarkPointersToFromSpace(target->address(),
1309                                           target->address() + size,
1310                                           &ScavengeObject);
1311       }
1312     }
1313
1314     // Take another spin if there are now unswept objects in new space
1315     // (there are currently no more unswept promoted objects).
1316   } while (new_space_front != new_space_.top());
1317
1318   return new_space_front;
1319 }
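// Note on the loop above, added for clarity: DoScavenge drains two queues
// until both are empty: the copied-but-unswept objects between
// new_space_front and new_space_.top(), and the promotion queue of objects
// moved to the old generation.  Processing either queue can grow the other,
// hence the outer do/while.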
1320
1321
1322 enum LoggingAndProfiling {
1323   LOGGING_AND_PROFILING_ENABLED,
1324   LOGGING_AND_PROFILING_DISABLED
1325 };
1326
1327
1328 enum MarksHandling { TRANSFER_MARKS, IGNORE_MARKS };
1329
1330
1331 template<MarksHandling marks_handling,
1332          LoggingAndProfiling logging_and_profiling_mode>
1333 class ScavengingVisitor : public StaticVisitorBase {
1334  public:
1335   static void Initialize() {
1336     table_.Register(kVisitSeqAsciiString, &EvacuateSeqAsciiString);
1337     table_.Register(kVisitSeqTwoByteString, &EvacuateSeqTwoByteString);
1338     table_.Register(kVisitShortcutCandidate, &EvacuateShortcutCandidate);
1339     table_.Register(kVisitByteArray, &EvacuateByteArray);
1340     table_.Register(kVisitFixedArray, &EvacuateFixedArray);
1341     table_.Register(kVisitFixedDoubleArray, &EvacuateFixedDoubleArray);
1342
1343     table_.Register(kVisitGlobalContext,
1344                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1345                         template VisitSpecialized<Context::kSize>);
1346
1347     table_.Register(kVisitConsString,
1348                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1349                         template VisitSpecialized<ConsString::kSize>);
1350
1351     table_.Register(kVisitSlicedString,
1352                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1353                         template VisitSpecialized<SlicedString::kSize>);
1354
1355     table_.Register(kVisitSharedFunctionInfo,
1356                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1357                         template VisitSpecialized<SharedFunctionInfo::kSize>);
1358
1359     table_.Register(kVisitJSWeakMap,
1360                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1361                     Visit);
1362
1363     table_.Register(kVisitJSRegExp,
1364                     &ObjectEvacuationStrategy<POINTER_OBJECT>::
1365                     Visit);
1366
1367     if (marks_handling == IGNORE_MARKS) {
1368       table_.Register(kVisitJSFunction,
1369                       &ObjectEvacuationStrategy<POINTER_OBJECT>::
1370                           template VisitSpecialized<JSFunction::kSize>);
1371     } else {
1372       table_.Register(kVisitJSFunction, &EvacuateJSFunction);
1373     }
1374
1375     table_.RegisterSpecializations<ObjectEvacuationStrategy<DATA_OBJECT>,
1376                                    kVisitDataObject,
1377                                    kVisitDataObjectGeneric>();
1378
1379     table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
1380                                    kVisitJSObject,
1381                                    kVisitJSObjectGeneric>();
1382
1383     table_.RegisterSpecializations<ObjectEvacuationStrategy<POINTER_OBJECT>,
1384                                    kVisitStruct,
1385                                    kVisitStructGeneric>();
1386   }
1387
1388   static VisitorDispatchTable<ScavengingCallback>* GetTable() {
1389     return &table_;
1390   }
1391
1392  private:
1393   enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
1394   enum SizeRestriction { SMALL, UNKNOWN_SIZE };
1395
1396   static void RecordCopiedObject(Heap* heap, HeapObject* obj) {
1397     bool should_record = false;
1398 #ifdef DEBUG
1399     should_record = FLAG_heap_stats;
1400 #endif
1401     should_record = should_record || FLAG_log_gc;
1402     if (should_record) {
1403       if (heap->new_space()->Contains(obj)) {
1404         heap->new_space()->RecordAllocation(obj);
1405       } else {
1406         heap->new_space()->RecordPromotion(obj);
1407       }
1408     }
1409   }
1410
1411   // Helper function used by CopyObject to copy a source object to an
1412   // allocated target object and update the forwarding pointer in the source
1413   // object.  Returns the target object.
1414   INLINE(static HeapObject* MigrateObject(Heap* heap,
1415                                           HeapObject* source,
1416                                           HeapObject* target,
1417                                           int size)) {
1418     // Copy the content of source to target.
1419     heap->CopyBlock(target->address(), source->address(), size);
1420
1421     // Set the forwarding address.
1422     source->set_map_word(MapWord::FromForwardingAddress(target));
1423
1424     if (logging_and_profiling_mode == LOGGING_AND_PROFILING_ENABLED) {
1425       // Update NewSpace stats if necessary.
1426       RecordCopiedObject(heap, target);
1427       HEAP_PROFILE(heap, ObjectMoveEvent(source->address(), target->address()));
1428       Isolate* isolate = heap->isolate();
1429       if (isolate->logger()->is_logging() ||
1430           CpuProfiler::is_profiling(isolate)) {
1431         if (target->IsSharedFunctionInfo()) {
1432           PROFILE(isolate, SharedFunctionInfoMoveEvent(
1433               source->address(), target->address()));
1434         }
1435       }
1436     }
1437
1438     if (marks_handling == TRANSFER_MARKS) {
1439       if (Marking::TransferColor(source, target)) {
1440         MemoryChunk::IncrementLiveBytes(target->address(), size);
1441       }
1442     }
1443
1444     return target;
1445   }
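  // Note on MigrateObject above, added for clarity: after the copy, the
  // source object's map word no longer holds a map but the forwarding
  // address of the copy.  Code that later reaches another slot still
  // pointing at the source detects this via MapWord::IsForwardingAddress()
  // and follows ToForwardingAddress() instead (see, for example,
  // EvacuateShortcutCandidate() below and
  // UpdateNewSpaceReferenceInExternalStringTableEntry() above).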
1446
1447   template<ObjectContents object_contents, SizeRestriction size_restriction>
1448   static inline void EvacuateObject(Map* map,
1449                                     HeapObject** slot,
1450                                     HeapObject* object,
1451                                     int object_size) {
1452     SLOW_ASSERT((size_restriction != SMALL) ||
1453                 (object_size <= Page::kMaxHeapObjectSize));
1454     SLOW_ASSERT(object->Size() == object_size);
1455
1456     Heap* heap = map->GetHeap();
1457     if (heap->ShouldBePromoted(object->address(), object_size)) {
1458       MaybeObject* maybe_result;
1459
1460       if ((size_restriction != SMALL) &&
1461           (object_size > Page::kMaxHeapObjectSize)) {
1462         maybe_result = heap->lo_space()->AllocateRaw(object_size,
1463                                                      NOT_EXECUTABLE);
1464       } else {
1465         if (object_contents == DATA_OBJECT) {
1466           maybe_result = heap->old_data_space()->AllocateRaw(object_size);
1467         } else {
1468           maybe_result = heap->old_pointer_space()->AllocateRaw(object_size);
1469         }
1470       }
1471
1472       Object* result = NULL;  // Initialization to please compiler.
1473       if (maybe_result->ToObject(&result)) {
1474         HeapObject* target = HeapObject::cast(result);
1475         *slot = MigrateObject(heap, object, target, object_size);
1476
1477         if (object_contents == POINTER_OBJECT) {
1478           heap->promotion_queue()->insert(target, object_size);
1479         }
1480
1481         heap->tracer()->increment_promoted_objects_size(object_size);
1482         return;
1483       }
1484     }
1485     MaybeObject* allocation = heap->new_space()->AllocateRaw(object_size);
1486     Object* result = allocation->ToObjectUnchecked();
1487
1488     *slot = MigrateObject(heap, object, HeapObject::cast(result), object_size);
1489     return;
1490   }
1491
1492
1493   static inline void EvacuateJSFunction(Map* map,
1494                                         HeapObject** slot,
1495                                         HeapObject* object) {
1496     ObjectEvacuationStrategy<POINTER_OBJECT>::
1497         template VisitSpecialized<JSFunction::kSize>(map, slot, object);
1498
1499     HeapObject* target = *slot;
1500     MarkBit mark_bit = Marking::MarkBitFrom(target);
1501     if (Marking::IsBlack(mark_bit)) {
1502       // This object is black and it might not be rescanned by the marker.
1503       // We should explicitly record the code entry slot for compaction
1504       // because promotion queue processing (IterateAndMarkPointersToFromSpace)
1505       // will miss it, as it is not HeapObject-tagged.
1506       Address code_entry_slot =
1507           target->address() + JSFunction::kCodeEntryOffset;
1508       Code* code = Code::cast(Code::GetObjectFromEntryAddress(code_entry_slot));
1509       map->GetHeap()->mark_compact_collector()->
1510           RecordCodeEntrySlot(code_entry_slot, code);
1511     }
1512   }
1513
1514
1515   static inline void EvacuateFixedArray(Map* map,
1516                                         HeapObject** slot,
1517                                         HeapObject* object) {
1518     int object_size = FixedArray::BodyDescriptor::SizeOf(map, object);
1519     EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map,
1520                                                  slot,
1521                                                  object,
1522                                                  object_size);
1523   }
1524
1525
1526   static inline void EvacuateFixedDoubleArray(Map* map,
1527                                               HeapObject** slot,
1528                                               HeapObject* object) {
1529     int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
1530     int object_size = FixedDoubleArray::SizeFor(length);
1531     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map,
1532                                               slot,
1533                                               object,
1534                                               object_size);
1535   }
1536
1537
1538   static inline void EvacuateByteArray(Map* map,
1539                                        HeapObject** slot,
1540                                        HeapObject* object) {
1541     int object_size = reinterpret_cast<ByteArray*>(object)->ByteArraySize();
1542     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1543   }
1544
1545
1546   static inline void EvacuateSeqAsciiString(Map* map,
1547                                             HeapObject** slot,
1548                                             HeapObject* object) {
1549     int object_size = SeqAsciiString::cast(object)->
1550         SeqAsciiStringSize(map->instance_type());
1551     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1552   }
1553
1554
1555   static inline void EvacuateSeqTwoByteString(Map* map,
1556                                               HeapObject** slot,
1557                                               HeapObject* object) {
1558     int object_size = SeqTwoByteString::cast(object)->
1559         SeqTwoByteStringSize(map->instance_type());
1560     EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
1561   }
1562
1563
1564   static inline bool IsShortcutCandidate(int type) {
1565     return ((type & kShortcutTypeMask) == kShortcutTypeTag);
1566   }
1567
1568   static inline void EvacuateShortcutCandidate(Map* map,
1569                                                HeapObject** slot,
1570                                                HeapObject* object) {
1571     ASSERT(IsShortcutCandidate(map->instance_type()));
1572
1573     Heap* heap = map->GetHeap();
1574
1575     if (marks_handling == IGNORE_MARKS &&
1576         ConsString::cast(object)->unchecked_second() ==
1577         heap->empty_string()) {
1578       HeapObject* first =
1579           HeapObject::cast(ConsString::cast(object)->unchecked_first());
1580
1581       *slot = first;
1582
1583       if (!heap->InNewSpace(first)) {
1584         object->set_map_word(MapWord::FromForwardingAddress(first));
1585         return;
1586       }
1587
1588       MapWord first_word = first->map_word();
1589       if (first_word.IsForwardingAddress()) {
1590         HeapObject* target = first_word.ToForwardingAddress();
1591
1592         *slot = target;
1593         object->set_map_word(MapWord::FromForwardingAddress(target));
1594         return;
1595       }
1596
1597       heap->DoScavengeObject(first->map(), slot, first);
1598       object->set_map_word(MapWord::FromForwardingAddress(*slot));
1599       return;
1600     }
1601
1602     int object_size = ConsString::kSize;
1603     EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
1604   }
1605
1606   template<ObjectContents object_contents>
1607   class ObjectEvacuationStrategy {
1608    public:
1609     template<int object_size>
1610     static inline void VisitSpecialized(Map* map,
1611                                         HeapObject** slot,
1612                                         HeapObject* object) {
1613       EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
1614     }
1615
1616     static inline void Visit(Map* map,
1617                              HeapObject** slot,
1618                              HeapObject* object) {
1619       int object_size = map->instance_size();
1620       EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
1621     }
1622   };
1623
1624   static VisitorDispatchTable<ScavengingCallback> table_;
1625 };
1626
1627
1628 template<MarksHandling marks_handling,
1629          LoggingAndProfiling logging_and_profiling_mode>
1630 VisitorDispatchTable<ScavengingCallback>
1631     ScavengingVisitor<marks_handling, logging_and_profiling_mode>::table_;
1632
1633
1634 static void InitializeScavengingVisitorsTables() {
1635   ScavengingVisitor<TRANSFER_MARKS,
1636                     LOGGING_AND_PROFILING_DISABLED>::Initialize();
1637   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_DISABLED>::Initialize();
1638   ScavengingVisitor<TRANSFER_MARKS,
1639                     LOGGING_AND_PROFILING_ENABLED>::Initialize();
1640   ScavengingVisitor<IGNORE_MARKS, LOGGING_AND_PROFILING_ENABLED>::Initialize();
1641 }
1642
1643
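// Picks the scavenging visitor table matching the current collector state:
// marks are transferred while incremental marking is active, and the logging
// variant is used whenever the logger or one of the profilers is running.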
1644 void Heap::SelectScavengingVisitorsTable() {
1645   bool logging_and_profiling =
1646       isolate()->logger()->is_logging() ||
1647       CpuProfiler::is_profiling(isolate()) ||
1648       (isolate()->heap_profiler() != NULL &&
1649        isolate()->heap_profiler()->is_profiling());
1650
1651   if (!incremental_marking()->IsMarking()) {
1652     if (!logging_and_profiling) {
1653       scavenging_visitors_table_.CopyFrom(
1654           ScavengingVisitor<IGNORE_MARKS,
1655                             LOGGING_AND_PROFILING_DISABLED>::GetTable());
1656     } else {
1657       scavenging_visitors_table_.CopyFrom(
1658           ScavengingVisitor<IGNORE_MARKS,
1659                             LOGGING_AND_PROFILING_ENABLED>::GetTable());
1660     }
1661   } else {
1662     if (!logging_and_profiling) {
1663       scavenging_visitors_table_.CopyFrom(
1664           ScavengingVisitor<TRANSFER_MARKS,
1665                             LOGGING_AND_PROFILING_DISABLED>::GetTable());
1666     } else {
1667       scavenging_visitors_table_.CopyFrom(
1668           ScavengingVisitor<TRANSFER_MARKS,
1669                             LOGGING_AND_PROFILING_ENABLED>::GetTable());
1670     }
1671
1672     if (incremental_marking()->IsCompacting()) {
1673       // When compacting, forbid short-circuiting of cons-strings.
1674       // The scavenging code relies on the fact that a new space object
1675       // can't be evacuated into an evacuation candidate, but
1676       // short-circuiting violates this assumption.
1677       scavenging_visitors_table_.Register(
1678           StaticVisitorBase::kVisitShortcutCandidate,
1679           scavenging_visitors_table_.GetVisitorById(
1680               StaticVisitorBase::kVisitConsString));
1681     }
1682   }
1683 }
1684
1685
1686 void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
1687   SLOW_ASSERT(HEAP->InFromSpace(object));
1688   MapWord first_word = object->map_word();
1689   SLOW_ASSERT(!first_word.IsForwardingAddress());
1690   Map* map = first_word.ToMap();
1691   map->GetHeap()->DoScavengeObject(map, p, object);
1692 }
1693
1694
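// Allocates a map with only the fields needed during bootstrapping filled in;
// CreateInitialMaps() patches the remaining fields (instance descriptors,
// code cache, prototype) once the objects they point to exist.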
1695 MaybeObject* Heap::AllocatePartialMap(InstanceType instance_type,
1696                                       int instance_size) {
1697   Object* result;
1698   { MaybeObject* maybe_result = AllocateRawMap();
1699     if (!maybe_result->ToObject(&result)) return maybe_result;
1700   }
1701
1702   // Map::cast cannot be used due to uninitialized map field.
1703   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
1704   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
1705   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
1706   reinterpret_cast<Map*>(result)->set_visitor_id(
1707         StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1708   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
1709   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
1710   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
1711   reinterpret_cast<Map*>(result)->set_bit_field(0);
1712   reinterpret_cast<Map*>(result)->set_bit_field2(0);
1713   return result;
1714 }
1715
1716
1717 MaybeObject* Heap::AllocateMap(InstanceType instance_type,
1718                                int instance_size,
1719                                ElementsKind elements_kind) {
1720   Object* result;
1721   { MaybeObject* maybe_result = AllocateRawMap();
1722     if (!maybe_result->ToObject(&result)) return maybe_result;
1723   }
1724
1725   Map* map = reinterpret_cast<Map*>(result);
1726   map->set_map(meta_map());
1727   map->set_instance_type(instance_type);
1728   map->set_visitor_id(
1729       StaticVisitorBase::GetVisitorId(instance_type, instance_size));
1730   map->set_prototype(null_value());
1731   map->set_constructor(null_value());
1732   map->set_instance_size(instance_size);
1733   map->set_inobject_properties(0);
1734   map->set_pre_allocated_property_fields(0);
1735   map->init_instance_descriptors();
1736   map->set_code_cache(empty_fixed_array());
1737   map->set_prototype_transitions(empty_fixed_array());
1738   map->set_unused_property_fields(0);
1739   map->set_bit_field(0);
1740   map->set_bit_field2(1 << Map::kIsExtensible);
1741   map->set_elements_kind(elements_kind);
1742
1743   // If the map object is aligned, fill the padding area with Smi 0 objects.
1744   if (Map::kPadStart < Map::kSize) {
1745     memset(reinterpret_cast<byte*>(map) + Map::kPadStart - kHeapObjectTag,
1746            0,
1747            Map::kSize - Map::kPadStart);
1748   }
1749   return map;
1750 }
1751
1752
1753 MaybeObject* Heap::AllocateCodeCache() {
1754   Object* result;
1755   { MaybeObject* maybe_result = AllocateStruct(CODE_CACHE_TYPE);
1756     if (!maybe_result->ToObject(&result)) return maybe_result;
1757   }
1758   CodeCache* code_cache = CodeCache::cast(result);
1759   code_cache->set_default_cache(empty_fixed_array());
1760   code_cache->set_normal_type_cache(undefined_value());
1761   return code_cache;
1762 }
1763
1764
1765 MaybeObject* Heap::AllocatePolymorphicCodeCache() {
1766   return AllocateStruct(POLYMORPHIC_CODE_CACHE_TYPE);
1767 }
1768
1769
1770 const Heap::StringTypeTable Heap::string_type_table[] = {
1771 #define STRING_TYPE_ELEMENT(type, size, name, camel_name)                      \
1772   {type, size, k##camel_name##MapRootIndex},
1773   STRING_TYPE_LIST(STRING_TYPE_ELEMENT)
1774 #undef STRING_TYPE_ELEMENT
1775 };
1776
1777
1778 const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = {
1779 #define CONSTANT_SYMBOL_ELEMENT(name, contents)                                \
1780   {contents, k##name##RootIndex},
1781   SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT)
1782 #undef CONSTANT_SYMBOL_ELEMENT
1783 };
1784
1785
1786 const Heap::StructTable Heap::struct_table[] = {
1787 #define STRUCT_TABLE_ELEMENT(NAME, Name, name)                                 \
1788   { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex },
1789   STRUCT_LIST(STRUCT_TABLE_ELEMENT)
1790 #undef STRUCT_TABLE_ELEMENT
1791 };
1792
1793
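// Bootstraps the map hierarchy: the meta map and a few partial maps are
// allocated first, patched up once the empty fixed array and the empty
// descriptor array exist, and the remaining maps are then allocated with the
// regular AllocateMap().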
1794 bool Heap::CreateInitialMaps() {
1795   Object* obj;
1796   { MaybeObject* maybe_obj = AllocatePartialMap(MAP_TYPE, Map::kSize);
1797     if (!maybe_obj->ToObject(&obj)) return false;
1798   }
1799   // Map::cast cannot be used due to uninitialized map field.
1800   Map* new_meta_map = reinterpret_cast<Map*>(obj);
1801   set_meta_map(new_meta_map);
1802   new_meta_map->set_map(new_meta_map);
1803
1804   { MaybeObject* maybe_obj =
1805         AllocatePartialMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1806     if (!maybe_obj->ToObject(&obj)) return false;
1807   }
1808   set_fixed_array_map(Map::cast(obj));
1809
1810   { MaybeObject* maybe_obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize);
1811     if (!maybe_obj->ToObject(&obj)) return false;
1812   }
1813   set_oddball_map(Map::cast(obj));
1814
1815   // Allocate the empty array.
1816   { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1817     if (!maybe_obj->ToObject(&obj)) return false;
1818   }
1819   set_empty_fixed_array(FixedArray::cast(obj));
1820
1821   { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
1822     if (!maybe_obj->ToObject(&obj)) return false;
1823   }
1824   set_null_value(Oddball::cast(obj));
1825   Oddball::cast(obj)->set_kind(Oddball::kNull);
1826
1827   { MaybeObject* maybe_obj = Allocate(oddball_map(), OLD_POINTER_SPACE);
1828     if (!maybe_obj->ToObject(&obj)) return false;
1829   }
1830   set_undefined_value(Oddball::cast(obj));
1831   Oddball::cast(obj)->set_kind(Oddball::kUndefined);
1832   ASSERT(!InNewSpace(undefined_value()));
1833
1834   // Allocate the empty descriptor array.
1835   { MaybeObject* maybe_obj = AllocateEmptyFixedArray();
1836     if (!maybe_obj->ToObject(&obj)) return false;
1837   }
1838   set_empty_descriptor_array(DescriptorArray::cast(obj));
1839
1840   // Fix the instance_descriptors for the existing maps.
1841   meta_map()->init_instance_descriptors();
1842   meta_map()->set_code_cache(empty_fixed_array());
1843   meta_map()->set_prototype_transitions(empty_fixed_array());
1844
1845   fixed_array_map()->init_instance_descriptors();
1846   fixed_array_map()->set_code_cache(empty_fixed_array());
1847   fixed_array_map()->set_prototype_transitions(empty_fixed_array());
1848
1849   oddball_map()->init_instance_descriptors();
1850   oddball_map()->set_code_cache(empty_fixed_array());
1851   oddball_map()->set_prototype_transitions(empty_fixed_array());
1852
1853   // Fix prototype object for existing maps.
1854   meta_map()->set_prototype(null_value());
1855   meta_map()->set_constructor(null_value());
1856
1857   fixed_array_map()->set_prototype(null_value());
1858   fixed_array_map()->set_constructor(null_value());
1859
1860   oddball_map()->set_prototype(null_value());
1861   oddball_map()->set_constructor(null_value());
1862
1863   { MaybeObject* maybe_obj =
1864         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1865     if (!maybe_obj->ToObject(&obj)) return false;
1866   }
1867   set_fixed_cow_array_map(Map::cast(obj));
1868   ASSERT(fixed_array_map() != fixed_cow_array_map());
1869
1870   { MaybeObject* maybe_obj =
1871         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1872     if (!maybe_obj->ToObject(&obj)) return false;
1873   }
1874   set_serialized_scope_info_map(Map::cast(obj));
1875
1876   { MaybeObject* maybe_obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize);
1877     if (!maybe_obj->ToObject(&obj)) return false;
1878   }
1879   set_heap_number_map(Map::cast(obj));
1880
1881   { MaybeObject* maybe_obj = AllocateMap(FOREIGN_TYPE, Foreign::kSize);
1882     if (!maybe_obj->ToObject(&obj)) return false;
1883   }
1884   set_foreign_map(Map::cast(obj));
1885
1886   for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) {
1887     const StringTypeTable& entry = string_type_table[i];
1888     { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
1889       if (!maybe_obj->ToObject(&obj)) return false;
1890     }
1891     roots_[entry.index] = Map::cast(obj);
1892   }
1893
1894   { MaybeObject* maybe_obj = AllocateMap(STRING_TYPE, kVariableSizeSentinel);
1895     if (!maybe_obj->ToObject(&obj)) return false;
1896   }
1897   set_undetectable_string_map(Map::cast(obj));
1898   Map::cast(obj)->set_is_undetectable();
1899
1900   { MaybeObject* maybe_obj =
1901         AllocateMap(ASCII_STRING_TYPE, kVariableSizeSentinel);
1902     if (!maybe_obj->ToObject(&obj)) return false;
1903   }
1904   set_undetectable_ascii_string_map(Map::cast(obj));
1905   Map::cast(obj)->set_is_undetectable();
1906
1907   { MaybeObject* maybe_obj =
1908         AllocateMap(FIXED_DOUBLE_ARRAY_TYPE, kVariableSizeSentinel);
1909     if (!maybe_obj->ToObject(&obj)) return false;
1910   }
1911   set_fixed_double_array_map(Map::cast(obj));
1912
1913   { MaybeObject* maybe_obj =
1914         AllocateMap(BYTE_ARRAY_TYPE, kVariableSizeSentinel);
1915     if (!maybe_obj->ToObject(&obj)) return false;
1916   }
1917   set_byte_array_map(Map::cast(obj));
1918
1919   { MaybeObject* maybe_obj =
1920         AllocateMap(FREE_SPACE_TYPE, kVariableSizeSentinel);
1921     if (!maybe_obj->ToObject(&obj)) return false;
1922   }
1923   set_free_space_map(Map::cast(obj));
1924
1925   { MaybeObject* maybe_obj = AllocateByteArray(0, TENURED);
1926     if (!maybe_obj->ToObject(&obj)) return false;
1927   }
1928   set_empty_byte_array(ByteArray::cast(obj));
1929
1930   { MaybeObject* maybe_obj =
1931         AllocateMap(EXTERNAL_PIXEL_ARRAY_TYPE, ExternalArray::kAlignedSize);
1932     if (!maybe_obj->ToObject(&obj)) return false;
1933   }
1934   set_external_pixel_array_map(Map::cast(obj));
1935
1936   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_BYTE_ARRAY_TYPE,
1937                                          ExternalArray::kAlignedSize);
1938     if (!maybe_obj->ToObject(&obj)) return false;
1939   }
1940   set_external_byte_array_map(Map::cast(obj));
1941
1942   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE,
1943                                          ExternalArray::kAlignedSize);
1944     if (!maybe_obj->ToObject(&obj)) return false;
1945   }
1946   set_external_unsigned_byte_array_map(Map::cast(obj));
1947
1948   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_SHORT_ARRAY_TYPE,
1949                                          ExternalArray::kAlignedSize);
1950     if (!maybe_obj->ToObject(&obj)) return false;
1951   }
1952   set_external_short_array_map(Map::cast(obj));
1953
1954   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE,
1955                                          ExternalArray::kAlignedSize);
1956     if (!maybe_obj->ToObject(&obj)) return false;
1957   }
1958   set_external_unsigned_short_array_map(Map::cast(obj));
1959
1960   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_INT_ARRAY_TYPE,
1961                                          ExternalArray::kAlignedSize);
1962     if (!maybe_obj->ToObject(&obj)) return false;
1963   }
1964   set_external_int_array_map(Map::cast(obj));
1965
1966   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_UNSIGNED_INT_ARRAY_TYPE,
1967                                          ExternalArray::kAlignedSize);
1968     if (!maybe_obj->ToObject(&obj)) return false;
1969   }
1970   set_external_unsigned_int_array_map(Map::cast(obj));
1971
1972   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_FLOAT_ARRAY_TYPE,
1973                                          ExternalArray::kAlignedSize);
1974     if (!maybe_obj->ToObject(&obj)) return false;
1975   }
1976   set_external_float_array_map(Map::cast(obj));
1977
1978   { MaybeObject* maybe_obj =
1979         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
1980     if (!maybe_obj->ToObject(&obj)) return false;
1981   }
1982   set_non_strict_arguments_elements_map(Map::cast(obj));
1983
1984   { MaybeObject* maybe_obj = AllocateMap(EXTERNAL_DOUBLE_ARRAY_TYPE,
1985                                          ExternalArray::kAlignedSize);
1986     if (!maybe_obj->ToObject(&obj)) return false;
1987   }
1988   set_external_double_array_map(Map::cast(obj));
1989
1990   { MaybeObject* maybe_obj = AllocateMap(CODE_TYPE, kVariableSizeSentinel);
1991     if (!maybe_obj->ToObject(&obj)) return false;
1992   }
1993   set_code_map(Map::cast(obj));
1994
1995   { MaybeObject* maybe_obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE,
1996                                          JSGlobalPropertyCell::kSize);
1997     if (!maybe_obj->ToObject(&obj)) return false;
1998   }
1999   set_global_property_cell_map(Map::cast(obj));
2000
2001   { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, kPointerSize);
2002     if (!maybe_obj->ToObject(&obj)) return false;
2003   }
2004   set_one_pointer_filler_map(Map::cast(obj));
2005
2006   { MaybeObject* maybe_obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize);
2007     if (!maybe_obj->ToObject(&obj)) return false;
2008   }
2009   set_two_pointer_filler_map(Map::cast(obj));
2010
2011   for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) {
2012     const StructTable& entry = struct_table[i];
2013     { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
2014       if (!maybe_obj->ToObject(&obj)) return false;
2015     }
2016     roots_[entry.index] = Map::cast(obj);
2017   }
2018
2019   { MaybeObject* maybe_obj =
2020         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2021     if (!maybe_obj->ToObject(&obj)) return false;
2022   }
2023   set_hash_table_map(Map::cast(obj));
2024
2025   { MaybeObject* maybe_obj =
2026         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2027     if (!maybe_obj->ToObject(&obj)) return false;
2028   }
2029   set_function_context_map(Map::cast(obj));
2030
2031   { MaybeObject* maybe_obj =
2032         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2033     if (!maybe_obj->ToObject(&obj)) return false;
2034   }
2035   set_catch_context_map(Map::cast(obj));
2036
2037   { MaybeObject* maybe_obj =
2038         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2039     if (!maybe_obj->ToObject(&obj)) return false;
2040   }
2041   set_with_context_map(Map::cast(obj));
2042
2043   { MaybeObject* maybe_obj =
2044         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2045     if (!maybe_obj->ToObject(&obj)) return false;
2046   }
2047   set_block_context_map(Map::cast(obj));
2048
2049   { MaybeObject* maybe_obj =
2050         AllocateMap(FIXED_ARRAY_TYPE, kVariableSizeSentinel);
2051     if (!maybe_obj->ToObject(&obj)) return false;
2052   }
2053   Map* global_context_map = Map::cast(obj);
2054   global_context_map->set_visitor_id(StaticVisitorBase::kVisitGlobalContext);
2055   set_global_context_map(global_context_map);
2056
2057   { MaybeObject* maybe_obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE,
2058                                          SharedFunctionInfo::kAlignedSize);
2059     if (!maybe_obj->ToObject(&obj)) return false;
2060   }
2061   set_shared_function_info_map(Map::cast(obj));
2062
2063   { MaybeObject* maybe_obj = AllocateMap(JS_MESSAGE_OBJECT_TYPE,
2064                                          JSMessageObject::kSize);
2065     if (!maybe_obj->ToObject(&obj)) return false;
2066   }
2067   set_message_object_map(Map::cast(obj));
2068
2069   ASSERT(!InNewSpace(empty_fixed_array()));
2070   return true;
2071 }
2072
2073
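// Allocates a HeapNumber holding |value|; TENURED numbers are placed directly
// in old data space, everything else goes to new space.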
2074 MaybeObject* Heap::AllocateHeapNumber(double value, PretenureFlag pretenure) {
2075   // Statically ensure that it is safe to allocate heap numbers in paged
2076   // spaces.
2077   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
2078   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2079
2080   Object* result;
2081   { MaybeObject* maybe_result =
2082         AllocateRaw(HeapNumber::kSize, space, OLD_DATA_SPACE);
2083     if (!maybe_result->ToObject(&result)) return maybe_result;
2084   }
2085
2086   HeapObject::cast(result)->set_map(heap_number_map());
2087   HeapNumber::cast(result)->set_value(value);
2088   return result;
2089 }
2090
2091
2092 MaybeObject* Heap::AllocateHeapNumber(double value) {
2093   // Use general version, if we're forced to always allocate.
2094   if (always_allocate()) return AllocateHeapNumber(value, TENURED);
2095
2096   // This version of AllocateHeapNumber is optimized for
2097   // allocation in new space.
2098   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxHeapObjectSize);
2099   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
2100   Object* result;
2101   { MaybeObject* maybe_result = new_space_.AllocateRaw(HeapNumber::kSize);
2102     if (!maybe_result->ToObject(&result)) return maybe_result;
2103   }
2104   HeapObject::cast(result)->set_map(heap_number_map());
2105   HeapNumber::cast(result)->set_value(value);
2106   return result;
2107 }
2108
2109
2110 MaybeObject* Heap::AllocateJSGlobalPropertyCell(Object* value) {
2111   Object* result;
2112   { MaybeObject* maybe_result = AllocateRawCell();
2113     if (!maybe_result->ToObject(&result)) return maybe_result;
2114   }
2115   HeapObject::cast(result)->set_map(global_property_cell_map());
2116   JSGlobalPropertyCell::cast(result)->set_value(value);
2117   return result;
2118 }
2119
2120
2121 MaybeObject* Heap::CreateOddball(const char* to_string,
2122                                  Object* to_number,
2123                                  byte kind) {
2124   Object* result;
2125   { MaybeObject* maybe_result = Allocate(oddball_map(), OLD_POINTER_SPACE);
2126     if (!maybe_result->ToObject(&result)) return maybe_result;
2127   }
2128   return Oddball::cast(result)->Initialize(to_string, to_number, kind);
2129 }
2130
2131
2132 bool Heap::CreateApiObjects() {
2133   Object* obj;
2134
2135   { MaybeObject* maybe_obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize);
2136     if (!maybe_obj->ToObject(&obj)) return false;
2137   }
2138   // Don't use Smi-only elements optimizations for objects with the neander
2139   // map. There are too many cases where element values are set directly,
2140   // without a bottleneck to trap the Smi-only -> fast elements transition,
2141   // and there appears to be no benefit in optimizing this case.
2142   Map* new_neander_map = Map::cast(obj);
2143   new_neander_map->set_elements_kind(FAST_ELEMENTS);
2144   set_neander_map(new_neander_map);
2145
2146   { MaybeObject* maybe_obj = AllocateJSObjectFromMap(neander_map());
2147     if (!maybe_obj->ToObject(&obj)) return false;
2148   }
2149   Object* elements;
2150   { MaybeObject* maybe_elements = AllocateFixedArray(2);
2151     if (!maybe_elements->ToObject(&elements)) return false;
2152   }
2153   FixedArray::cast(elements)->set(0, Smi::FromInt(0));
2154   JSObject::cast(obj)->set_elements(FixedArray::cast(elements));
2155   set_message_listeners(JSObject::cast(obj));
2156
2157   return true;
2158 }
2159
2160
2161 void Heap::CreateJSEntryStub() {
2162   JSEntryStub stub;
2163   set_js_entry_code(*stub.GetCode());
2164 }
2165
2166
2167 void Heap::CreateJSConstructEntryStub() {
2168   JSConstructEntryStub stub;
2169   set_js_construct_entry_code(*stub.GetCode());
2170 }
2171
2172
2173 void Heap::CreateFixedStubs() {
2174   // Here we create roots for fixed stubs. They are needed at GC
2175   // for cooking and uncooking (check out frames.cc).
2176   // This eliminates the need for a dictionary lookup in the
2177   // stub cache for these stubs.
2178   HandleScope scope;
2179   // gcc-4.4 has problem generating correct code of following snippet:
2180   // {  JSEntryStub stub;
2181   //    js_entry_code_ = *stub.GetCode();
2182   // }
2183   // {  JSConstructEntryStub stub;
2184   //    js_construct_entry_code_ = *stub.GetCode();
2185   // }
2186   // To work around the problem, make separate functions without inlining.
2187   Heap::CreateJSEntryStub();
2188   Heap::CreateJSConstructEntryStub();
2189
2190   // Create stubs that should be there, so we don't unexpectedly have to
2191   // create them if we need them during the creation of another stub.
2192   // Stub creation mixes raw pointers and handles in an unsafe manner so
2193   // we cannot create stubs while we are creating stubs.
2194   CodeStub::GenerateStubsAheadOfTime();
2195 }
2196
2197
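// Creates the non-map roots (oddballs, the symbol table, caches and constant
// symbols) that the rest of heap and bootstrapper setup relies on.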
2198 bool Heap::CreateInitialObjects() {
2199   Object* obj;
2200
2201   // The -0 value must be set before NumberFromDouble works.
2202   { MaybeObject* maybe_obj = AllocateHeapNumber(-0.0, TENURED);
2203     if (!maybe_obj->ToObject(&obj)) return false;
2204   }
2205   set_minus_zero_value(HeapNumber::cast(obj));
2206   ASSERT(signbit(minus_zero_value()->Number()) != 0);
2207
2208   { MaybeObject* maybe_obj = AllocateHeapNumber(OS::nan_value(), TENURED);
2209     if (!maybe_obj->ToObject(&obj)) return false;
2210   }
2211   set_nan_value(HeapNumber::cast(obj));
2212
2213   { MaybeObject* maybe_obj = AllocateHeapNumber(V8_INFINITY, TENURED);
2214     if (!maybe_obj->ToObject(&obj)) return false;
2215   }
2216   set_infinity_value(HeapNumber::cast(obj));
2217
2218   // Allocate initial symbol table.
2219   { MaybeObject* maybe_obj = SymbolTable::Allocate(kInitialSymbolTableSize);
2220     if (!maybe_obj->ToObject(&obj)) return false;
2221   }
2222   // Don't use set_symbol_table() due to asserts.
2223   roots_[kSymbolTableRootIndex] = obj;
2224
2225   // Finish initializing oddballs after creating the symbol table.
2226   { MaybeObject* maybe_obj =
2227         undefined_value()->Initialize("undefined",
2228                                       nan_value(),
2229                                       Oddball::kUndefined);
2230     if (!maybe_obj->ToObject(&obj)) return false;
2231   }
2232
2233   // Initialize the null_value.
2234   { MaybeObject* maybe_obj =
2235         null_value()->Initialize("null", Smi::FromInt(0), Oddball::kNull);
2236     if (!maybe_obj->ToObject(&obj)) return false;
2237   }
2238
2239   { MaybeObject* maybe_obj = CreateOddball("true",
2240                                            Smi::FromInt(1),
2241                                            Oddball::kTrue);
2242     if (!maybe_obj->ToObject(&obj)) return false;
2243   }
2244   set_true_value(Oddball::cast(obj));
2245
2246   { MaybeObject* maybe_obj = CreateOddball("false",
2247                                            Smi::FromInt(0),
2248                                            Oddball::kFalse);
2249     if (!maybe_obj->ToObject(&obj)) return false;
2250   }
2251   set_false_value(Oddball::cast(obj));
2252
2253   { MaybeObject* maybe_obj = CreateOddball("hole",
2254                                            Smi::FromInt(-1),
2255                                            Oddball::kTheHole);
2256     if (!maybe_obj->ToObject(&obj)) return false;
2257   }
2258   set_the_hole_value(Oddball::cast(obj));
2259
2260   { MaybeObject* maybe_obj = CreateOddball("arguments_marker",
2261                                            Smi::FromInt(-2),
2262                                            Oddball::kArgumentMarker);
2263     if (!maybe_obj->ToObject(&obj)) return false;
2264   }
2265   set_arguments_marker(Oddball::cast(obj));
2266
2267   { MaybeObject* maybe_obj = CreateOddball("no_interceptor_result_sentinel",
2268                                            Smi::FromInt(-3),
2269                                            Oddball::kOther);
2270     if (!maybe_obj->ToObject(&obj)) return false;
2271   }
2272   set_no_interceptor_result_sentinel(obj);
2273
2274   { MaybeObject* maybe_obj = CreateOddball("termination_exception",
2275                                            Smi::FromInt(-4),
2276                                            Oddball::kOther);
2277     if (!maybe_obj->ToObject(&obj)) return false;
2278   }
2279   set_termination_exception(obj);
2280
2281   { MaybeObject* maybe_obj = CreateOddball("frame_alignment_marker",
2282                                            Smi::FromInt(-5),
2283                                            Oddball::kOther);
2284     if (!maybe_obj->ToObject(&obj)) return false;
2285   }
2286   set_frame_alignment_marker(Oddball::cast(obj));
2287   STATIC_ASSERT(Oddball::kLeastHiddenOddballNumber == -5);
2288
2289   // Allocate the empty string.
2290   { MaybeObject* maybe_obj = AllocateRawAsciiString(0, TENURED);
2291     if (!maybe_obj->ToObject(&obj)) return false;
2292   }
2293   set_empty_string(String::cast(obj));
2294
2295   for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) {
2296     { MaybeObject* maybe_obj =
2297           LookupAsciiSymbol(constant_symbol_table[i].contents);
2298       if (!maybe_obj->ToObject(&obj)) return false;
2299     }
2300     roots_[constant_symbol_table[i].index] = String::cast(obj);
2301   }
2302
2303   // Allocate the hidden symbol which is used to identify the hidden properties
2304   // in JSObjects. The hash code has a special value so that it will not match
2305   // the empty string when searching for the property. It cannot be part of the
2306   // loop above because it needs to be allocated manually with the special
2307   // hash code in place. The hash code for the hidden_symbol is zero to ensure
2308   // that it will always be at the first entry in property descriptors.
2309   { MaybeObject* maybe_obj =
2310         AllocateSymbol(CStrVector(""), 0, String::kZeroHash);
2311     if (!maybe_obj->ToObject(&obj)) return false;
2312   }
2313   hidden_symbol_ = String::cast(obj);
2314
2315   // Allocate the foreign for __proto__.
2316   { MaybeObject* maybe_obj =
2317         AllocateForeign((Address) &Accessors::ObjectPrototype);
2318     if (!maybe_obj->ToObject(&obj)) return false;
2319   }
2320   set_prototype_accessors(Foreign::cast(obj));
2321
2322   // Allocate the code_stubs dictionary. The initial size is set to avoid
2323   // expanding the dictionary during bootstrapping.
2324   { MaybeObject* maybe_obj = NumberDictionary::Allocate(128);
2325     if (!maybe_obj->ToObject(&obj)) return false;
2326   }
2327   set_code_stubs(NumberDictionary::cast(obj));
2328
2329   // Allocate the non_monomorphic_cache used in stub-cache.cc. The initial size
2330   // is set to avoid expanding the dictionary during bootstrapping.
2331   { MaybeObject* maybe_obj = NumberDictionary::Allocate(64);
2332     if (!maybe_obj->ToObject(&obj)) return false;
2333   }
2334   set_non_monomorphic_cache(NumberDictionary::cast(obj));
2335
2336   { MaybeObject* maybe_obj = AllocatePolymorphicCodeCache();
2337     if (!maybe_obj->ToObject(&obj)) return false;
2338   }
2339   set_polymorphic_code_cache(PolymorphicCodeCache::cast(obj));
2340
2341   set_instanceof_cache_function(Smi::FromInt(0));
2342   set_instanceof_cache_map(Smi::FromInt(0));
2343   set_instanceof_cache_answer(Smi::FromInt(0));
2344
2345   CreateFixedStubs();
2346
2347   // Allocate the dictionary of intrinsic function names.
2348   { MaybeObject* maybe_obj = StringDictionary::Allocate(Runtime::kNumFunctions);
2349     if (!maybe_obj->ToObject(&obj)) return false;
2350   }
2351   { MaybeObject* maybe_obj = Runtime::InitializeIntrinsicFunctionNames(this,
2352                                                                        obj);
2353     if (!maybe_obj->ToObject(&obj)) return false;
2354   }
2355   set_intrinsic_function_names(StringDictionary::cast(obj));
2356
2357   if (InitializeNumberStringCache()->IsFailure()) return false;
2358
2359   // Allocate cache for single character ASCII strings.
2360   { MaybeObject* maybe_obj =
2361         AllocateFixedArray(String::kMaxAsciiCharCode + 1, TENURED);
2362     if (!maybe_obj->ToObject(&obj)) return false;
2363   }
2364   set_single_character_string_cache(FixedArray::cast(obj));
2365
2366   // Allocate cache for string split.
2367   { MaybeObject* maybe_obj =
2368         AllocateFixedArray(StringSplitCache::kStringSplitCacheSize, TENURED);
2369     if (!maybe_obj->ToObject(&obj)) return false;
2370   }
2371   set_string_split_cache(FixedArray::cast(obj));
2372
2373   // Allocate cache for external strings pointing to native source code.
2374   { MaybeObject* maybe_obj = AllocateFixedArray(Natives::GetBuiltinsCount());
2375     if (!maybe_obj->ToObject(&obj)) return false;
2376   }
2377   set_natives_source_cache(FixedArray::cast(obj));
2378
2379   // Handling of script id generation is in FACTORY->NewScript.
2380   set_last_script_id(undefined_value());
2381
2382   // Initialize keyed lookup cache.
2383   isolate_->keyed_lookup_cache()->Clear();
2384
2385   // Initialize context slot cache.
2386   isolate_->context_slot_cache()->Clear();
2387
2388   // Initialize descriptor cache.
2389   isolate_->descriptor_lookup_cache()->Clear();
2390
2391   // Initialize compilation cache.
2392   isolate_->compilation_cache()->Clear();
2393
2394   return true;
2395 }
2396
2397
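// The string split cache is a flat FixedArray organized as groups of
// kArrayEntriesPerCacheEntry slots holding (string, pattern, result array).
// A lookup probes the primary group derived from the subject's hash and one
// secondary group, roughly:
//   primary   = (hash & (kStringSplitCacheSize - 1)) &
//                   ~(kArrayEntriesPerCacheEntry - 1);
//   secondary = (primary + kArrayEntriesPerCacheEntry) &
//                   (kStringSplitCacheSize - 1);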
2398 Object* StringSplitCache::Lookup(
2399     FixedArray* cache, String* string, String* pattern) {
2400   if (!string->IsSymbol() || !pattern->IsSymbol()) return Smi::FromInt(0);
2401   uint32_t hash = string->Hash();
2402   uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
2403       ~(kArrayEntriesPerCacheEntry - 1));
2404   if (cache->get(index + kStringOffset) == string &&
2405       cache->get(index + kPatternOffset) == pattern) {
2406     return cache->get(index + kArrayOffset);
2407   }
2408   index = ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
2409   if (cache->get(index + kStringOffset) == string &&
2410       cache->get(index + kPatternOffset) == pattern) {
2411     return cache->get(index + kArrayOffset);
2412   }
2413   return Smi::FromInt(0);
2414 }
2415
2416
2417 void StringSplitCache::Enter(Heap* heap,
2418                              FixedArray* cache,
2419                              String* string,
2420                              String* pattern,
2421                              FixedArray* array) {
2422   if (!string->IsSymbol() || !pattern->IsSymbol()) return;
2423   uint32_t hash = string->Hash();
2424   uint32_t index = ((hash & (kStringSplitCacheSize - 1)) &
2425       ~(kArrayEntriesPerCacheEntry - 1));
2426   if (cache->get(index + kStringOffset) == Smi::FromInt(0)) {
2427     cache->set(index + kStringOffset, string);
2428     cache->set(index + kPatternOffset, pattern);
2429     cache->set(index + kArrayOffset, array);
2430   } else {
2431     uint32_t index2 =
2432         ((index + kArrayEntriesPerCacheEntry) & (kStringSplitCacheSize - 1));
2433     if (cache->get(index2 + kStringOffset) == Smi::FromInt(0)) {
2434       cache->set(index2 + kStringOffset, string);
2435       cache->set(index2 + kPatternOffset, pattern);
2436       cache->set(index2 + kArrayOffset, array);
2437     } else {
2438       cache->set(index2 + kStringOffset, Smi::FromInt(0));
2439       cache->set(index2 + kPatternOffset, Smi::FromInt(0));
2440       cache->set(index2 + kArrayOffset, Smi::FromInt(0));
2441       cache->set(index + kStringOffset, string);
2442       cache->set(index + kPatternOffset, pattern);
2443       cache->set(index + kArrayOffset, array);
2444     }
2445   }
2446   if (array->length() < 100) {  // Limit how many new symbols we want to make.
2447     for (int i = 0; i < array->length(); i++) {
2448       String* str = String::cast(array->get(i));
2449       Object* symbol;
2450       MaybeObject* maybe_symbol = heap->LookupSymbol(str);
2451       if (maybe_symbol->ToObject(&symbol)) {
2452         array->set(i, symbol);
2453       }
2454     }
2455   }
2456   array->set_map(heap->fixed_cow_array_map());
2457 }
2458
2459
2460 void StringSplitCache::Clear(FixedArray* cache) {
2461   for (int i = 0; i < kStringSplitCacheSize; i++) {
2462     cache->set(i, Smi::FromInt(0));
2463   }
2464 }
2465
2466
2467 MaybeObject* Heap::InitializeNumberStringCache() {
2468   // Compute the size of the number string cache based on the max heap size.
2469   // max_semispace_size_ == 512 KB => number_string_cache_size = 32.
2470   // max_semispace_size_ ==   8 MB => number_string_cache_size = 16KB.
2471   int number_string_cache_size = max_semispace_size_ / 512;
2472   number_string_cache_size = Max(32, Min(16*KB, number_string_cache_size));
2473   Object* obj;
2474   MaybeObject* maybe_obj =
2475       AllocateFixedArray(number_string_cache_size * 2, TENURED);
2476   if (maybe_obj->ToObject(&obj)) set_number_string_cache(FixedArray::cast(obj));
2477   return maybe_obj;
2478 }
2479
2480
2481 void Heap::FlushNumberStringCache() {
2482   // Flush the number to string cache.
2483   int len = number_string_cache()->length();
2484   for (int i = 0; i < len; i++) {
2485     number_string_cache()->set_undefined(this, i);
2486   }
2487 }
2488
2489
2490 static inline int double_get_hash(double d) {
2491   DoubleRepresentation rep(d);
2492   return static_cast<int>(rep.bits) ^ static_cast<int>(rep.bits >> 32);
2493 }
2494
2495
2496 static inline int smi_get_hash(Smi* smi) {
2497   return smi->value();
2498 }
2499
2500
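// The number string cache stores (number, string) pairs in a flat FixedArray:
// slot 2 * hash holds the key (a Smi or HeapNumber) and slot 2 * hash + 1 the
// cached string.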
2501 Object* Heap::GetNumberStringCache(Object* number) {
2502   int hash;
2503   int mask = (number_string_cache()->length() >> 1) - 1;
2504   if (number->IsSmi()) {
2505     hash = smi_get_hash(Smi::cast(number)) & mask;
2506   } else {
2507     hash = double_get_hash(number->Number()) & mask;
2508   }
2509   Object* key = number_string_cache()->get(hash * 2);
2510   if (key == number) {
2511     return String::cast(number_string_cache()->get(hash * 2 + 1));
2512   } else if (key->IsHeapNumber() &&
2513              number->IsHeapNumber() &&
2514              key->Number() == number->Number()) {
2515     return String::cast(number_string_cache()->get(hash * 2 + 1));
2516   }
2517   return undefined_value();
2518 }
2519
2520
2521 void Heap::SetNumberStringCache(Object* number, String* string) {
2522   int hash;
2523   int mask = (number_string_cache()->length() >> 1) - 1;
2524   if (number->IsSmi()) {
2525     hash = smi_get_hash(Smi::cast(number)) & mask;
2526     number_string_cache()->set(hash * 2, Smi::cast(number));
2527   } else {
2528     hash = double_get_hash(number->Number()) & mask;
2529     number_string_cache()->set(hash * 2, number);
2530   }
2531   number_string_cache()->set(hash * 2 + 1, string);
2532 }
2533
2534
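// Converts |number| to its string representation. The number string cache is
// consulted first unless the caller opts out, and a freshly allocated string
// is added to the cache on success.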
2535 MaybeObject* Heap::NumberToString(Object* number,
2536                                   bool check_number_string_cache) {
2537   isolate_->counters()->number_to_string_runtime()->Increment();
2538   if (check_number_string_cache) {
2539     Object* cached = GetNumberStringCache(number);
2540     if (cached != undefined_value()) {
2541       return cached;
2542     }
2543   }
2544
2545   char arr[100];
2546   Vector<char> buffer(arr, ARRAY_SIZE(arr));
2547   const char* str;
2548   if (number->IsSmi()) {
2549     int num = Smi::cast(number)->value();
2550     str = IntToCString(num, buffer);
2551   } else {
2552     double num = HeapNumber::cast(number)->value();
2553     str = DoubleToCString(num, buffer);
2554   }
2555
2556   Object* js_string;
2557   MaybeObject* maybe_js_string = AllocateStringFromAscii(CStrVector(str));
2558   if (maybe_js_string->ToObject(&js_string)) {
2559     SetNumberStringCache(number, String::cast(js_string));
2560   }
2561   return maybe_js_string;
2562 }
2563
2564
2565 MaybeObject* Heap::Uint32ToString(uint32_t value,
2566                                   bool check_number_string_cache) {
2567   Object* number;
2568   MaybeObject* maybe = NumberFromUint32(value);
2569   if (!maybe->To<Object>(&number)) return maybe;
2570   return NumberToString(number, check_number_string_cache);
2571 }
2572
2573
2574 Map* Heap::MapForExternalArrayType(ExternalArrayType array_type) {
2575   return Map::cast(roots_[RootIndexForExternalArrayType(array_type)]);
2576 }
2577
2578
2579 Heap::RootListIndex Heap::RootIndexForExternalArrayType(
2580     ExternalArrayType array_type) {
2581   switch (array_type) {
2582     case kExternalByteArray:
2583       return kExternalByteArrayMapRootIndex;
2584     case kExternalUnsignedByteArray:
2585       return kExternalUnsignedByteArrayMapRootIndex;
2586     case kExternalShortArray:
2587       return kExternalShortArrayMapRootIndex;
2588     case kExternalUnsignedShortArray:
2589       return kExternalUnsignedShortArrayMapRootIndex;
2590     case kExternalIntArray:
2591       return kExternalIntArrayMapRootIndex;
2592     case kExternalUnsignedIntArray:
2593       return kExternalUnsignedIntArrayMapRootIndex;
2594     case kExternalFloatArray:
2595       return kExternalFloatArrayMapRootIndex;
2596     case kExternalDoubleArray:
2597       return kExternalDoubleArrayMapRootIndex;
2598     case kExternalPixelArray:
2599       return kExternalPixelArrayMapRootIndex;
2600     default:
2601       UNREACHABLE();
2602       return kUndefinedValueRootIndex;
2603   }
2604 }
2605
2606
2607 MaybeObject* Heap::NumberFromDouble(double value, PretenureFlag pretenure) {
2608   // We need to distinguish the minus zero value and this cannot be
2609   // done after conversion to int. Doing this by comparing bit
2610   // patterns is faster than using fpclassify() et al.
2611   static const DoubleRepresentation minus_zero(-0.0);
2612
2613   DoubleRepresentation rep(value);
2614   if (rep.bits == minus_zero.bits) {
2615     return AllocateHeapNumber(-0.0, pretenure);
2616   }
2617
2618   int int_value = FastD2I(value);
2619   if (value == int_value && Smi::IsValid(int_value)) {
2620     return Smi::FromInt(int_value);
2621   }
2622
2623   // Materialize the value in the heap.
2624   return AllocateHeapNumber(value, pretenure);
2625 }
2626
2627
2628 MaybeObject* Heap::AllocateForeign(Address address, PretenureFlag pretenure) {
2629   // Statically ensure that it is safe to allocate foreigns in paged spaces.
2630   STATIC_ASSERT(Foreign::kSize <= Page::kMaxHeapObjectSize);
2631   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
2632   Foreign* result;
2633   MaybeObject* maybe_result = Allocate(foreign_map(), space);
2634   if (!maybe_result->To(&result)) return maybe_result;
2635   result->set_foreign_address(address);
2636   return result;
2637 }
2638
2639
2640 MaybeObject* Heap::AllocateSharedFunctionInfo(Object* name) {
2641   SharedFunctionInfo* share;
2642   MaybeObject* maybe = Allocate(shared_function_info_map(), OLD_POINTER_SPACE);
2643   if (!maybe->To<SharedFunctionInfo>(&share)) return maybe;
2644
2645   // Set pointer fields.
2646   share->set_name(name);
2647   Code* illegal = isolate_->builtins()->builtin(Builtins::kIllegal);
2648   share->set_code(illegal);
2649   share->set_scope_info(SerializedScopeInfo::Empty());
2650   Code* construct_stub =
2651       isolate_->builtins()->builtin(Builtins::kJSConstructStubGeneric);
2652   share->set_construct_stub(construct_stub);
2653   share->set_instance_class_name(Object_symbol());
2654   share->set_function_data(undefined_value());
2655   share->set_script(undefined_value());
2656   share->set_debug_info(undefined_value());
2657   share->set_inferred_name(empty_string());
2658   share->set_initial_map(undefined_value());
2659   share->set_this_property_assignments(undefined_value());
2660   share->set_deopt_counter(Smi::FromInt(FLAG_deopt_every_n_times));
2661
2662   // Set integer fields (smi or int, depending on the architecture).
2663   share->set_length(0);
2664   share->set_formal_parameter_count(0);
2665   share->set_expected_nof_properties(0);
2666   share->set_num_literals(0);
2667   share->set_start_position_and_type(0);
2668   share->set_end_position(0);
2669   share->set_function_token_position(0);
2670   // All compiler hints default to false or 0.
2671   share->set_compiler_hints(0);
2672   share->set_this_property_assignments_count(0);
2673   share->set_opt_count(0);
2674
2675   return share;
2676 }
2677
2678
2679 MaybeObject* Heap::AllocateJSMessageObject(String* type,
2680                                            JSArray* arguments,
2681                                            int start_position,
2682                                            int end_position,
2683                                            Object* script,
2684                                            Object* stack_trace,
2685                                            Object* stack_frames) {
2686   Object* result;
2687   { MaybeObject* maybe_result = Allocate(message_object_map(), NEW_SPACE);
2688     if (!maybe_result->ToObject(&result)) return maybe_result;
2689   }
2690   JSMessageObject* message = JSMessageObject::cast(result);
2691   message->set_properties(Heap::empty_fixed_array());
2692   message->set_elements(Heap::empty_fixed_array());
2693   message->set_type(type);
2694   message->set_arguments(arguments);
2695   message->set_start_position(start_position);
2696   message->set_end_position(end_position);
2697   message->set_script(script);
2698   message->set_stack_trace(stack_trace);
2699   message->set_stack_frames(stack_frames);
2700   return result;
2701 }
2702
2703
2704
2705 // Returns true for a character in a range.  Both limits are inclusive.
2706 static inline bool Between(uint32_t character, uint32_t from, uint32_t to) {
2707   // This makes use of unsigned wraparound.
2708   return character - from <= to - from;
2709 }
2710
2711
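// Returns the existing symbol for the two characters c1 and c2 if one is in
// the symbol table, and otherwise allocates a fresh sequential ASCII or
// two-byte string, whichever is sufficient for the character values.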
2712 MUST_USE_RESULT static inline MaybeObject* MakeOrFindTwoCharacterString(
2713     Heap* heap,
2714     uint32_t c1,
2715     uint32_t c2) {
2716   String* symbol;
2717   // Numeric strings have a different hash algorithm not known by
2718   // LookupTwoCharsSymbolIfExists, so we skip this step for such strings.
2719   if ((!Between(c1, '0', '9') || !Between(c2, '0', '9')) &&
2720       heap->symbol_table()->LookupTwoCharsSymbolIfExists(c1, c2, &symbol)) {
2721     return symbol;
2722   // Now that we know the length is 2, we might as well make use of that
2723   // fact when building the new string.
2724   } else if ((c1 | c2) <= String::kMaxAsciiCharCodeU) {  // We can do this
2725     ASSERT(IsPowerOf2(String::kMaxAsciiCharCodeU + 1));  // because of this.
2726     Object* result;
2727     { MaybeObject* maybe_result = heap->AllocateRawAsciiString(2);
2728       if (!maybe_result->ToObject(&result)) return maybe_result;
2729     }
2730     char* dest = SeqAsciiString::cast(result)->GetChars();
2731     dest[0] = c1;
2732     dest[1] = c2;
2733     return result;
2734   } else {
2735     Object* result;
2736     { MaybeObject* maybe_result = heap->AllocateRawTwoByteString(2);
2737       if (!maybe_result->ToObject(&result)) return maybe_result;
2738     }
2739     uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2740     dest[0] = c1;
2741     dest[1] = c2;
2742     return result;
2743   }
2744 }
2745
2746
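// Concatenates two strings. Very short results are flattened eagerly into a
// sequential string; anything of at least String::kMinNonFlatLength becomes a
// ConsString whose map depends on whether both halves hold ASCII data.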
2747 MaybeObject* Heap::AllocateConsString(String* first, String* second) {
2748   int first_length = first->length();
2749   if (first_length == 0) {
2750     return second;
2751   }
2752
2753   int second_length = second->length();
2754   if (second_length == 0) {
2755     return first;
2756   }
2757
2758   int length = first_length + second_length;
2759
2760   // Optimization for 2-byte strings often used as keys in a decompression
2761   // dictionary.  Check whether we already have the string in the symbol
2762   // table to prevent creation of many unnecessary strings.
2763   if (length == 2) {
2764     unsigned c1 = first->Get(0);
2765     unsigned c2 = second->Get(0);
2766     return MakeOrFindTwoCharacterString(this, c1, c2);
2767   }
2768
2769   bool first_is_ascii = first->IsAsciiRepresentation();
2770   bool second_is_ascii = second->IsAsciiRepresentation();
2771   bool is_ascii = first_is_ascii && second_is_ascii;
2772
2773   // Make sure that an out of memory exception is thrown if the length
2774   // of the new cons string is too large.
2775   if (length > String::kMaxLength || length < 0) {
2776     isolate()->context()->mark_out_of_memory();
2777     return Failure::OutOfMemoryException();
2778   }
2779
2780   bool is_ascii_data_in_two_byte_string = false;
2781   if (!is_ascii) {
2782     // At least one of the strings uses two-byte representation so we
2783     // can't use the fast case code for short ascii strings below, but
2784     // we can try to save memory if all chars actually fit in ascii.
2785     is_ascii_data_in_two_byte_string =
2786         first->HasOnlyAsciiChars() && second->HasOnlyAsciiChars();
2787     if (is_ascii_data_in_two_byte_string) {
2788       isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2789     }
2790   }
2791
2792   // If the resulting string is small make a flat string.
2793   if (length < String::kMinNonFlatLength) {
2794     // Note that neither of the two inputs can be a slice because:
2795     STATIC_ASSERT(String::kMinNonFlatLength <= SlicedString::kMinLength);
2796     ASSERT(first->IsFlat());
2797     ASSERT(second->IsFlat());
2798     if (is_ascii) {
2799       Object* result;
2800       { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2801         if (!maybe_result->ToObject(&result)) return maybe_result;
2802       }
2803       // Copy the characters into the new object.
2804       char* dest = SeqAsciiString::cast(result)->GetChars();
2805       // Copy first part.
2806       const char* src;
2807       if (first->IsExternalString()) {
2808         src = ExternalAsciiString::cast(first)->resource()->data();
2809       } else {
2810         src = SeqAsciiString::cast(first)->GetChars();
2811       }
2812       for (int i = 0; i < first_length; i++) *dest++ = src[i];
2813       // Copy second part.
2814       if (second->IsExternalString()) {
2815         src = ExternalAsciiString::cast(second)->resource()->data();
2816       } else {
2817         src = SeqAsciiString::cast(second)->GetChars();
2818       }
2819       for (int i = 0; i < second_length; i++) *dest++ = src[i];
2820       return result;
2821     } else {
2822       if (is_ascii_data_in_two_byte_string) {
2823         Object* result;
2824         { MaybeObject* maybe_result = AllocateRawAsciiString(length);
2825           if (!maybe_result->ToObject(&result)) return maybe_result;
2826         }
2827         // Copy the characters into the new object.
2828         char* dest = SeqAsciiString::cast(result)->GetChars();
2829         String::WriteToFlat(first, dest, 0, first_length);
2830         String::WriteToFlat(second, dest + first_length, 0, second_length);
2831         isolate_->counters()->string_add_runtime_ext_to_ascii()->Increment();
2832         return result;
2833       }
2834
2835       Object* result;
2836       { MaybeObject* maybe_result = AllocateRawTwoByteString(length);
2837         if (!maybe_result->ToObject(&result)) return maybe_result;
2838       }
2839       // Copy the characters into the new object.
2840       uc16* dest = SeqTwoByteString::cast(result)->GetChars();
2841       String::WriteToFlat(first, dest, 0, first_length);
2842       String::WriteToFlat(second, dest + first_length, 0, second_length);
2843       return result;
2844     }
2845   }
2846
2847   Map* map = (is_ascii || is_ascii_data_in_two_byte_string) ?
2848       cons_ascii_string_map() : cons_string_map();
2849
2850   Object* result;
2851   { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2852     if (!maybe_result->ToObject(&result)) return maybe_result;
2853   }
2854
2855   AssertNoAllocation no_gc;
2856   ConsString* cons_string = ConsString::cast(result);
2857   WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc);
2858   cons_string->set_length(length);
2859   cons_string->set_hash_field(String::kEmptyHashField);
2860   cons_string->set_first(first, mode);
2861   cons_string->set_second(second, mode);
2862   return result;
2863 }
2864
2865
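// Creates the substring [start, end) of |buffer|. One- and two-character
// results reuse cached strings where possible, short or tenured results are
// copied into a fresh sequential string, and sufficiently long results become
// a SlicedString that shares the parent's characters.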
2866 MaybeObject* Heap::AllocateSubString(String* buffer,
2867                                      int start,
2868                                      int end,
2869                                      PretenureFlag pretenure) {
2870   int length = end - start;
2871   if (length == 0) {
2872     return empty_string();
2873   } else if (length == 1) {
2874     return LookupSingleCharacterStringFromCode(buffer->Get(start));
2875   } else if (length == 2) {
2876     // Optimization for 2-byte strings often used as keys in a decompression
2877     // dictionary.  Check whether we already have the string in the symbol
2878   // table to prevent creation of many unnecessary strings.
2879     unsigned c1 = buffer->Get(start);
2880     unsigned c2 = buffer->Get(start + 1);
2881     return MakeOrFindTwoCharacterString(this, c1, c2);
2882   }
2883
2884   // Make an attempt to flatten the buffer to reduce access time.
2885   buffer = buffer->TryFlattenGetString();
2886
2887   if (!FLAG_string_slices ||
2888       !buffer->IsFlat() ||
2889       length < SlicedString::kMinLength ||
2890       pretenure == TENURED) {
2891     Object* result;
2892     // WriteToFlat takes care of the case when an indirect string has a
2893     // different encoding from its underlying string.  These encodings may
2894     // differ because of externalization.
2895     bool is_ascii = buffer->IsAsciiRepresentation();
2896     { MaybeObject* maybe_result = is_ascii
2897                                   ? AllocateRawAsciiString(length, pretenure)
2898                                   : AllocateRawTwoByteString(length, pretenure);
2899       if (!maybe_result->ToObject(&result)) return maybe_result;
2900     }
2901     String* string_result = String::cast(result);
2902     // Copy the characters into the new object.
2903     if (is_ascii) {
2904       ASSERT(string_result->IsAsciiRepresentation());
2905       char* dest = SeqAsciiString::cast(string_result)->GetChars();
2906       String::WriteToFlat(buffer, dest, start, end);
2907     } else {
2908       ASSERT(string_result->IsTwoByteRepresentation());
2909       uc16* dest = SeqTwoByteString::cast(string_result)->GetChars();
2910       String::WriteToFlat(buffer, dest, start, end);
2911     }
2912     return result;
2913   }
2914
2915   ASSERT(buffer->IsFlat());
2916 #ifdef DEBUG
2917   if (FLAG_verify_heap) {
2918     buffer->StringVerify();
2919   }
2920 #endif
2921
2922   Object* result;
2923   // When slicing an indirect string we use its encoding for a newly created
2924   // slice and don't check the encoding of the underlying string.  This is safe
2925   // even if the encodings are different because of externalization.  If an
2926   // indirect ASCII string is pointing to a two-byte string, the two-byte char
2927   // codes of the underlying string must still fit into ASCII (because
2928   // externalization must not change char codes).
2929   { Map* map = buffer->IsAsciiRepresentation()
2930                  ? sliced_ascii_string_map()
2931                  : sliced_string_map();
2932     MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2933     if (!maybe_result->ToObject(&result)) return maybe_result;
2934   }
2935
2936   AssertNoAllocation no_gc;
2937   SlicedString* sliced_string = SlicedString::cast(result);
2938   sliced_string->set_length(length);
2939   sliced_string->set_hash_field(String::kEmptyHashField);
2940   if (buffer->IsConsString()) {
2941     ConsString* cons = ConsString::cast(buffer);
2942     ASSERT(cons->second()->length() == 0);
2943     sliced_string->set_parent(cons->first());
2944     sliced_string->set_offset(start);
2945   } else if (buffer->IsSlicedString()) {
2946     // Prevent nesting sliced strings.
2947     SlicedString* parent_slice = SlicedString::cast(buffer);
2948     sliced_string->set_parent(parent_slice->parent());
2949     sliced_string->set_offset(start + parent_slice->offset());
2950   } else {
2951     sliced_string->set_parent(buffer);
2952     sliced_string->set_offset(start);
2953   }
2954   ASSERT(sliced_string->parent()->IsSeqString() ||
2955          sliced_string->parent()->IsExternalString());
2956   return result;
2957 }
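
// Usage sketch (illustrative only; |heap| and |str| are placeholder names):
// callers propagate allocation failures with the same MaybeObject pattern
// used throughout this file.
//
//   Object* sub;
//   { MaybeObject* maybe_sub =
//         heap->AllocateSubString(str, 1, str->length(), NOT_TENURED);
//     if (!maybe_sub->ToObject(&sub)) return maybe_sub;  // Let the caller retry.
//   }
//   // When string slices are enabled and the buffer is flat, long enough and
//   // not tenured, the result is a SlicedString that shares the parent's
//   // characters; otherwise the characters are copied.
//   String* substring = String::cast(sub);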
2958
2959
2960 MaybeObject* Heap::AllocateExternalStringFromAscii(
2961     const ExternalAsciiString::Resource* resource) {
2962   size_t length = resource->length();
2963   if (length > static_cast<size_t>(String::kMaxLength)) {
2964     isolate()->context()->mark_out_of_memory();
2965     return Failure::OutOfMemoryException();
2966   }
2967
2968   Map* map = external_ascii_string_map();
2969   Object* result;
2970   { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
2971     if (!maybe_result->ToObject(&result)) return maybe_result;
2972   }
2973
2974   ExternalAsciiString* external_string = ExternalAsciiString::cast(result);
2975   external_string->set_length(static_cast<int>(length));
2976   external_string->set_hash_field(String::kEmptyHashField);
2977   external_string->set_resource(resource);
2978
2979   return result;
2980 }
2981
2982
2983 MaybeObject* Heap::AllocateExternalStringFromTwoByte(
2984     const ExternalTwoByteString::Resource* resource) {
2985   size_t length = resource->length();
2986   if (length > static_cast<size_t>(String::kMaxLength)) {
2987     isolate()->context()->mark_out_of_memory();
2988     return Failure::OutOfMemoryException();
2989   }
2990
2991   // For small strings we check whether the resource contains only
2992   // ASCII characters.  If yes, we use a different string map.
2993   static const size_t kAsciiCheckLengthLimit = 32;
2994   bool is_ascii = length <= kAsciiCheckLengthLimit &&
2995       String::IsAscii(resource->data(), static_cast<int>(length));
2996   Map* map = is_ascii ?
2997       external_string_with_ascii_data_map() : external_string_map();
2998   Object* result;
2999   { MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3000     if (!maybe_result->ToObject(&result)) return maybe_result;
3001   }
3002
3003   ExternalTwoByteString* external_string = ExternalTwoByteString::cast(result);
3004   external_string->set_length(static_cast<int>(length));
3005   external_string->set_hash_field(String::kEmptyHashField);
3006   external_string->set_resource(resource);
3007
3008   return result;
3009 }
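
// Illustrative sketch: a minimal resource of the kind the two allocators
// above consume.  Only data() and length() are used here; the class below is
// a hypothetical example, not the canonical embedder interface.
//
//   class StaticAsciiResource : public ExternalAsciiString::Resource {
//    public:
//     StaticAsciiResource(const char* data, size_t length)
//         : data_(data), length_(length) {}
//     virtual const char* data() const { return data_; }
//     virtual size_t length() const { return length_; }
//    private:
//     const char* data_;
//     size_t length_;
//   };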
3010
3011
3012 MaybeObject* Heap::LookupSingleCharacterStringFromCode(uint16_t code) {
3013   if (code <= String::kMaxAsciiCharCode) {
3014     Object* value = single_character_string_cache()->get(code);
3015     if (value != undefined_value()) return value;
3016
3017     char buffer[1];
3018     buffer[0] = static_cast<char>(code);
3019     Object* result;
3020     MaybeObject* maybe_result = LookupSymbol(Vector<const char>(buffer, 1));
3021
3022     if (!maybe_result->ToObject(&result)) return maybe_result;
3023     single_character_string_cache()->set(code, result);
3024     return result;
3025   }
3026
3027   Object* result;
3028   { MaybeObject* maybe_result = AllocateRawTwoByteString(1);
3029     if (!maybe_result->ToObject(&result)) return maybe_result;
3030   }
3031   String* answer = String::cast(result);
3032   answer->Set(0, code);
3033   return answer;
3034 }
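
// Usage sketch (|heap| is a placeholder): codes up to String::kMaxAsciiCharCode
// are served from single_character_string_cache() and interned as symbols;
// larger codes allocate a fresh one-character two-byte string on every call.
//
//   Object* ch;
//   { MaybeObject* maybe_ch = heap->LookupSingleCharacterStringFromCode('a');
//     if (!maybe_ch->ToObject(&ch)) return maybe_ch;
//   }
//   String* one_char = String::cast(ch);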
3035
3036
3037 MaybeObject* Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
3038   if (length < 0 || length > ByteArray::kMaxLength) {
3039     return Failure::OutOfMemoryException();
3040   }
3041   if (pretenure == NOT_TENURED) {
3042     return AllocateByteArray(length);
3043   }
3044   int size = ByteArray::SizeFor(length);
3045   Object* result;
3046   { MaybeObject* maybe_result = (size <= MaxObjectSizeInPagedSpace())
3047                    ? old_data_space_->AllocateRaw(size)
3048                    : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
3049     if (!maybe_result->ToObject(&result)) return maybe_result;
3050   }
3051
3052   reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
3053   reinterpret_cast<ByteArray*>(result)->set_length(length);
3054   return result;
3055 }
3056
3057
3058 MaybeObject* Heap::AllocateByteArray(int length) {
3059   if (length < 0 || length > ByteArray::kMaxLength) {
3060     return Failure::OutOfMemoryException();
3061   }
3062   int size = ByteArray::SizeFor(length);
3063   AllocationSpace space =
3064       (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : NEW_SPACE;
3065   Object* result;
3066   { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
3067     if (!maybe_result->ToObject(&result)) return maybe_result;
3068   }
3069
3070   reinterpret_cast<ByteArray*>(result)->set_map(byte_array_map());
3071   reinterpret_cast<ByteArray*>(result)->set_length(length);
3072   return result;
3073 }
3074
3075
3076 void Heap::CreateFillerObjectAt(Address addr, int size) {
3077   if (size == 0) return;
3078   HeapObject* filler = HeapObject::FromAddress(addr);
3079   if (size == kPointerSize) {
3080     filler->set_map(one_pointer_filler_map());
3081   } else if (size == 2 * kPointerSize) {
3082     filler->set_map(two_pointer_filler_map());
3083   } else {
3084     filler->set_map(free_space_map());
3085     FreeSpace::cast(filler)->set_size(size);
3086   }
3087 }
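
// Note: fillers keep the heap iterable when an object shrinks in place; for
// example, ReinitializeJSReceiver() below plugs the gap left behind a shrunk
// receiver.  A minimal sketch (|addr| and the gap size are made-up values):
//
//   heap->CreateFillerObjectAt(addr, 2 * kPointerSize);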
3088
3089
3090 MaybeObject* Heap::AllocateExternalArray(int length,
3091                                          ExternalArrayType array_type,
3092                                          void* external_pointer,
3093                                          PretenureFlag pretenure) {
3094   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
3095   Object* result;
3096   { MaybeObject* maybe_result = AllocateRaw(ExternalArray::kAlignedSize,
3097                                             space,
3098                                             OLD_DATA_SPACE);
3099     if (!maybe_result->ToObject(&result)) return maybe_result;
3100   }
3101
3102   reinterpret_cast<ExternalArray*>(result)->set_map(
3103       MapForExternalArrayType(array_type));
3104   reinterpret_cast<ExternalArray*>(result)->set_length(length);
3105   reinterpret_cast<ExternalArray*>(result)->set_external_pointer(
3106       external_pointer);
3107
3108   return result;
3109 }
3110
3111
3112 MaybeObject* Heap::CreateCode(const CodeDesc& desc,
3113                               Code::Flags flags,
3114                               Handle<Object> self_reference,
3115                               bool immovable) {
3116   // Allocate ByteArray before the Code object, so that we do not risk
3117   // leaving an uninitialized Code object (and breaking the heap).
3118   Object* reloc_info;
3119   { MaybeObject* maybe_reloc_info = AllocateByteArray(desc.reloc_size, TENURED);
3120     if (!maybe_reloc_info->ToObject(&reloc_info)) return maybe_reloc_info;
3121   }
3122
3123   // Compute size.
3124   int body_size = RoundUp(desc.instr_size, kObjectAlignment);
3125   int obj_size = Code::SizeFor(body_size);
3126   ASSERT(IsAligned(static_cast<intptr_t>(obj_size), kCodeAlignment));
3127   MaybeObject* maybe_result;
3128   // Large code objects and code objects which should stay at a fixed address
3129   // are allocated in large object space.
3130   if (obj_size > MaxObjectSizeInPagedSpace() || immovable) {
3131     maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3132   } else {
3133     maybe_result = code_space_->AllocateRaw(obj_size);
3134   }
3135
3136   Object* result;
3137   if (!maybe_result->ToObject(&result)) return maybe_result;
3138
3139   // Initialize the object
3140   HeapObject::cast(result)->set_map(code_map());
3141   Code* code = Code::cast(result);
3142   ASSERT(!isolate_->code_range()->exists() ||
3143       isolate_->code_range()->contains(code->address()));
3144   code->set_instruction_size(desc.instr_size);
3145   code->set_relocation_info(ByteArray::cast(reloc_info));
3146   code->set_flags(flags);
3147   if (code->is_call_stub() || code->is_keyed_call_stub()) {
3148     code->set_check_type(RECEIVER_MAP_CHECK);
3149   }
3150   code->set_deoptimization_data(empty_fixed_array());
3151   code->set_next_code_flushing_candidate(undefined_value());
3152   // Allow self references to the created code object by patching the handle to
3153   // point to the newly allocated Code object.
3154   if (!self_reference.is_null()) {
3155     *(self_reference.location()) = code;
3156   }
3157   // Migrate generated code.
3158   // The generated code can contain Object** values (typically from handles)
3159   // that are dereferenced during the copy to point directly to the actual heap
3160   // objects. These pointers can include references to the code object itself,
3161   // through the self_reference parameter.
3162   code->CopyFrom(desc);
3163
3164 #ifdef DEBUG
3165   if (FLAG_verify_heap) {
3166     code->Verify();
3167   }
3168 #endif
3169   return code;
3170 }
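
// Note: the self_reference handle lets generated code refer to its own (not
// yet allocated) Code object; the handle is patched to the new object just
// before CopyFrom() resolves the embedded references.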
3171
3172
3173 MaybeObject* Heap::CopyCode(Code* code) {
3174   // Allocate an object the same size as the code object.
3175   int obj_size = code->Size();
3176   MaybeObject* maybe_result;
3177   if (obj_size > MaxObjectSizeInPagedSpace()) {
3178     maybe_result = lo_space_->AllocateRaw(obj_size, EXECUTABLE);
3179   } else {
3180     maybe_result = code_space_->AllocateRaw(obj_size);
3181   }
3182
3183   Object* result;
3184   if (!maybe_result->ToObject(&result)) return maybe_result;
3185
3186   // Copy code object.
3187   Address old_addr = code->address();
3188   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
3189   CopyBlock(new_addr, old_addr, obj_size);
3190   // Relocate the copy.
3191   Code* new_code = Code::cast(result);
3192   ASSERT(!isolate_->code_range()->exists() ||
3193       isolate_->code_range()->contains(code->address()));
3194   new_code->Relocate(new_addr - old_addr);
3195   return new_code;
3196 }
3197
3198
3199 MaybeObject* Heap::CopyCode(Code* code, Vector<byte> reloc_info) {
3200   // Allocate ByteArray before the Code object, so that we do not risk
3201   // leaving an uninitialized Code object (and breaking the heap).
3202   Object* reloc_info_array;
3203   { MaybeObject* maybe_reloc_info_array =
3204         AllocateByteArray(reloc_info.length(), TENURED);
3205     if (!maybe_reloc_info_array->ToObject(&reloc_info_array)) {
3206       return maybe_reloc_info_array;
3207     }
3208   }
3209
3210   int new_body_size = RoundUp(code->instruction_size(), kObjectAlignment);
3211
3212   int new_obj_size = Code::SizeFor(new_body_size);
3213
3214   Address old_addr = code->address();
3215
3216   size_t relocation_offset =
3217       static_cast<size_t>(code->instruction_end() - old_addr);
3218
3219   MaybeObject* maybe_result;
3220   if (new_obj_size > MaxObjectSizeInPagedSpace()) {
3221     maybe_result = lo_space_->AllocateRaw(new_obj_size, EXECUTABLE);
3222   } else {
3223     maybe_result = code_space_->AllocateRaw(new_obj_size);
3224   }
3225
3226   Object* result;
3227   if (!maybe_result->ToObject(&result)) return maybe_result;
3228
3229   // Copy code object.
3230   Address new_addr = reinterpret_cast<HeapObject*>(result)->address();
3231
3232   // Copy header and instructions.
3233   memcpy(new_addr, old_addr, relocation_offset);
3234
3235   Code* new_code = Code::cast(result);
3236   new_code->set_relocation_info(ByteArray::cast(reloc_info_array));
3237
3238   // Copy patched rinfo.
3239   memcpy(new_code->relocation_start(), reloc_info.start(), reloc_info.length());
3240
3241   // Relocate the copy.
3242   ASSERT(!isolate_->code_range()->exists() ||
3243       isolate_->code_range()->contains(code->address()));
3244   new_code->Relocate(new_addr - old_addr);
3245
3246 #ifdef DEBUG
3247   if (FLAG_verify_heap) {
3248     code->Verify();
3249   }
3250 #endif
3251   return new_code;
3252 }
3253
3254
3255 MaybeObject* Heap::Allocate(Map* map, AllocationSpace space) {
3256   ASSERT(gc_state_ == NOT_IN_GC);
3257   ASSERT(map->instance_type() != MAP_TYPE);
3258   // If allocation failures are disallowed, we may allocate in a different
3259   // space when new space is full and the object is not a large object.
3260   AllocationSpace retry_space =
3261       (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
3262   Object* result;
3263   { MaybeObject* maybe_result =
3264         AllocateRaw(map->instance_size(), space, retry_space);
3265     if (!maybe_result->ToObject(&result)) return maybe_result;
3266   }
3267   HeapObject::cast(result)->set_map(map);
3268   return result;
3269 }
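
// Usage sketch (|heap| and |some_map| are placeholders): Allocate() only
// installs the map, so callers must initialize every remaining field before
// the next allocation could trigger a GC.
//
//   Object* obj;
//   { MaybeObject* maybe_obj = heap->Allocate(some_map, NEW_SPACE);
//     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
//   }
//   // ... initialize all fields of HeapObject::cast(obj) here ...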
3270
3271
3272 void Heap::InitializeFunction(JSFunction* function,
3273                               SharedFunctionInfo* shared,
3274                               Object* prototype) {
3275   ASSERT(!prototype->IsMap());
3276   function->initialize_properties();
3277   function->initialize_elements();
3278   function->set_shared(shared);
3279   function->set_code(shared->code());
3280   function->set_prototype_or_initial_map(prototype);
3281   function->set_context(undefined_value());
3282   function->set_literals_or_bindings(empty_fixed_array());
3283   function->set_next_function_link(undefined_value());
3284 }
3285
3286
3287 MaybeObject* Heap::AllocateFunctionPrototype(JSFunction* function) {
3288   // Allocate the prototype.  Make sure to use the object function
3289   // from the function's context, since the function can be from a
3290   // different context.
3291   JSFunction* object_function =
3292       function->context()->global_context()->object_function();
3293
3294   // Each function prototype gets a copy of the object function map.
3295   // This avoids unwanted sharing of maps between prototypes of different
3296   // constructors.
3297   Map* new_map;
3298   ASSERT(object_function->has_initial_map());
3299   { MaybeObject* maybe_map =
3300         object_function->initial_map()->CopyDropTransitions();
3301     if (!maybe_map->To<Map>(&new_map)) return maybe_map;
3302   }
3303   Object* prototype;
3304   { MaybeObject* maybe_prototype = AllocateJSObjectFromMap(new_map);
3305     if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
3306   }
3307   // When creating the prototype for the function we must set its
3308   // constructor to the function.
3309   Object* result;
3310   { MaybeObject* maybe_result =
3311         JSObject::cast(prototype)->SetLocalPropertyIgnoreAttributes(
3312             constructor_symbol(), function, DONT_ENUM);
3313     if (!maybe_result->ToObject(&result)) return maybe_result;
3314   }
3315   return prototype;
3316 }
3317
3318
3319 MaybeObject* Heap::AllocateFunction(Map* function_map,
3320                                     SharedFunctionInfo* shared,
3321                                     Object* prototype,
3322                                     PretenureFlag pretenure) {
3323   AllocationSpace space =
3324       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3325   Object* result;
3326   { MaybeObject* maybe_result = Allocate(function_map, space);
3327     if (!maybe_result->ToObject(&result)) return maybe_result;
3328   }
3329   InitializeFunction(JSFunction::cast(result), shared, prototype);
3330   return result;
3331 }
3332
3333
3334 MaybeObject* Heap::AllocateArgumentsObject(Object* callee, int length) {
3335   // To get fast allocation and map sharing for arguments objects we
3336   // allocate them based on an arguments boilerplate.
3337
3338   JSObject* boilerplate;
3339   int arguments_object_size;
3340   bool strict_mode_callee = callee->IsJSFunction() &&
3341                             JSFunction::cast(callee)->shared()->strict_mode();
3342   if (strict_mode_callee) {
3343     boilerplate =
3344         isolate()->context()->global_context()->
3345             strict_mode_arguments_boilerplate();
3346     arguments_object_size = kArgumentsObjectSizeStrict;
3347   } else {
3348     boilerplate =
3349         isolate()->context()->global_context()->arguments_boilerplate();
3350     arguments_object_size = kArgumentsObjectSize;
3351   }
3352
3353   // This calls Copy directly rather than using Heap::AllocateRaw so we
3354   // duplicate the check here.
3355   ASSERT(allocation_allowed_ && gc_state_ == NOT_IN_GC);
3356
3357   // Check that the size of the boilerplate matches our
3358   // expectations. The ArgumentsAccessStub::GenerateNewObject relies
3359   // on the size being a known constant.
3360   ASSERT(arguments_object_size == boilerplate->map()->instance_size());
3361
3362   // Do the allocation.
3363   Object* result;
3364   { MaybeObject* maybe_result =
3365         AllocateRaw(arguments_object_size, NEW_SPACE, OLD_POINTER_SPACE);
3366     if (!maybe_result->ToObject(&result)) return maybe_result;
3367   }
3368
3369   // Copy the content. The arguments boilerplate doesn't have any
3370   // fields that point to new space so it's safe to skip the write
3371   // barrier here.
3372   CopyBlock(HeapObject::cast(result)->address(),
3373             boilerplate->address(),
3374             JSObject::kHeaderSize);
3375
3376   // Set the length property.
3377   JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsLengthIndex,
3378                                                 Smi::FromInt(length),
3379                                                 SKIP_WRITE_BARRIER);
3380   // Set the callee property for non-strict mode arguments object only.
3381   if (!strict_mode_callee) {
3382     JSObject::cast(result)->InObjectPropertyAtPut(kArgumentsCalleeIndex,
3383                                                   callee);
3384   }
3385
3386   // Check the state of the object
3387   ASSERT(JSObject::cast(result)->HasFastProperties());
3388   ASSERT(JSObject::cast(result)->HasFastElements());
3389
3390   return result;
3391 }
3392
3393
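// Note: the linear scan below compares only adjacent keys, so it detects
// duplicates reliably only after the descriptor array has been sorted (see
// the SortUnchecked() call in AllocateInitialMap below).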
3394 static bool HasDuplicates(DescriptorArray* descriptors) {
3395   int count = descriptors->number_of_descriptors();
3396   if (count > 1) {
3397     String* prev_key = descriptors->GetKey(0);
3398     for (int i = 1; i != count; i++) {
3399       String* current_key = descriptors->GetKey(i);
3400       if (prev_key == current_key) return true;
3401       prev_key = current_key;
3402     }
3403   }
3404   return false;
3405 }
3406
3407
3408 MaybeObject* Heap::AllocateInitialMap(JSFunction* fun) {
3409   ASSERT(!fun->has_initial_map());
3410
3411   // First create a new map with the size and number of in-object properties
3412   // suggested by the function.
3413   int instance_size = fun->shared()->CalculateInstanceSize();
3414   int in_object_properties = fun->shared()->CalculateInObjectProperties();
3415   Object* map_obj;
3416   { MaybeObject* maybe_map_obj = AllocateMap(JS_OBJECT_TYPE, instance_size);
3417     if (!maybe_map_obj->ToObject(&map_obj)) return maybe_map_obj;
3418   }
3419
3420   // Fetch or allocate prototype.
3421   Object* prototype;
3422   if (fun->has_instance_prototype()) {
3423     prototype = fun->instance_prototype();
3424   } else {
3425     { MaybeObject* maybe_prototype = AllocateFunctionPrototype(fun);
3426       if (!maybe_prototype->ToObject(&prototype)) return maybe_prototype;
3427     }
3428   }
3429   Map* map = Map::cast(map_obj);
3430   map->set_inobject_properties(in_object_properties);
3431   map->set_unused_property_fields(in_object_properties);
3432   map->set_prototype(prototype);
3433   ASSERT(map->has_fast_elements());
3434
3435   // If the function has only simple this property assignments, add
3436   // field descriptors for these to the initial map as the object
3437   // cannot be constructed without having these properties.  Guard by
3438   // the inline_new flag so we only change the map if we generate a
3439   // specialized construct stub.
3440   ASSERT(in_object_properties <= Map::kMaxPreAllocatedPropertyFields);
3441   if (fun->shared()->CanGenerateInlineConstructor(prototype)) {
3442     int count = fun->shared()->this_property_assignments_count();
3443     if (count > in_object_properties) {
3444       // Inline constructor can only handle inobject properties.
3445       fun->shared()->ForbidInlineConstructor();
3446     } else {
3447       DescriptorArray* descriptors;
3448       { MaybeObject* maybe_descriptors_obj = DescriptorArray::Allocate(count);
3449         if (!maybe_descriptors_obj->To<DescriptorArray>(&descriptors)) {
3450           return maybe_descriptors_obj;
3451         }
3452       }
3453       DescriptorArray::WhitenessWitness witness(descriptors);
3454       for (int i = 0; i < count; i++) {
3455         String* name = fun->shared()->GetThisPropertyAssignmentName(i);
3456         ASSERT(name->IsSymbol());
3457         FieldDescriptor field(name, i, NONE);
3458         field.SetEnumerationIndex(i);
3459         descriptors->Set(i, &field, witness);
3460       }
3461       descriptors->SetNextEnumerationIndex(count);
3462       descriptors->SortUnchecked(witness);
3463
3464       // The descriptors may contain duplicates because the compiler does not
3465       // guarantee the uniqueness of property names (it would have required
3466       // quadratic time). Once the descriptors are sorted we can check for
3467       // duplicates in linear time.
3468       if (HasDuplicates(descriptors)) {
3469         fun->shared()->ForbidInlineConstructor();
3470       } else {
3471         map->set_instance_descriptors(descriptors);
3472         map->set_pre_allocated_property_fields(count);
3473         map->set_unused_property_fields(in_object_properties - count);
3474       }
3475     }
3476   }
3477
3478   fun->shared()->StartInobjectSlackTracking(map);
3479
3480   return map;
3481 }
3482
3483
3484 void Heap::InitializeJSObjectFromMap(JSObject* obj,
3485                                      FixedArray* properties,
3486                                      Map* map) {
3487   obj->set_properties(properties);
3488   obj->initialize_elements();
3489   // TODO(1240798): Initialize the object's body using valid initial values
3490   // according to the object's initial map.  For example, if the map's
3491   // instance type is JS_ARRAY_TYPE, the length field should be initialized
3492   // to a number (e.g., Smi::FromInt(0)) and the elements initialized to a
3493   // fixed array (e.g., Heap::empty_fixed_array()).  Currently, the object
3494   // verification code has to cope with (temporarily) invalid objects.  See,
3495   // for example, JSArray::JSArrayVerify.
3496   Object* filler;
3497   // We cannot always fill with one_pointer_filler_map because objects
3498   // created from API functions expect their internal fields to be initialized
3499   // with undefined_value.
3500   // Pre-allocated fields need to be initialized with undefined_value as well
3501   // so that object accesses before the constructor completes (e.g. in the
3502   // debugger) will not cause a crash.
3503   if (map->constructor()->IsJSFunction() &&
3504       JSFunction::cast(map->constructor())->shared()->
3505           IsInobjectSlackTrackingInProgress()) {
3506     // We might want to shrink the object later.
3507     ASSERT(obj->GetInternalFieldCount() == 0);
3508     filler = Heap::one_pointer_filler_map();
3509   } else {
3510     filler = Heap::undefined_value();
3511   }
3512   obj->InitializeBody(map, Heap::undefined_value(), filler);
3513 }
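
// Note: InitializeBody() is given two values on purpose: pre-allocated
// property fields are always initialized with undefined_value(), while the
// remaining in-object fields get the chosen filler (one_pointer_filler_map
// when in-object slack tracking may later shrink the object).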
3514
3515
3516 MaybeObject* Heap::AllocateJSObjectFromMap(Map* map, PretenureFlag pretenure) {
3517   // JSFunctions should be allocated using AllocateFunction to be
3518   // properly initialized.
3519   ASSERT(map->instance_type() != JS_FUNCTION_TYPE);
3520
3521   // Both types of global objects should be allocated using
3522   // AllocateGlobalObject to be properly initialized.
3523   ASSERT(map->instance_type() != JS_GLOBAL_OBJECT_TYPE);
3524   ASSERT(map->instance_type() != JS_BUILTINS_OBJECT_TYPE);
3525
3526   // Allocate the backing storage for the properties.
3527   int prop_size =
3528       map->pre_allocated_property_fields() +
3529       map->unused_property_fields() -
3530       map->inobject_properties();
3531   ASSERT(prop_size >= 0);
3532   Object* properties;
3533   { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, pretenure);
3534     if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3535   }
3536
3537   // Allocate the JSObject.
3538   AllocationSpace space =
3539       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
3540   if (map->instance_size() > MaxObjectSizeInPagedSpace()) space = LO_SPACE;
3541   Object* obj;
3542   { MaybeObject* maybe_obj = Allocate(map, space);
3543     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3544   }
3545
3546   // Initialize the JSObject.
3547   InitializeJSObjectFromMap(JSObject::cast(obj),
3548                             FixedArray::cast(properties),
3549                             map);
3550   ASSERT(JSObject::cast(obj)->HasFastSmiOnlyElements() ||
3551          JSObject::cast(obj)->HasFastElements());
3552   return obj;
3553 }
3554
3555
3556 MaybeObject* Heap::AllocateJSObject(JSFunction* constructor,
3557                                     PretenureFlag pretenure) {
3558   // Allocate the initial map if absent.
3559   if (!constructor->has_initial_map()) {
3560     Object* initial_map;
3561     { MaybeObject* maybe_initial_map = AllocateInitialMap(constructor);
3562       if (!maybe_initial_map->ToObject(&initial_map)) return maybe_initial_map;
3563     }
3564     constructor->set_initial_map(Map::cast(initial_map));
3565     Map::cast(initial_map)->set_constructor(constructor);
3566   }
3567   // Allocate the object based on the constructor's initial map.
3568   MaybeObject* result =
3569       AllocateJSObjectFromMap(constructor->initial_map(), pretenure);
3570 #ifdef DEBUG
3571   // Make sure result is NOT a global object if valid.
3572   Object* non_failure;
3573   ASSERT(!result->ToObject(&non_failure) || !non_failure->IsGlobalObject());
3574 #endif
3575   return result;
3576 }
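
// Usage sketch (|heap| and |constructor| are placeholders): the common way to
// create a plain object for a known constructor.
//
//   Object* obj;
//   { MaybeObject* maybe_obj =
//         heap->AllocateJSObject(constructor, NOT_TENURED);
//     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
//   }
//   JSObject* js_obj = JSObject::cast(obj);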
3577
3578
3579 MaybeObject* Heap::AllocateJSProxy(Object* handler, Object* prototype) {
3580   // Allocate map.
3581   // TODO(rossberg): Once we optimize proxies, think about a scheme to share
3582   // maps. Will probably depend on the identity of the handler object, too.
3583   Map* map;
3584   MaybeObject* maybe_map_obj = AllocateMap(JS_PROXY_TYPE, JSProxy::kSize);
3585   if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
3586   map->set_prototype(prototype);
3587
3588   // Allocate the proxy object.
3589   JSProxy* result;
3590   MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3591   if (!maybe_result->To<JSProxy>(&result)) return maybe_result;
3592   result->InitializeBody(map->instance_size(), Smi::FromInt(0));
3593   result->set_handler(handler);
3594   result->set_hash(undefined_value());
3595   return result;
3596 }
3597
3598
3599 MaybeObject* Heap::AllocateJSFunctionProxy(Object* handler,
3600                                            Object* call_trap,
3601                                            Object* construct_trap,
3602                                            Object* prototype) {
3603   // Allocate map.
3604   // TODO(rossberg): Once we optimize proxies, think about a scheme to share
3605   // maps. Will probably depend on the identity of the handler object, too.
3606   Map* map;
3607   MaybeObject* maybe_map_obj =
3608       AllocateMap(JS_FUNCTION_PROXY_TYPE, JSFunctionProxy::kSize);
3609   if (!maybe_map_obj->To<Map>(&map)) return maybe_map_obj;
3610   map->set_prototype(prototype);
3611
3612   // Allocate the proxy object.
3613   JSFunctionProxy* result;
3614   MaybeObject* maybe_result = Allocate(map, NEW_SPACE);
3615   if (!maybe_result->To<JSFunctionProxy>(&result)) return maybe_result;
3616   result->InitializeBody(map->instance_size(), Smi::FromInt(0));
3617   result->set_handler(handler);
3618   result->set_hash(undefined_value());
3619   result->set_call_trap(call_trap);
3620   result->set_construct_trap(construct_trap);
3621   return result;
3622 }
3623
3624
3625 MaybeObject* Heap::AllocateGlobalObject(JSFunction* constructor) {
3626   ASSERT(constructor->has_initial_map());
3627   Map* map = constructor->initial_map();
3628
3629   // Make sure no field properties are described in the initial map.
3630   // This guarantees us that normalizing the properties does not
3631   // require us to change property values to JSGlobalPropertyCells.
3632   ASSERT(map->NextFreePropertyIndex() == 0);
3633
3634   // Make sure we don't have a ton of pre-allocated slots in the
3635   // global objects. They will be unused once we normalize the object.
3636   ASSERT(map->unused_property_fields() == 0);
3637   ASSERT(map->inobject_properties() == 0);
3638
3639   // Initial size of the backing store to avoid resizing of the storage during
3640   // bootstrapping. The size differs between the JS global object and the
3641   // builtins object.
3642   int initial_size = map->instance_type() == JS_GLOBAL_OBJECT_TYPE ? 64 : 512;
3643
3644   // Allocate a dictionary object for backing storage.
3645   Object* obj;
3646   { MaybeObject* maybe_obj =
3647         StringDictionary::Allocate(
3648             map->NumberOfDescribedProperties() * 2 + initial_size);
3649     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3650   }
3651   StringDictionary* dictionary = StringDictionary::cast(obj);
3652
3653   // The global object might be created from an object template with accessors.
3654   // Fill these accessors into the dictionary.
3655   DescriptorArray* descs = map->instance_descriptors();
3656   for (int i = 0; i < descs->number_of_descriptors(); i++) {
3657     PropertyDetails details(descs->GetDetails(i));
3658     ASSERT(details.type() == CALLBACKS);  // Only accessors are expected.
3659     PropertyDetails d =
3660         PropertyDetails(details.attributes(), CALLBACKS, details.index());
3661     Object* value = descs->GetCallbacksObject(i);
3662     { MaybeObject* maybe_value = AllocateJSGlobalPropertyCell(value);
3663       if (!maybe_value->ToObject(&value)) return maybe_value;
3664     }
3665
3666     Object* result;
3667     { MaybeObject* maybe_result = dictionary->Add(descs->GetKey(i), value, d);
3668       if (!maybe_result->ToObject(&result)) return maybe_result;
3669     }
3670     dictionary = StringDictionary::cast(result);
3671   }
3672
3673   // Allocate the global object and initialize it with the backing store.
3674   { MaybeObject* maybe_obj = Allocate(map, OLD_POINTER_SPACE);
3675     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3676   }
3677   JSObject* global = JSObject::cast(obj);
3678   InitializeJSObjectFromMap(global, dictionary, map);
3679
3680   // Create a new map for the global object.
3681   { MaybeObject* maybe_obj = map->CopyDropDescriptors();
3682     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
3683   }
3684   Map* new_map = Map::cast(obj);
3685
3686   // Set up the global object as a normalized object.
3687   global->set_map(new_map);
3688   global->map()->clear_instance_descriptors();
3689   global->set_properties(dictionary);
3690
3691   // Make sure result is a global object with properties in dictionary.
3692   ASSERT(global->IsGlobalObject());
3693   ASSERT(!global->HasFastProperties());
3694   return global;
3695 }
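
// Note: global objects leave this function already normalized: their
// properties live in a StringDictionary, each accessor value is wrapped in a
// JSGlobalPropertyCell, and the copied map has no instance descriptors.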
3696
3697
3698 MaybeObject* Heap::CopyJSObject(JSObject* source) {
3699   // Never used to copy functions.  If functions need to be copied we
3700   // have to be careful to clear the literals array.
3701   SLOW_ASSERT(!source->IsJSFunction());
3702
3703   // Make the clone.
3704   Map* map = source->map();
3705   int object_size = map->instance_size();
3706   Object* clone;
3707
3708   WriteBarrierMode wb_mode = UPDATE_WRITE_BARRIER;
3709
3710   // If we're forced to always allocate, we use the general allocation
3711   // functions which may leave us with an object in old space.
3712   if (always_allocate()) {
3713     { MaybeObject* maybe_clone =
3714           AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
3715       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3716     }
3717     Address clone_address = HeapObject::cast(clone)->address();
3718     CopyBlock(clone_address,
3719               source->address(),
3720               object_size);
3721     // Update write barrier for all fields that lie beyond the header.
3722     RecordWrites(clone_address,
3723                  JSObject::kHeaderSize,
3724                  (object_size - JSObject::kHeaderSize) / kPointerSize);
3725   } else {
3726     wb_mode = SKIP_WRITE_BARRIER;
3727     { MaybeObject* maybe_clone = new_space_.AllocateRaw(object_size);
3728       if (!maybe_clone->ToObject(&clone)) return maybe_clone;
3729     }
3730     SLOW_ASSERT(InNewSpace(clone));
3731     // Since we know the clone is allocated in new space, we can copy
3732     // the contents without worrying about updating the write barrier.
3733     CopyBlock(HeapObject::cast(clone)->address(),
3734               source->address(),
3735               object_size);
3736   }
3737
3738   SLOW_ASSERT(
3739       JSObject::cast(clone)->GetElementsKind() == source->GetElementsKind());
3740   FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
3741   FixedArray* properties = FixedArray::cast(source->properties());
3742   // Update elements if necessary.
3743   if (elements->length() > 0) {
3744     Object* elem;
3745     { MaybeObject* maybe_elem;
3746       if (elements->map() == fixed_cow_array_map()) {
3747         maybe_elem = FixedArray::cast(elements);
3748       } else if (source->HasFastDoubleElements()) {
3749         maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
3750       } else {
3751         maybe_elem = CopyFixedArray(FixedArray::cast(elements));
3752       }
3753       if (!maybe_elem->ToObject(&elem)) return maybe_elem;
3754     }
3755     JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem), wb_mode);
3756   }
3757   // Update properties if necessary.
3758   if (properties->length() > 0) {
3759     Object* prop;
3760     { MaybeObject* maybe_prop = CopyFixedArray(properties);
3761       if (!maybe_prop->ToObject(&prop)) return maybe_prop;
3762     }
3763     JSObject::cast(clone)->set_properties(FixedArray::cast(prop), wb_mode);
3764   }
3765   // Return the new clone.
3766   return clone;
3767 }
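
// Note: the write barrier is skipped above only when the clone is known to be
// in new space; when always_allocate() forces the clone into old space,
// RecordWrites() re-registers every field beyond the JSObject header instead.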
3768
3769
3770 MaybeObject* Heap::ReinitializeJSReceiver(
3771     JSReceiver* object, InstanceType type, int size) {
3772   ASSERT(type >= FIRST_JS_OBJECT_TYPE);
3773
3774   // Allocate fresh map.
3775   // TODO(rossberg): Once we optimize proxies, cache these maps.
3776   Map* map;
3777   MaybeObject* maybe = AllocateMap(type, size);
3778   if (!maybe->To<Map>(&map)) return maybe;
3779
3780   // Check that the receiver has at least the size of the fresh object.
3781   int size_difference = object->map()->instance_size() - map->instance_size();
3782   ASSERT(size_difference >= 0);
3783
3784   map->set_prototype(object->map()->prototype());
3785
3786   // Allocate the backing storage for the properties.
3787   int prop_size = map->unused_property_fields() - map->inobject_properties();
3788   Object* properties;
3789   maybe = AllocateFixedArray(prop_size, TENURED);
3790   if (!maybe->ToObject(&properties)) return maybe;
3791
3792   // Functions require some allocation, which might fail here.
3793   SharedFunctionInfo* shared = NULL;
3794   if (type == JS_FUNCTION_TYPE) {
3795     String* name;
3796     maybe = LookupAsciiSymbol("<freezing call trap>");
3797     if (!maybe->To<String>(&name)) return maybe;
3798     maybe = AllocateSharedFunctionInfo(name);
3799     if (!maybe->To<SharedFunctionInfo>(&shared)) return maybe;
3800   }
3801
3802   // Because of possible retries of this function after failure,
3803   // we must NOT fail after this point, where we have changed the type!
3804
3805   // Reset the map for the object.
3806   object->set_map(map);
3807   JSObject* jsobj = JSObject::cast(object);
3808
3809   // Reinitialize the object from the constructor map.
3810   InitializeJSObjectFromMap(jsobj, FixedArray::cast(properties), map);
3811
3812   // Functions require some minimal initialization.
3813   if (type == JS_FUNCTION_TYPE) {
3814     map->set_function_with_prototype(true);
3815     InitializeFunction(JSFunction::cast(object), shared, the_hole_value());
3816     JSFunction::cast(object)->set_context(
3817         isolate()->context()->global_context());
3818   }
3819
3820   // Put in filler if the new object is smaller than the old.
3821   if (size_difference > 0) {
3822     CreateFillerObjectAt(
3823         object->address() + map->instance_size(), size_difference);
3824   }
3825
3826   return object;
3827 }
3828
3829
3830 MaybeObject* Heap::ReinitializeJSGlobalProxy(JSFunction* constructor,
3831                                              JSGlobalProxy* object) {
3832   ASSERT(constructor->has_initial_map());
3833   Map* map = constructor->initial_map();
3834
3835   // Check that the already allocated object has the same size and type as
3836   // objects allocated using the constructor.
3837   ASSERT(map->instance_size() == object->map()->instance_size());
3838   ASSERT(map->instance_type() == object->map()->instance_type());
3839
3840   // Allocate the backing storage for the properties.
3841   int prop_size = map->unused_property_fields() - map->inobject_properties();
3842   Object* properties;
3843   { MaybeObject* maybe_properties = AllocateFixedArray(prop_size, TENURED);
3844     if (!maybe_properties->ToObject(&properties)) return maybe_properties;
3845   }
3846
3847   // Reset the map for the object.
3848   object->set_map(constructor->initial_map());
3849
3850   // Reinitialize the object from the constructor map.
3851   InitializeJSObjectFromMap(object, FixedArray::cast(properties), map);
3852   return object;
3853 }
3854
3855
3856 MaybeObject* Heap::AllocateStringFromAscii(Vector<const char> string,
3857                                            PretenureFlag pretenure) {
3858   if (string.length() == 1) {
3859     return Heap::LookupSingleCharacterStringFromCode(string[0]);
3860   }
3861   Object* result;
3862   { MaybeObject* maybe_result =
3863         AllocateRawAsciiString(string.length(), pretenure);
3864     if (!maybe_result->ToObject(&result)) return maybe_result;
3865   }
3866
3867   // Copy the characters into the new object.
3868   SeqAsciiString* string_result = SeqAsciiString::cast(result);
3869   for (int i = 0; i < string.length(); i++) {
3870     string_result->SeqAsciiStringSet(i, string[i]);
3871   }
3872   return result;
3873 }
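
// Usage sketch (|heap| is a placeholder; CStrVector is assumed to be the usual
// Vector<const char> helper): allocating a sequential ASCII string from a C
// string literal.
//
//   Object* str;
//   { MaybeObject* maybe_str =
//         heap->AllocateStringFromAscii(CStrVector("hello"), NOT_TENURED);
//     if (!maybe_str->ToObject(&str)) return maybe_str;
//   }
//   SeqAsciiString* ascii = SeqAsciiString::cast(str);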
3874
3875
3876 MaybeObject* Heap::AllocateStringFromUtf8Slow(Vector<const char> string,
3877                                               PretenureFlag pretenure) {
3878   // V8 only supports characters in the Basic Multilingual Plane.
3879   const uc32 kMaxSupportedChar = 0xFFFF;
3880   // Count the number of characters in the UTF-8 string and check if
3881   // it is an ASCII string.
3882   Access<UnicodeCache::Utf8Decoder>
3883       decoder(isolate_->unicode_cache()->utf8_decoder());
3884   decoder->Reset(string.start(), string.length());
3885   int chars = 0;
3886   while (decoder->has_more()) {
3887     decoder->GetNext();
3888     chars++;
3889   }
3890
3891   Object* result;
3892   { MaybeObject* maybe_result = AllocateRawTwoByteString(chars, pretenure);
3893     if (!maybe_result->ToObject(&result)) return maybe_result;
3894   }
3895
3896   // Convert and copy the characters into the new object.
3897   String* string_result = String::cast(result);
3898   decoder->Reset(string.start(), string.length());
3899   for (int i = 0; i < chars; i++) {
3900     uc32 r = decoder->GetNext();
3901     if (r > kMaxSupportedChar) { r = unibrow::Utf8::kBadChar; }
3902     string_result->Set(i, r);
3903   }
3904   return result;
3905 }
3906
3907
3908 MaybeObject* Heap::AllocateStringFromTwoByte(Vector<const uc16> string,
3909                                              PretenureFlag pretenure) {
3910   // Check if the string is an ASCII string.
3911   MaybeObject* maybe_result;
3912   if (String::IsAscii(string.start(), string.length())) {
3913     maybe_result = AllocateRawAsciiString(string.length(), pretenure);
3914   } else {  // It's not an ASCII string.
3915     maybe_result = AllocateRawTwoByteString(string.length(), pretenure);
3916   }
3917   Object* result;
3918   if (!maybe_result->ToObject(&result)) return maybe_result;
3919
3920   // Copy the characters into the new object, which may be either ASCII or
3921   // UTF-16.
3922   String* string_result = String::cast(result);
3923   for (int i = 0; i < string.length(); i++) {
3924     string_result->Set(i, string[i]);
3925   }
3926   return result;
3927 }
3928
3929
3930 Map* Heap::SymbolMapForString(String* string) {
3931   // If the string is in new space it cannot be used as a symbol.
3932   if (InNewSpace(string)) return NULL;
3933
3934   // Find the corresponding symbol map for strings.
3935   Map* map = string->map();
3936   if (map == ascii_string_map()) {
3937     return ascii_symbol_map();
3938   }
3939   if (map == string_map()) {
3940     return symbol_map();
3941   }
3942   if (map == cons_string_map()) {
3943     return cons_symbol_map();
3944   }
3945   if (map == cons_ascii_string_map()) {
3946     return cons_ascii_symbol_map();
3947   }
3948   if (map == external_string_map()) {
3949     return external_symbol_map();
3950   }
3951   if (map == external_ascii_string_map()) {
3952     return external_ascii_symbol_map();
3953   }
3954   if (map == external_string_with_ascii_data_map()) {
3955     return external_symbol_with_ascii_data_map();
3956   }
3957
3958   // No match found.
3959   return NULL;
3960 }
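
// Note: a NULL result from SymbolMapForString() means the string cannot be
// converted to a symbol in place (it is in new space or its map has no symbol
// counterpart), and the caller is expected to allocate a fresh symbol instead.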
3961
3962
3963 MaybeObject* Heap::AllocateInternalSymbol(unibrow::CharacterStream* buffer,
3964                                           int chars,
3965                                           uint32_t hash_field) {
3966   ASSERT(chars >= 0);
3967   // Ensure that chars matches the number of characters in the buffer.
3968   ASSERT(static_cast<unsigned>(chars) == buffer->Length());
3969   // Determine whether the string is ascii.
3970   bool is_ascii = true;
3971   while (buffer->has_more()) {
3972     if (buffer->GetNext() > unibrow::Utf8::kMaxOneByteChar) {
3973       is_ascii = false;
3974       break;
3975     }
3976   }
3977   buffer->Rewind();
3978
3979   // Compute map and object size.
3980   int size;
3981   Map* map;
3982
3983   if (is_ascii) {
3984     if (chars > SeqAsciiString::kMaxLength) {
3985       return Failure::OutOfMemoryException();
3986     }
3987     map = ascii_symbol_map();
3988     size = SeqAsciiString::SizeFor(chars);
3989   } else {
3990     if (chars > SeqTwoByteString::kMaxLength) {
3991       return Failure::OutOfMemoryException();
3992     }
3993     map = symbol_map();
3994     size = SeqTwoByteString::SizeFor(chars);
3995   }
3996
3997   // Allocate string.
3998   Object* result;
3999   { MaybeObject* maybe_result = (size > MaxObjectSizeInPagedSpace())
4000                    ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
4001                    : old_data_space_->AllocateRaw(size);
4002     if (!maybe_result->ToObject(&result)) return maybe_result;
4003   }
4004
4005   reinterpret_cast<HeapObject*>(result)->set_map(map);
4006   // Set length and hash fields of the allocated string.
4007   String* answer = String::cast(result);
4008   answer->set_length(chars);
4009   answer->set_hash_field(hash_field);
4010   SeqString::cast(answer)->set_symbol_id(0);
4011
4012   ASSERT_EQ(size, answer->Size());
4013
4014   // Fill in the characters.
4015   for (int i = 0; i < chars; i++) {
4016     answer->Set(i, buffer->GetNext());
4017   }
4018   return answer;
4019 }
4020
4021
4022 MaybeObject* Heap::AllocateRawAsciiString(int length, PretenureFlag pretenure) {
4023   if (length < 0 || length > SeqAsciiString::kMaxLength) {
4024     return Failure::OutOfMemoryException();
4025   }
4026
4027   int size = SeqAsciiString::SizeFor(length);
4028   ASSERT(size <= SeqAsciiString::kMaxSize);
4029
4030   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4031   AllocationSpace retry_space = OLD_DATA_SPACE;
4032
4033   if (space == NEW_SPACE) {
4034     if (size > kMaxObjectSizeInNewSpace) {
4035       // Allocate in large object space; the retry space will be ignored.
4036       space = LO_SPACE;
4037     } else if (size > MaxObjectSizeInPagedSpace()) {
4038       // Allocate in new space, retry in large object space.
4039       retry_space = LO_SPACE;
4040     }
4041   } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
4042     space = LO_SPACE;
4043   }
4044   Object* result;
4045   { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4046     if (!maybe_result->ToObject(&result)) return maybe_result;
4047   }
4048
4049   // Partially initialize the object.
4050   HeapObject::cast(result)->set_map(ascii_string_map());
4051   String::cast(result)->set_length(length);
4052   String::cast(result)->set_hash_field(String::kEmptyHashField);
4053   SeqString::cast(result)->set_symbol_id(0);
4054   ASSERT_EQ(size, HeapObject::cast(result)->Size());
4055   return result;
4056 }
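
// Note: the space selection above (mirrored in the two-byte variant below)
// sends strings that do not fit in new space straight to large object space,
// keeps other non-tenured strings in new space with old data space (or large
// object space, if a regular page is too small) as the retry space, and moves
// oversized tenured strings to large object space as well.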
4057
4058
4059 MaybeObject* Heap::AllocateRawTwoByteString(int length,
4060                                             PretenureFlag pretenure) {
4061   if (length < 0 || length > SeqTwoByteString::kMaxLength) {
4062     return Failure::OutOfMemoryException();
4063   }
4064   int size = SeqTwoByteString::SizeFor(length);
4065   ASSERT(size <= SeqTwoByteString::kMaxSize);
4066   AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4067   AllocationSpace retry_space = OLD_DATA_SPACE;
4068
4069   if (space == NEW_SPACE) {
4070     if (size > kMaxObjectSizeInNewSpace) {
4071       // Allocate in large object space; the retry space will be ignored.
4072       space = LO_SPACE;
4073     } else if (size > MaxObjectSizeInPagedSpace()) {
4074       // Allocate in new space, retry in large object space.
4075       retry_space = LO_SPACE;
4076     }
4077   } else if (space == OLD_DATA_SPACE && size > MaxObjectSizeInPagedSpace()) {
4078     space = LO_SPACE;
4079   }
4080   Object* result;
4081   { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
4082     if (!maybe_result->ToObject(&result)) return maybe_result;
4083   }
4084
4085   // Partially initialize the object.
4086   HeapObject::cast(result)->set_map(string_map());
4087   String::cast(result)->set_length(length);
4088   String::cast(result)->set_hash_field(String::kEmptyHashField);
4089   SeqString::cast(result)->set_symbol_id(0);
4090   ASSERT_EQ(size, HeapObject::cast(result)->Size());
4091   return result;
4092 }
4093
4094
4095 MaybeObject* Heap::AllocateEmptyFixedArray() {
4096   int size = FixedArray::SizeFor(0);
4097   Object* result;
4098   { MaybeObject* maybe_result =
4099         AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4100     if (!maybe_result->ToObject(&result)) return maybe_result;
4101   }
4102   // Initialize the object.
4103   reinterpret_cast<FixedArray*>(result)->set_map(fixed_array_map());
4104   reinterpret_cast<FixedArray*>(result)->set_length(0);
4105   return result;
4106 }
4107
4108
4109 MaybeObject* Heap::AllocateRawFixedArray(int length) {
4110   if (length < 0 || length > FixedArray::kMaxLength) {
4111     return Failure::OutOfMemoryException();
4112   }
4113   ASSERT(length > 0);
4114   // Use the general function if we're forced to always allocate.
4115   if (always_allocate()) return AllocateFixedArray(length, TENURED);
4116   // Allocate the raw data for a fixed array.
4117   int size = FixedArray::SizeFor(length);
4118   return size <= kMaxObjectSizeInNewSpace
4119       ? new_space_.AllocateRaw(size)
4120       : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
4121 }
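
// Note: AllocateRawFixedArray() returns uninitialized memory with no map set;
// every caller (e.g. CopyFixedArrayWithMap and AllocateFixedArray below) must
// install the map and length and fill the body before handing the array out.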
4122
4123
4124 MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
4125   int len = src->length();
4126   Object* obj;
4127   { MaybeObject* maybe_obj = AllocateRawFixedArray(len);
4128     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4129   }
4130   if (InNewSpace(obj)) {
4131     HeapObject* dst = HeapObject::cast(obj);
4132     dst->set_map(map);
4133     CopyBlock(dst->address() + kPointerSize,
4134               src->address() + kPointerSize,
4135               FixedArray::SizeFor(len) - kPointerSize);
4136     return obj;
4137   }
4138   HeapObject::cast(obj)->set_map(map);
4139   FixedArray* result = FixedArray::cast(obj);
4140   result->set_length(len);
4141
4142   // Copy the content
4143   AssertNoAllocation no_gc;
4144   WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
4145   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
4146   return result;
4147 }
4148
4149
4150 MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
4151                                                Map* map) {
4152   int len = src->length();
4153   Object* obj;
4154   { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
4155     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4156   }
4157   HeapObject* dst = HeapObject::cast(obj);
4158   dst->set_map(map);
4159   CopyBlock(
4160       dst->address() + FixedDoubleArray::kLengthOffset,
4161       src->address() + FixedDoubleArray::kLengthOffset,
4162       FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
4163   return obj;
4164 }
4165
4166
4167 MaybeObject* Heap::AllocateFixedArray(int length) {
4168   ASSERT(length >= 0);
4169   if (length == 0) return empty_fixed_array();
4170   Object* result;
4171   { MaybeObject* maybe_result = AllocateRawFixedArray(length);
4172     if (!maybe_result->ToObject(&result)) return maybe_result;
4173   }
4174   // Initialize header.
4175   FixedArray* array = reinterpret_cast<FixedArray*>(result);
4176   array->set_map(fixed_array_map());
4177   array->set_length(length);
4178   // Initialize body.
4179   ASSERT(!InNewSpace(undefined_value()));
4180   MemsetPointer(array->data_start(), undefined_value(), length);
4181   return result;
4182 }
4183
4184
4185 MaybeObject* Heap::AllocateRawFixedArray(int length, PretenureFlag pretenure) {
4186   if (length < 0 || length > FixedArray::kMaxLength) {
4187     return Failure::OutOfMemoryException();
4188   }
4189
4190   AllocationSpace space =
4191       (pretenure == TENURED) ? OLD_POINTER_SPACE : NEW_SPACE;
4192   int size = FixedArray::SizeFor(length);
4193   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4194     // Too big for new space.
4195     space = LO_SPACE;
4196   } else if (space == OLD_POINTER_SPACE &&
4197              size > MaxObjectSizeInPagedSpace()) {
4198     // Too big for old pointer space.
4199     space = LO_SPACE;
4200   }
4201
4202   AllocationSpace retry_space =
4203       (size <= MaxObjectSizeInPagedSpace()) ? OLD_POINTER_SPACE : LO_SPACE;
4204
4205   return AllocateRaw(size, space, retry_space);
4206 }
4207
4208
4209 MUST_USE_RESULT static MaybeObject* AllocateFixedArrayWithFiller(
4210     Heap* heap,
4211     int length,
4212     PretenureFlag pretenure,
4213     Object* filler) {
4214   ASSERT(length >= 0);
4215   ASSERT(heap->empty_fixed_array()->IsFixedArray());
4216   if (length == 0) return heap->empty_fixed_array();
4217
4218   ASSERT(!heap->InNewSpace(filler));
4219   Object* result;
4220   { MaybeObject* maybe_result = heap->AllocateRawFixedArray(length, pretenure);
4221     if (!maybe_result->ToObject(&result)) return maybe_result;
4222   }
4223
4224   HeapObject::cast(result)->set_map(heap->fixed_array_map());
4225   FixedArray* array = FixedArray::cast(result);
4226   array->set_length(length);
4227   MemsetPointer(array->data_start(), filler, length);
4228   return array;
4229 }
4230
4231
4232 MaybeObject* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
4233   return AllocateFixedArrayWithFiller(this,
4234                                       length,
4235                                       pretenure,
4236                                       undefined_value());
4237 }
4238
4239
4240 MaybeObject* Heap::AllocateFixedArrayWithHoles(int length,
4241                                                PretenureFlag pretenure) {
4242   return AllocateFixedArrayWithFiller(this,
4243                                       length,
4244                                       pretenure,
4245                                       the_hole_value());
4246 }
4247
4248
4249 MaybeObject* Heap::AllocateUninitializedFixedArray(int length) {
4250   if (length == 0) return empty_fixed_array();
4251
4252   Object* obj;
4253   { MaybeObject* maybe_obj = AllocateRawFixedArray(length);
4254     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4255   }
4256
4257   reinterpret_cast<FixedArray*>(obj)->set_map(fixed_array_map());
4258   FixedArray::cast(obj)->set_length(length);
4259   return obj;
4260 }
4261
4262
4263 MaybeObject* Heap::AllocateEmptyFixedDoubleArray() {
4264   int size = FixedDoubleArray::SizeFor(0);
4265   Object* result;
4266   { MaybeObject* maybe_result =
4267         AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
4268     if (!maybe_result->ToObject(&result)) return maybe_result;
4269   }
4270   // Initialize the object.
4271   reinterpret_cast<FixedDoubleArray*>(result)->set_map(
4272       fixed_double_array_map());
4273   reinterpret_cast<FixedDoubleArray*>(result)->set_length(0);
4274   return result;
4275 }
4276
4277
4278 MaybeObject* Heap::AllocateUninitializedFixedDoubleArray(
4279     int length,
4280     PretenureFlag pretenure) {
4281   if (length == 0) return empty_fixed_double_array();
4282
4283   Object* obj;
4284   { MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(length, pretenure);
4285     if (!maybe_obj->ToObject(&obj)) return maybe_obj;
4286   }
4287
4288   reinterpret_cast<FixedDoubleArray*>(obj)->set_map(fixed_double_array_map());
4289   FixedDoubleArray::cast(obj)->set_length(length);
4290   return obj;
4291 }
4292
4293
4294 MaybeObject* Heap::AllocateRawFixedDoubleArray(int length,
4295                                                PretenureFlag pretenure) {
4296   if (length < 0 || length > FixedDoubleArray::kMaxLength) {
4297     return Failure::OutOfMemoryException();
4298   }
4299
4300   AllocationSpace space =
4301       (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
4302   int size = FixedDoubleArray::SizeFor(length);
4303   if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
4304     // Too big for new space.
4305     space = LO_SPACE;
4306   } else if (space == OLD_DATA_SPACE &&
4307              size > MaxObjectSizeInPagedSpace()) {
4308     // Too big for old data space.
4309     space = LO_SPACE;
4310   }
4311
4312   AllocationSpace retry_space =
4313       (size <= MaxObjectSizeInPagedSpace()) ? OLD_DATA_SPACE : LO_SPACE;
4314
4315   return AllocateRaw(size, space, retry_space);
4316 }
4317
4318
4319 MaybeObject* Heap::AllocateHashTable(int length, PretenureFlag pretenure) {
4320   Object* result;
4321   { MaybeObject* maybe_result = AllocateFixedArray(length, pretenure);
4322     if (!maybe_result->ToObject(&result)) return maybe_result;
4323   }
4324   reinterpret_cast<HeapObject*>(result)->set_map(hash_table_map());
4325   ASSERT(result->IsHashTable());
4326   return result;
4327 }
4328
4329
4330 MaybeObject* Heap::AllocateGlobalContext() {
4331   Object* result;
4332   { MaybeObject* maybe_result =
4333         AllocateFixedArray(Context::GLOBAL_CONTEXT_SLOTS);
4334     if (!maybe_result->ToObject(&result)) return maybe_result;
4335   }
4336   Context* context = reinterpret_cast<Context*>(result);
4337   context->set_map(global_context_map());
4338   ASSERT(context->IsGlobalContext());
4339   ASSERT(result->IsContext());
4340   return result;
4341 }
4342
4343
4344 MaybeObject* Heap::AllocateFunctionContext(int length, JSFunction* function) {
4345   ASSERT(length >= Context::MIN_CONTEXT_SLOTS);
4346   Object* result;
4347   { MaybeObject* maybe_result = AllocateFixedArray(length);
4348     if (!maybe_result->ToObject(&result)) return maybe_result;
4349   }
4350   Context* context = reinterpret_cast<Context*>(result);
4351   context->set_map(function_context_map());
4352   context->set_closure(function);
4353   context->set_previous(function->context());
4354   context->set_extension(NULL);
4355   context->set_global(function->context()->global());
4356   return context;
4357 }
4358
4359
4360 MaybeObject* Heap::AllocateCatchContext(JSFunction* function,
4361                                         Context* previous,
4362                                         String* name,
4363                                         Object* thrown_object) {
4364   STATIC_ASSERT(Context::MIN_CONTEXT_SLOTS == Context::THROWN_OBJECT_INDEX);
4365   Object* result;
4366   { MaybeObject* maybe_result =
4367         AllocateFixedArray(Context::MIN_CONTEXT_SLOTS + 1);
4368     if (!maybe_result->ToObject(&result)) return maybe_result;
4369   }
4370   Context* context = reinterpret_cast<Context*>(result);
4371   context->set_map(catch_context_map());
4372   context->set_closure(function);
4373   context->set_previous(previous);
4374   context->set_extension(name);
4375   context->set_global(previous->global());
4376   context->set(Context::THROWN_OBJECT_INDEX, thrown_object);
4377   return context;
4378 }
4379
4380
4381 MaybeObject* Heap::AllocateWithContext(JSFunction* function,
4382                                        Context* previous,
4383                                        JSObject* extension) {
4384   Object* result;
4385   { MaybeObject* maybe_result = AllocateFixedArray(Context::MIN_CONTEXT_SLOTS);
4386     if (!maybe_result->ToObject(&result)) return maybe_result;
4387   }
4388   Context* context = reinterpret_cast<Context*>(result);
4389   context->set_map(with_context_map());
4390   context->set_closure(function);
4391   context->set_previous(previous);
4392   context->set_extension(extension);
4393   context->set_global(previous->global());
4394   return context;
4395 }
4396
4397
4398 MaybeObject* Heap::AllocateBlockContext(JSFunction* function,
4399                                         Context* previous,
4400                                         SerializedScopeInfo* scope_info) {
4401   Object* result;
4402   { MaybeObject* maybe_result =
4403         AllocateFixedArrayWithHoles(scope_info->NumberOfContextSlots());
4404     if (!maybe_result->ToObject(&result)) return maybe_result;
4405   }
4406   Context* context = reinterpret_cast<Context*>(result);
4407   context->set_map(block_context_map());
4408   context->set_closure(function);
4409   context->set_previous(previous);
4410   context->set_extension(scope_info);
4411   context->set_global(previous->global());
4412   return context;
4413 }
4414
4415
4416 MaybeObject* Heap::AllocateSerializedScopeInfo(int length) {
4417   Object* result;
4418   { MaybeObject* maybe_result = AllocateFixedArray(length, TENURED);
4419     if (!maybe_result->ToObject(&result)) return maybe_result;
4420   }
4421   SerializedScopeInfo* scope_info =
4422       reinterpret_cast<SerializedScopeInfo*>(result);
4423   scope_info->set_map(serialized_scope_info_map());
4424   return scope_info;
4425 }
4426
4427
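// Allocates a Struct instance of the given type.  The map is selected
// from STRUCT_LIST via the MAKE_CASE macro, the space is old pointer
// space unless the instance is too large for a paged space (in which
// case large object space is used), and the fields are initialized by
// InitializeBody.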
4428 MaybeObject* Heap::AllocateStruct(InstanceType type) {
4429   Map* map;
4430   switch (type) {
4431 #define MAKE_CASE(NAME, Name, name) \
4432     case NAME##_TYPE: map = name##_map(); break;
4433 STRUCT_LIST(MAKE_CASE)
4434 #undef MAKE_CASE
4435     default:
4436       UNREACHABLE();
4437       return Failure::InternalError();
4438   }
4439   int size = map->instance_size();
4440   AllocationSpace space =
4441       (size > MaxObjectSizeInPagedSpace()) ? LO_SPACE : OLD_POINTER_SPACE;
4442   Object* result;
4443   { MaybeObject* maybe_result = Allocate(map, space);
4444     if (!maybe_result->ToObject(&result)) return maybe_result;
4445   }
4446   Struct::cast(result)->InitializeBody(size);
4447   return result;
4448 }
4449
4450
4451 bool Heap::IsHeapIterable() {
4452   return (!old_pointer_space()->was_swept_conservatively() &&
4453           !old_data_space()->was_swept_conservatively());
4454 }
4455
4456
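// Guarantees that the heap is in an iterable state.  The heap is only
// iterable if the old pointer and old data spaces have not been swept
// conservatively (see IsHeapIterable above); if they have been, a full
// garbage collection with kMakeHeapIterableMask is forced.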
4457 void Heap::EnsureHeapIsIterable() {
4458   ASSERT(IsAllocationAllowed());
4459   if (!IsHeapIterable()) {
4460     CollectAllGarbage(kMakeHeapIterableMask);
4461   }
4462   ASSERT(IsHeapIterable());
4463 }
4464
4465
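// Drives increasingly aggressive cleanup as consecutive idle notifications
// arrive: after kIdlesBeforeScavenge of them a scavenge is performed (or a
// full GC if contexts have been disposed), after kIdlesBeforeMarkSweep a
// full collection is performed with the compilation cache cleared first,
// and after kIdlesBeforeMarkCompact a final full collection finishes the
// idle round.  The notification count is reset once enough
// (kGCsBetweenCleanup) garbage collections have taken place in the
// meantime.  Returns true when no further idle cleanup is expected to
// help.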
4466 bool Heap::IdleNotification() {
4467   static const int kIdlesBeforeScavenge = 4;
4468   static const int kIdlesBeforeMarkSweep = 7;
4469   static const int kIdlesBeforeMarkCompact = 8;
4470   static const int kMaxIdleCount = kIdlesBeforeMarkCompact + 1;
4471   static const unsigned int kGCsBetweenCleanup = 4;
4472
4473   if (!last_idle_notification_gc_count_init_) {
4474     last_idle_notification_gc_count_ = gc_count_;
4475     last_idle_notification_gc_count_init_ = true;
4476   }
4477
4478   bool uncommit = true;
4479   bool finished = false;
4480
4481   // Reset the number of idle notifications received when a number of
4482   // GCs have taken place. This allows another round of cleanup based
4483   // on idle notifications if enough work has been carried out to
4484   // provoke a number of garbage collections.
4485   if (gc_count_ - last_idle_notification_gc_count_ < kGCsBetweenCleanup) {
4486     number_idle_notifications_ =
4487         Min(number_idle_notifications_ + 1, kMaxIdleCount);
4488   } else {
4489     number_idle_notifications_ = 0;
4490     last_idle_notification_gc_count_ = gc_count_;
4491   }
4492
4493   if (number_idle_notifications_ == kIdlesBeforeScavenge) {
4494     if (contexts_disposed_ > 0) {
4495       HistogramTimerScope scope(isolate_->counters()->gc_context());
4496       CollectAllGarbage(kNoGCFlags);
4497     } else {
4498       CollectGarbage(NEW_SPACE);
4499     }
4500     new_space_.Shrink();
4501     last_idle_notification_gc_count_ = gc_count_;
4502   } else if (number_idle_notifications_ == kIdlesBeforeMarkSweep) {
4503     // Before doing the mark-sweep collections we clear the
4504     // compilation cache to avoid hanging on to source code and
4505     // generated code for cached functions.
4506     isolate_->compilation_cache()->Clear();
4507
4508     CollectAllGarbage(kNoGCFlags);
4509     new_space_.Shrink();
4510     last_idle_notification_gc_count_ = gc_count_;
4511
4512   } else if (number_idle_notifications_ == kIdlesBeforeMarkCompact) {
4513     CollectAllGarbage(kNoGCFlags);
4514     new_space_.Shrink();
4515     last_idle_notification_gc_count_ = gc_count_;
4516     number_idle_notifications_ = 0;
4517     finished = true;
4518   } else if (contexts_disposed_ > 0) {
4519     if (FLAG_expose_gc) {
4520       contexts_disposed_ = 0;
4521     } else {
4522       HistogramTimerScope scope(isolate_->counters()->gc_context());
4523       CollectAllGarbage(kNoGCFlags);
4524       last_idle_notification_gc_count_ = gc_count_;
4525     }
4526     // If this is the first idle notification, reset the
4527     // notification count so that idle notifications for context
4528     // disposal garbage collections do not start an overly
4529     // aggressive idle GC cycle.
4530     if (number_idle_notifications_ <= 1) {
4531       number_idle_notifications_ = 0;
4532       uncommit = false;
4533     }
4534   } else if (number_idle_notifications_ > kIdlesBeforeMarkCompact) {
4535     // If we have received more than kIdlesBeforeMarkCompact idle
4536     // notifications we do not perform any cleanup because we don't
4537     // expect to gain much by doing so.
4538     finished = true;
4539   }
4540
4541   // Make sure that we have no pending context disposals and
4542   // conditionally uncommit from space.
4543   // Take into account that we might have decided to delay full collection
4544   // because incremental marking is in progress.
4545   ASSERT((contexts_disposed_ == 0) || !incremental_marking()->IsStopped());
4546   if (uncommit) UncommitFromSpace();
4547
4548   return finished;
4549 }
4550
4551
4552 #ifdef DEBUG
4553
4554 void Heap::Print() {
4555   if (!HasBeenSetup()) return;
4556   isolate()->PrintStack();
4557   AllSpaces spaces;
4558   for (Space* space = spaces.next(); space != NULL; space = spaces.next())
4559     space->Print();
4560 }
4561
4562
4563 void Heap::ReportCodeStatistics(const char* title) {
4564   PrintF(">>>>>> Code Stats (%s) >>>>>>\n", title);
4565   PagedSpace::ResetCodeStatistics();
4566   // We do not look for code in new space, map space, or old space.  If code
4567   // somehow ends up in those spaces, we would miss it here.
4568   code_space_->CollectCodeStatistics();
4569   lo_space_->CollectCodeStatistics();
4570   PagedSpace::ReportCodeStatistics();
4571 }
4572
4573
4574 // This function expects that NewSpace's allocated objects histogram is
4575 // populated (via a call to CollectStatistics or else as a side effect of a
4576 // just-completed scavenge collection).
4577 void Heap::ReportHeapStatistics(const char* title) {
4578   USE(title);
4579   PrintF(">>>>>> =============== %s (%d) =============== >>>>>>\n",
4580          title, gc_count_);
4581   PrintF("old_gen_promotion_limit_ %" V8_PTR_PREFIX "d\n",
4582          old_gen_promotion_limit_);
4583   PrintF("old_gen_allocation_limit_ %" V8_PTR_PREFIX "d\n",
4584          old_gen_allocation_limit_);
4585   PrintF("old_gen_limit_factor_ %d\n", old_gen_limit_factor_);
4586
4587   PrintF("\n");
4588   PrintF("Number of handles : %d\n", HandleScope::NumberOfHandles());
4589   isolate_->global_handles()->PrintStats();
4590   PrintF("\n");
4591
4592   PrintF("Heap statistics : ");
4593   isolate_->memory_allocator()->ReportStatistics();
4594   PrintF("To space : ");
4595   new_space_.ReportStatistics();
4596   PrintF("Old pointer space : ");
4597   old_pointer_space_->ReportStatistics();
4598   PrintF("Old data space : ");
4599   old_data_space_->ReportStatistics();
4600   PrintF("Code space : ");
4601   code_space_->ReportStatistics();
4602   PrintF("Map space : ");
4603   map_space_->ReportStatistics();
4604   PrintF("Cell space : ");
4605   cell_space_->ReportStatistics();
4606   PrintF("Large object space : ");
4607   lo_space_->ReportStatistics();
4608   PrintF(">>>>>> ========================================= >>>>>>\n");
4609 }
4610
4611 #endif  // DEBUG
4612
4613 bool Heap::Contains(HeapObject* value) {
4614   return Contains(value->address());
4615 }
4616
4617
4618 bool Heap::Contains(Address addr) {
4619   if (OS::IsOutsideAllocatedSpace(addr)) return false;
4620   return HasBeenSetup() &&
4621     (new_space_.ToSpaceContains(addr) ||
4622      old_pointer_space_->Contains(addr) ||
4623      old_data_space_->Contains(addr) ||
4624      code_space_->Contains(addr) ||
4625      map_space_->Contains(addr) ||
4626      cell_space_->Contains(addr) ||
4627      lo_space_->SlowContains(addr));
4628 }
4629
4630
4631 bool Heap::InSpace(HeapObject* value, AllocationSpace space) {
4632   return InSpace(value->address(), space);
4633 }
4634
4635
4636 bool Heap::InSpace(Address addr, AllocationSpace space) {
4637   if (OS::IsOutsideAllocatedSpace(addr)) return false;
4638   if (!HasBeenSetup()) return false;
4639
4640   switch (space) {
4641     case NEW_SPACE:
4642       return new_space_.ToSpaceContains(addr);
4643     case OLD_POINTER_SPACE:
4644       return old_pointer_space_->Contains(addr);
4645     case OLD_DATA_SPACE:
4646       return old_data_space_->Contains(addr);
4647     case CODE_SPACE:
4648       return code_space_->Contains(addr);
4649     case MAP_SPACE:
4650       return map_space_->Contains(addr);
4651     case CELL_SPACE:
4652       return cell_space_->Contains(addr);
4653     case LO_SPACE:
4654       return lo_space_->SlowContains(addr);
4655   }
4656
4657   return false;
4658 }
4659
4660
4661 #ifdef DEBUG
4662 void Heap::Verify() {
4663   ASSERT(HasBeenSetup());
4664
4665   store_buffer()->Verify();
4666
4667   VerifyPointersVisitor visitor;
4668   IterateRoots(&visitor, VISIT_ONLY_STRONG);
4669
4670   new_space_.Verify();
4671
4672   old_pointer_space_->Verify(&visitor);
4673   map_space_->Verify(&visitor);
4674
4675   VerifyPointersVisitor no_dirty_regions_visitor;
4676   old_data_space_->Verify(&no_dirty_regions_visitor);
4677   code_space_->Verify(&no_dirty_regions_visitor);
4678   cell_space_->Verify(&no_dirty_regions_visitor);
4679
4680   lo_space_->Verify();
4681 }
4682
4683 #endif  // DEBUG
4684
4685
4686 MaybeObject* Heap::LookupSymbol(Vector<const char> string) {
4687   Object* symbol = NULL;
4688   Object* new_table;
4689   { MaybeObject* maybe_new_table =
4690         symbol_table()->LookupSymbol(string, &symbol);
4691     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
4692   }
4693   // Can't use set_symbol_table because SymbolTable::cast knows that
4694   // SymbolTable is a singleton and checks for identity.
4695   roots_[kSymbolTableRootIndex] = new_table;
4696   ASSERT(symbol != NULL);
4697   return symbol;
4698 }
4699
4700
4701 MaybeObject* Heap::LookupAsciiSymbol(Vector<const char> string) {
4702   Object* symbol = NULL;
4703   Object* new_table;
4704   { MaybeObject* maybe_new_table =
4705         symbol_table()->LookupAsciiSymbol(string, &symbol);
4706     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
4707   }
4708   // Can't use set_symbol_table because SymbolTable::cast knows that
4709   // SymbolTable is a singleton and checks for identity.
4710   roots_[kSymbolTableRootIndex] = new_table;
4711   ASSERT(symbol != NULL);
4712   return symbol;
4713 }
4714
4715
4716 MaybeObject* Heap::LookupAsciiSymbol(Handle<SeqAsciiString> string,
4717                                      int from,
4718                                      int length) {
4719   Object* symbol = NULL;
4720   Object* new_table;
4721   { MaybeObject* maybe_new_table =
4722         symbol_table()->LookupSubStringAsciiSymbol(string,
4723                                                    from,
4724                                                    length,
4725                                                    &symbol);
4726     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
4727   }
4728   // Can't use set_symbol_table because SymbolTable::cast knows that
4729   // SymbolTable is a singleton and checks for identity.
4730   roots_[kSymbolTableRootIndex] = new_table;
4731   ASSERT(symbol != NULL);
4732   return symbol;
4733 }
4734
4735
4736 MaybeObject* Heap::LookupTwoByteSymbol(Vector<const uc16> string) {
4737   Object* symbol = NULL;
4738   Object* new_table;
4739   { MaybeObject* maybe_new_table =
4740         symbol_table()->LookupTwoByteSymbol(string, &symbol);
4741     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
4742   }
4743   // Can't use set_symbol_table because SymbolTable::cast knows that
4744   // SymbolTable is a singleton and checks for identity.
4745   roots_[kSymbolTableRootIndex] = new_table;
4746   ASSERT(symbol != NULL);
4747   return symbol;
4748 }
4749
4750
4751 MaybeObject* Heap::LookupSymbol(String* string) {
4752   if (string->IsSymbol()) return string;
4753   Object* symbol = NULL;
4754   Object* new_table;
4755   { MaybeObject* maybe_new_table =
4756         symbol_table()->LookupString(string, &symbol);
4757     if (!maybe_new_table->ToObject(&new_table)) return maybe_new_table;
4758   }
4759   // Can't use set_symbol_table because SymbolTable::cast knows that
4760   // SymbolTable is a singleton and checks for identity.
4761   roots_[kSymbolTableRootIndex] = new_table;
4762   ASSERT(symbol != NULL);
4763   return symbol;
4764 }
4765
4766
4767 bool Heap::LookupSymbolIfExists(String* string, String** symbol) {
4768   if (string->IsSymbol()) {
4769     *symbol = string;
4770     return true;
4771   }
4772   return symbol_table()->LookupSymbolIfExists(string, symbol);
4773 }
4774
4775
4776 #ifdef DEBUG
4777 void Heap::ZapFromSpace() {
4778   NewSpacePageIterator it(new_space_.FromSpaceStart(),
4779                           new_space_.FromSpaceEnd());
4780   while (it.has_next()) {
4781     NewSpacePage* page = it.next();
4782     for (Address cursor = page->body(), limit = page->body_limit();
4783          cursor < limit;
4784          cursor += kPointerSize) {
4785       Memory::Address_at(cursor) = kFromSpaceZapValue;
4786     }
4787   }
4788 }
4789 #endif  // DEBUG
4790
4791
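// Visits every pointer-size slot in [start, end).  Slots that point into
// from-space are passed to the callback (which typically updates them to
// the object's new location); if the updated slot still points into new
// space it is re-entered into the store buffer.  While incremental
// compaction is in progress, slots in black objects that point at
// evacuation candidates are also recorded for the mark-compact collector.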
4792 void Heap::IterateAndMarkPointersToFromSpace(Address start,
4793                                              Address end,
4794                                              ObjectSlotCallback callback) {
4795   Address slot_address = start;
4796
4797   // We are not collecting slots on new space objects during mutation,
4798   // so we have to scan for pointers to evacuation candidates when we
4799   // promote objects. But we should not record any slots in non-black
4800   // objects. A grey object's slots would be rescanned anyway, and a
4801   // white object might not survive until the end of the collection,
4802   // so it would be a violation of the invariant to record its slots.
4803   bool record_slots = false;
4804   if (incremental_marking()->IsCompacting()) {
4805     MarkBit mark_bit = Marking::MarkBitFrom(HeapObject::FromAddress(start));
4806     record_slots = Marking::IsBlack(mark_bit);
4807   }
4808
4809   while (slot_address < end) {
4810     Object** slot = reinterpret_cast<Object**>(slot_address);
4811     Object* object = *slot;
4812     // If the store buffer becomes overfull we mark pages as being exempt from
4813     // the store buffer.  These pages are scanned to find pointers that point
4814     // to the new space.  In that case we may hit newly promoted objects and
4815     // fix the pointers before the promotion queue gets to them.  Thus the 'if'.
4816     if (object->IsHeapObject()) {
4817       if (Heap::InFromSpace(object)) {
4818         callback(reinterpret_cast<HeapObject**>(slot),
4819                  HeapObject::cast(object));
4820         Object* new_object = *slot;
4821         if (InNewSpace(new_object)) {
4822           SLOW_ASSERT(Heap::InToSpace(new_object));
4823           SLOW_ASSERT(new_object->IsHeapObject());
4824           store_buffer_.EnterDirectlyIntoStoreBuffer(
4825               reinterpret_cast<Address>(slot));
4826         }
4827         SLOW_ASSERT(!MarkCompactCollector::IsOnEvacuationCandidate(new_object));
4828       } else if (record_slots &&
4829                  MarkCompactCollector::IsOnEvacuationCandidate(object)) {
4830         mark_compact_collector()->RecordSlot(slot, slot, object);
4831       }
4832     }
4833     slot_address += kPointerSize;
4834   }
4835 }
4836
4837
4838 #ifdef DEBUG
4839 typedef bool (*CheckStoreBufferFilter)(Object** addr);
4840
4841
4842 bool IsAMapPointerAddress(Object** addr) {
4843   uintptr_t a = reinterpret_cast<uintptr_t>(addr);
4844   int mod = a % Map::kSize;
4845   return mod >= Map::kPointerFieldsBeginOffset &&
4846          mod < Map::kPointerFieldsEndOffset;
4847 }
4848
4849
4850 bool EverythingsAPointer(Object** addr) {
4851   return true;
4852 }
4853
4854
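// Debug helper: walks the slots in [current, limit), skipping free-space
// blocks and the current linear allocation area, and checks that every
// pointer into new space accepted by the filter is present in the sorted
// store buffer.  On a missing entry it first walks backwards to the map
// word of the enclosing object (presumably to make the offending object
// easy to inspect in a debugger) and then hits UNREACHABLE().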
4855 static void CheckStoreBuffer(Heap* heap,
4856                              Object** current,
4857                              Object** limit,
4858                              Object**** store_buffer_position,
4859                              Object*** store_buffer_top,
4860                              CheckStoreBufferFilter filter,
4861                              Address special_garbage_start,
4862                              Address special_garbage_end) {
4863   Map* free_space_map = heap->free_space_map();
4864   for ( ; current < limit; current++) {
4865     Object* o = *current;
4866     Address current_address = reinterpret_cast<Address>(current);
4867     // Skip free space.
4868     if (o == free_space_map) {
4870       FreeSpace* free_space =
4871           FreeSpace::cast(HeapObject::FromAddress(current_address));
4872       int skip = free_space->Size();
4873       ASSERT(current_address + skip <= reinterpret_cast<Address>(limit));
4874       ASSERT(skip > 0);
4875       current_address += skip - kPointerSize;
4876       current = reinterpret_cast<Object**>(current_address);
4877       continue;
4878     }
4879     // Skip the current linear allocation space between top and limit, which
4880     // is not marked with the free space map but can contain junk.
4881     if (current_address == special_garbage_start &&
4882         special_garbage_end != special_garbage_start) {
4883       current_address = special_garbage_end - kPointerSize;
4884       current = reinterpret_cast<Object**>(current_address);
4885       continue;
4886     }
4887     if (!(*filter)(current)) continue;
4888     ASSERT(current_address < special_garbage_start ||
4889            current_address >= special_garbage_end);
4890     ASSERT(reinterpret_cast<uintptr_t>(o) != kFreeListZapValue);
4891     // We have to check that the pointer does not point into new space
4892     // without trying to cast it to a heap object since the hash field of
4893     // a string can contain values like 1 and 3 which are tagged null
4894     // pointers.
4895     if (!heap->InNewSpace(o)) continue;
4896     while (**store_buffer_position < current &&
4897            *store_buffer_position < store_buffer_top) {
4898       (*store_buffer_position)++;
4899     }
4900     if (**store_buffer_position != current ||
4901         *store_buffer_position == store_buffer_top) {
4902       Object** obj_start = current;
4903       while (!(*obj_start)->IsMap()) obj_start--;
4904       UNREACHABLE();
4905     }
4906   }
4907 }
4908
4909
4910 // Check that the store buffer contains all intergenerational pointers by
4911 // scanning a page and ensuring that all pointers to young space are in the
4912 // store buffer.
4913 void Heap::OldPointerSpaceCheckStoreBuffer() {
4914   OldSpace* space = old_pointer_space();
4915   PageIterator pages(space);
4916
4917   store_buffer()->SortUniq();
4918
4919   while (pages.has_next()) {
4920     Page* page = pages.next();
4921     Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4922
4923     Address end = page->ObjectAreaEnd();
4924
4925     Object*** store_buffer_position = store_buffer()->Start();
4926     Object*** store_buffer_top = store_buffer()->Top();
4927
4928     Object** limit = reinterpret_cast<Object**>(end);
4929     CheckStoreBuffer(this,
4930                      current,
4931                      limit,
4932                      &store_buffer_position,
4933                      store_buffer_top,
4934                      &EverythingsAPointer,
4935                      space->top(),
4936                      space->limit());
4937   }
4938 }
4939
4940
4941 void Heap::MapSpaceCheckStoreBuffer() {
4942   MapSpace* space = map_space();
4943   PageIterator pages(space);
4944
4945   store_buffer()->SortUniq();
4946
4947   while (pages.has_next()) {
4948     Page* page = pages.next();
4949     Object** current = reinterpret_cast<Object**>(page->ObjectAreaStart());
4950
4951     Address end = page->ObjectAreaEnd();
4952
4953     Object*** store_buffer_position = store_buffer()->Start();
4954     Object*** store_buffer_top = store_buffer()->Top();
4955
4956     Object** limit = reinterpret_cast<Object**>(end);
4957     CheckStoreBuffer(this,
4958                      current,
4959                      limit,
4960                      &store_buffer_position,
4961                      store_buffer_top,
4962                      &IsAMapPointerAddress,
4963                      space->top(),
4964                      space->limit());
4965   }
4966 }
4967
4968
4969 void Heap::LargeObjectSpaceCheckStoreBuffer() {
4970   LargeObjectIterator it(lo_space());
4971   for (HeapObject* object = it.Next(); object != NULL; object = it.Next()) {
4972     // We only have code, sequential strings, or fixed arrays in large
4973     // object space, and only fixed arrays can possibly contain pointers to
4974     // the young generation.
4975     if (object->IsFixedArray()) {
4976       Object*** store_buffer_position = store_buffer()->Start();
4977       Object*** store_buffer_top = store_buffer()->Top();
4978       Object** current = reinterpret_cast<Object**>(object->address());
4979       Object** limit =
4980           reinterpret_cast<Object**>(object->address() + object->Size());
4981       CheckStoreBuffer(this,
4982                        current,
4983                        limit,
4984                        &store_buffer_position,
4985                        store_buffer_top,
4986                        &EverythingsAPointer,
4987                        NULL,
4988                        NULL);
4989     }
4990   }
4991 }
4992 #endif
4993
4994
4995 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
4996   IterateStrongRoots(v, mode);
4997   IterateWeakRoots(v, mode);
4998 }
4999
5000
5001 void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
5002   v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
5003   v->Synchronize("symbol_table");
5004   if (mode != VISIT_ALL_IN_SCAVENGE &&
5005       mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
5006     // Scavenge collections have special processing for this.
5007     external_string_table_.Iterate(v);
5008   }
5009   v->Synchronize("external_string_table");
5010 }
5011
5012
5013 void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
5014   v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]);
5015   v->Synchronize("strong_root_list");
5016
5017   v->VisitPointer(BitCast<Object**>(&hidden_symbol_));
5018   v->Synchronize("symbol");
5019
5020   isolate_->bootstrapper()->Iterate(v);
5021   v->Synchronize("bootstrapper");
5022   isolate_->Iterate(v);
5023   v->Synchronize("top");
5024   Relocatable::Iterate(v);
5025   v->Synchronize("relocatable");
5026
5027 #ifdef ENABLE_DEBUGGER_SUPPORT
5028   isolate_->debug()->Iterate(v);
5029   if (isolate_->deoptimizer_data() != NULL) {
5030     isolate_->deoptimizer_data()->Iterate(v);
5031   }
5032 #endif
5033   v->Synchronize("debug");
5034   isolate_->compilation_cache()->Iterate(v);
5035   v->Synchronize("compilationcache");
5036
5037   // Iterate over local handles in handle scopes.
5038   isolate_->handle_scope_implementer()->Iterate(v);
5039   v->Synchronize("handlescope");
5040
5041   // Iterate over the builtin code objects and code stubs in the
5042   // heap. Note that it is not necessary to iterate over code objects
5043   // on scavenge collections.
5044   if (mode != VISIT_ALL_IN_SCAVENGE) {
5045     isolate_->builtins()->IterateBuiltins(v);
5046   }
5047   v->Synchronize("builtins");
5048
5049   // Iterate over global handles.
5050   switch (mode) {
5051     case VISIT_ONLY_STRONG:
5052       isolate_->global_handles()->IterateStrongRoots(v);
5053       break;
5054     case VISIT_ALL_IN_SCAVENGE:
5055       isolate_->global_handles()->IterateNewSpaceStrongAndDependentRoots(v);
5056       break;
5057     case VISIT_ALL_IN_SWEEP_NEWSPACE:
5058     case VISIT_ALL:
5059       isolate_->global_handles()->IterateAllRoots(v);
5060       break;
5061   }
5062   v->Synchronize("globalhandles");
5063
5064   // Iterate over pointers being held by inactive threads.
5065   isolate_->thread_manager()->Iterate(v);
5066   v->Synchronize("threadmanager");
5067
5068   // Iterate over the pointers the Serialization/Deserialization code is
5069   // holding.
5070   // During garbage collection this keeps the partial snapshot cache alive.
5071   // During deserialization of the startup snapshot this creates the partial
5072   // snapshot cache and deserializes the objects it refers to.  During
5073   // serialization this does nothing, since the partial snapshot cache is
5074   // empty.  However the next thing we do is create the partial snapshot,
5075   // filling up the partial snapshot cache with objects it needs as we go.
5076   SerializerDeserializer::Iterate(v);
5077   // We don't do a v->Synchronize call here, because in debug mode that will
5078   // output a flag to the snapshot.  However at this point the serializer and
5079   // deserializer are deliberately a little unsynchronized (see above) so the
5080   // checking of the sync flag in the snapshot would fail.
5081 }
5082
5083
5084 // TODO(1236194): Since the heap size is configurable on the command line
5085 // and through the API, we should gracefully handle the case that the heap
5086 // size is not big enough to fit all the initial objects.
5087 bool Heap::ConfigureHeap(int max_semispace_size,
5088                          intptr_t max_old_gen_size,
5089                          intptr_t max_executable_size) {
5090   if (HasBeenSetup()) return false;
5091
5092   if (max_semispace_size > 0) {
5093     if (max_semispace_size < Page::kPageSize) {
5094       max_semispace_size = Page::kPageSize;
5095       if (FLAG_trace_gc) {
5096         PrintF("Max semispace size cannot be less than %dkbytes\n",
5097                Page::kPageSize >> 10);
5098       }
5099     }
5100     max_semispace_size_ = max_semispace_size;
5101   }
5102
5103   if (Snapshot::IsEnabled()) {
5104     // If we are using a snapshot we always reserve the default amount
5105     // of memory for each semispace because code in the snapshot has
5106     // write-barrier code that relies on the size and alignment of new
5107     // space.  We therefore cannot use a larger max semispace size
5108     // than the default reserved semispace size.
5109     if (max_semispace_size_ > reserved_semispace_size_) {
5110       max_semispace_size_ = reserved_semispace_size_;
5111       if (FLAG_trace_gc) {
5112         PrintF("Max semispace size cannot be more than %dkbytes\n",
5113                reserved_semispace_size_ >> 10);
5114       }
5115     }
5116   } else {
5117     // If we are not using snapshots we reserve space for the actual
5118     // max semispace size.
5119     reserved_semispace_size_ = max_semispace_size_;
5120   }
5121
5122   if (max_old_gen_size > 0) max_old_generation_size_ = max_old_gen_size;
5123   if (max_executable_size > 0) {
5124     max_executable_size_ = RoundUp(max_executable_size, Page::kPageSize);
5125   }
5126
5127   // The max executable size must be less than or equal to the max old
5128   // generation size.
5129   if (max_executable_size_ > max_old_generation_size_) {
5130     max_executable_size_ = max_old_generation_size_;
5131   }
5132
5133   // The new space size must be a power of two to support single-bit testing
5134   // for containment.
5135   max_semispace_size_ = RoundUpToPowerOf2(max_semispace_size_);
5136   reserved_semispace_size_ = RoundUpToPowerOf2(reserved_semispace_size_);
5137   initial_semispace_size_ = Min(initial_semispace_size_, max_semispace_size_);
5138   external_allocation_limit_ = 10 * max_semispace_size_;
5139
5140   // The old generation is paged and needs at least one page for each space.
5141   int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
5142   max_old_generation_size_ = Max(static_cast<intptr_t>(paged_space_count *
5143                                                        Page::kPageSize),
5144                                  RoundUp(max_old_generation_size_,
5145                                          Page::kPageSize));
5146
5147   configured_ = true;
5148   return true;
5149 }
5150
5151
5152 bool Heap::ConfigureHeapDefault() {
5153   return ConfigureHeap(static_cast<intptr_t>(FLAG_max_new_space_size / 2) * KB,
5154                        static_cast<intptr_t>(FLAG_max_old_space_size) * MB,
5155                        static_cast<intptr_t>(FLAG_max_executable_size) * MB);
5156 }
5157
5158
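// Fills a HeapStats record with the current size and capacity of every
// space, global handle statistics, memory allocator totals and the last
// OS error.  When take_snapshot is set it additionally walks the whole
// heap and tallies object counts and sizes per instance type.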
5159 void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
5160   *stats->start_marker = HeapStats::kStartMarker;
5161   *stats->end_marker = HeapStats::kEndMarker;
5162   *stats->new_space_size = new_space_.SizeAsInt();
5163   *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
5164   *stats->old_pointer_space_size = old_pointer_space_->Size();
5165   *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
5166   *stats->old_data_space_size = old_data_space_->Size();
5167   *stats->old_data_space_capacity = old_data_space_->Capacity();
5168   *stats->code_space_size = code_space_->Size();
5169   *stats->code_space_capacity = code_space_->Capacity();
5170   *stats->map_space_size = map_space_->Size();
5171   *stats->map_space_capacity = map_space_->Capacity();
5172   *stats->cell_space_size = cell_space_->Size();
5173   *stats->cell_space_capacity = cell_space_->Capacity();
5174   *stats->lo_space_size = lo_space_->Size();
5175   isolate_->global_handles()->RecordStats(stats);
5176   *stats->memory_allocator_size = isolate()->memory_allocator()->Size();
5177   *stats->memory_allocator_capacity =
5178       isolate()->memory_allocator()->Size() +
5179       isolate()->memory_allocator()->Available();
5180   *stats->os_error = OS::GetLastError();
5182   if (take_snapshot) {
5183     HeapIterator iterator;
5184     for (HeapObject* obj = iterator.next();
5185          obj != NULL;
5186          obj = iterator.next()) {
5187       InstanceType type = obj->map()->instance_type();
5188       ASSERT(0 <= type && type <= LAST_TYPE);
5189       stats->objects_per_type[type]++;
5190       stats->size_per_type[type] += obj->Size();
5191     }
5192   }
5193 }
5194
5195
5196 intptr_t Heap::PromotedSpaceSize() {
5197   return old_pointer_space_->Size()
5198       + old_data_space_->Size()
5199       + code_space_->Size()
5200       + map_space_->Size()
5201       + cell_space_->Size()
5202       + lo_space_->Size();
5203 }
5204
5205
5206 int Heap::PromotedExternalMemorySize() {
5207   if (amount_of_external_allocated_memory_
5208       <= amount_of_external_allocated_memory_at_last_global_gc_) return 0;
5209   return amount_of_external_allocated_memory_
5210       - amount_of_external_allocated_memory_at_last_global_gc_;
5211 }
5212
5213 #ifdef DEBUG
5214
5215 // Tags 0, 1, and 3 are used. Use 2 for marking visited HeapObject.
5216 static const int kMarkTag = 2;
5217
5218
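// Debug-only helper that searches for a retaining path from a root to
// either a specific target object (search_target_) or any JSGlobalObject.
// Visited objects are marked by adding kMarkTag to their map pointer (so
// the map word no longer looks like a heap object) and unmarked again
// afterwards; the path that reached the target is collected on
// object_stack_ and printed by MarkRootObjectRecursively.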
5219 class HeapDebugUtils {
5220  public:
5221   explicit HeapDebugUtils(Heap* heap)
5222     : search_for_any_global_(false),
5223       search_target_(NULL),
5224       found_target_(false),
5225       object_stack_(20),
5226       heap_(heap) {
5227   }
5228
5229   class MarkObjectVisitor : public ObjectVisitor {
5230    public:
5231     explicit MarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5232
5233     void VisitPointers(Object** start, Object** end) {
5234       // Mark all HeapObject pointers in [start, end)
5235       for (Object** p = start; p < end; p++) {
5236         if ((*p)->IsHeapObject())
5237           utils_->MarkObjectRecursively(p);
5238       }
5239     }
5240
5241     HeapDebugUtils* utils_;
5242   };
5243
5244   void MarkObjectRecursively(Object** p) {
5245     if (!(*p)->IsHeapObject()) return;
5246
5247     HeapObject* obj = HeapObject::cast(*p);
5248
5249     Object* map = obj->map();
5250
5251     if (!map->IsHeapObject()) return;  // visited before
5252
5253     if (found_target_) return;  // stop if target found
5254     object_stack_.Add(obj);
5255     if ((search_for_any_global_ && obj->IsJSGlobalObject()) ||
5256         (!search_for_any_global_ && (obj == search_target_))) {
5257       found_target_ = true;
5258       return;
5259     }
5260
5261     // not visited yet
5262     Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
5263
5264     Address map_addr = map_p->address();
5265
5266     obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
5267
5268     MarkObjectRecursively(&map);
5269
5270     MarkObjectVisitor mark_visitor(this);
5271
5272     obj->IterateBody(map_p->instance_type(), obj->SizeFromMap(map_p),
5273                      &mark_visitor);
5274
5275     if (!found_target_)  // don't pop if found the target
5276       object_stack_.RemoveLast();
5277   }
5278
5279
5280   class UnmarkObjectVisitor : public ObjectVisitor {
5281    public:
5282     explicit UnmarkObjectVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5283
5284     void VisitPointers(Object** start, Object** end) {
5285       // Unmark all HeapObject pointers in [start, end)
5286       for (Object** p = start; p < end; p++) {
5287         if ((*p)->IsHeapObject())
5288           utils_->UnmarkObjectRecursively(p);
5289       }
5290     }
5291
5292     HeapDebugUtils* utils_;
5293   };
5294
5295
5296   void UnmarkObjectRecursively(Object** p) {
5297     if (!(*p)->IsHeapObject()) return;
5298
5299     HeapObject* obj = HeapObject::cast(*p);
5300
5301     Object* map = obj->map();
5302
5303     if (map->IsHeapObject()) return;  // unmarked already
5304
5305     Address map_addr = reinterpret_cast<Address>(map);
5306
5307     map_addr -= kMarkTag;
5308
5309     ASSERT_TAG_ALIGNED(map_addr);
5310
5311     HeapObject* map_p = HeapObject::FromAddress(map_addr);
5312
5313     obj->set_map(reinterpret_cast<Map*>(map_p));
5314
5315     UnmarkObjectRecursively(reinterpret_cast<Object**>(&map_p));
5316
5317     UnmarkObjectVisitor unmark_visitor(this);
5318
5319     obj->IterateBody(Map::cast(map_p)->instance_type(),
5320                      obj->SizeFromMap(Map::cast(map_p)),
5321                      &unmark_visitor);
5322   }
5323
5324
5325   void MarkRootObjectRecursively(Object** root) {
5326     if (search_for_any_global_) {
5327       ASSERT(search_target_ == NULL);
5328     } else {
5329       ASSERT(search_target_->IsHeapObject());
5330     }
5331     found_target_ = false;
5332     object_stack_.Clear();
5333
5334     MarkObjectRecursively(root);
5335     UnmarkObjectRecursively(root);
5336
5337     if (found_target_) {
5338       PrintF("=====================================\n");
5339       PrintF("====        Path to object       ====\n");
5340       PrintF("=====================================\n\n");
5341
5342       ASSERT(!object_stack_.is_empty());
5343       for (int i = 0; i < object_stack_.length(); i++) {
5344         if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
5345         Object* obj = object_stack_[i];
5346         obj->Print();
5347       }
5348       PrintF("=====================================\n");
5349     }
5350   }
5351
5352   // Helper class for visiting HeapObjects recursively.
5353   class MarkRootVisitor: public ObjectVisitor {
5354    public:
5355     explicit MarkRootVisitor(HeapDebugUtils* utils) : utils_(utils) { }
5356
5357     void VisitPointers(Object** start, Object** end) {
5358       // Visit all HeapObject pointers in [start, end)
5359       for (Object** p = start; p < end; p++) {
5360         if ((*p)->IsHeapObject())
5361           utils_->MarkRootObjectRecursively(p);
5362       }
5363     }
5364
5365     HeapDebugUtils* utils_;
5366   };
5367
5368   bool search_for_any_global_;
5369   Object* search_target_;
5370   bool found_target_;
5371   List<Object*> object_stack_;
5372   Heap* heap_;
5373
5374   friend class Heap;
5375 };
5376
5377 #endif
5378
5379 bool Heap::Setup(bool create_heap_objects) {
5380 #ifdef DEBUG
5381   allocation_timeout_ = FLAG_gc_interval;
5382   debug_utils_ = new HeapDebugUtils(this);
5383 #endif
5384
5385   // Initialize heap spaces and initial maps and objects. Whenever something
5386   // goes wrong, just return false. The caller should check the results and
5387   // call Heap::TearDown() to release allocated memory.
5388   //
5389   // If the heap is not yet configured (e.g. through the API), configure it.
5390   // Configuration is based on the flags new-space-size (really the semispace
5391   // size) and old-space-size if set or the initial values of semispace_size_
5392   // and old_generation_size_ otherwise.
5393   if (!configured_) {
5394     if (!ConfigureHeapDefault()) return false;
5395   }
5396
5397   gc_initializer_mutex->Lock();
5398   static bool initialized_gc = false;
5399   if (!initialized_gc) {
5400       initialized_gc = true;
5401       InitializeScavengingVisitorsTables();
5402       NewSpaceScavenger::Initialize();
5403       MarkCompactCollector::Initialize();
5404   }
5405   gc_initializer_mutex->Unlock();
5406
5407   MarkMapPointersAsEncoded(false);
5408
5409   // Setup memory allocator.
5410   if (!isolate_->memory_allocator()->Setup(MaxReserved(), MaxExecutableSize()))
5411       return false;
5412
5413   // Setup new space.
5414   if (!new_space_.Setup(reserved_semispace_size_, max_semispace_size_)) {
5415     return false;
5416   }
5417
5418   // Initialize old pointer space.
5419   old_pointer_space_ =
5420       new OldSpace(this,
5421                    max_old_generation_size_,
5422                    OLD_POINTER_SPACE,
5423                    NOT_EXECUTABLE);
5424   if (old_pointer_space_ == NULL) return false;
5425   if (!old_pointer_space_->Setup()) return false;
5426
5427   // Initialize old data space.
5428   old_data_space_ =
5429       new OldSpace(this,
5430                    max_old_generation_size_,
5431                    OLD_DATA_SPACE,
5432                    NOT_EXECUTABLE);
5433   if (old_data_space_ == NULL) return false;
5434   if (!old_data_space_->Setup()) return false;
5435
5436   // Initialize the code space, set its maximum capacity to the old
5437   // generation size. It needs executable memory.
5438   // On 64-bit platform(s), we put all code objects in a 2 GB range of
5439   // virtual address space, so that they can call each other with near calls.
5440   if (code_range_size_ > 0) {
5441     if (!isolate_->code_range()->Setup(code_range_size_)) {
5442       return false;
5443     }
5444   }
5445
5446   code_space_ =
5447       new OldSpace(this, max_old_generation_size_, CODE_SPACE, EXECUTABLE);
5448   if (code_space_ == NULL) return false;
5449   if (!code_space_->Setup()) return false;
5450
5451   // Initialize map space.
5452   map_space_ = new MapSpace(this,
5453                             max_old_generation_size_,
5454                             FLAG_max_map_space_pages,
5455                             MAP_SPACE);
5456   if (map_space_ == NULL) return false;
5457   if (!map_space_->Setup()) return false;
5458
5459   // Initialize global property cell space.
5460   cell_space_ = new CellSpace(this, max_old_generation_size_, CELL_SPACE);
5461   if (cell_space_ == NULL) return false;
5462   if (!cell_space_->Setup()) return false;
5463
5464   // The large object space may contain code or data.  We set the memory
5465   // to be non-executable here for safety, but this means we need to enable it
5466   // explicitly when allocating large code objects.
5467   lo_space_ = new LargeObjectSpace(this, max_old_generation_size_, LO_SPACE);
5468   if (lo_space_ == NULL) return false;
5469   if (!lo_space_->Setup()) return false;
5470   if (create_heap_objects) {
5471     // Create initial maps.
5472     if (!CreateInitialMaps()) return false;
5473     if (!CreateApiObjects()) return false;
5474
5475     // Create initial objects
5476     if (!CreateInitialObjects()) return false;
5477
5478     global_contexts_list_ = undefined_value();
5479   }
5480
5481   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
5482   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
5483
5484   store_buffer()->Setup();
5485
5486   return true;
5487 }
5488
5489
5490 void Heap::SetStackLimits() {
5491   ASSERT(isolate_ != NULL);
5492   ASSERT(isolate_ == isolate());
5493   // On 64-bit machines, pointers are generally out of range of Smis.  We write
5494   // something that looks like an out-of-range Smi to the GC.
5495
5496   // Set up the special root array entries containing the stack limits.
5497   // These are actually addresses, but the tag makes the GC ignore it.
5498   roots_[kStackLimitRootIndex] =
5499       reinterpret_cast<Object*>(
5500           (isolate_->stack_guard()->jslimit() & ~kSmiTagMask) | kSmiTag);
5501   roots_[kRealStackLimitRootIndex] =
5502       reinterpret_cast<Object*>(
5503           (isolate_->stack_guard()->real_jslimit() & ~kSmiTagMask) | kSmiTag);
5504 }
5505
5506
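// Releases everything Setup() created: global handles, the external
// string table, the new space, all paged spaces, the large object space,
// the store buffer, incremental marking state and the memory allocator.
// Cumulative GC statistics are printed first when
// FLAG_print_cumulative_gc_stat is set.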
5507 void Heap::TearDown() {
5508   if (FLAG_print_cumulative_gc_stat) {
5509     PrintF("\n\n");
5510     PrintF("gc_count=%d ", gc_count_);
5511     PrintF("mark_sweep_count=%d ", ms_count_);
5512     PrintF("max_gc_pause=%d ", get_max_gc_pause());
5513     PrintF("min_in_mutator=%d ", get_min_in_mutator());
5514     PrintF("max_alive_after_gc=%" V8_PTR_PREFIX "d ",
5515            get_max_alive_after_gc());
5516     PrintF("\n\n");
5517   }
5518
5519   isolate_->global_handles()->TearDown();
5520
5521   external_string_table_.TearDown();
5522
5523   new_space_.TearDown();
5524
5525   if (old_pointer_space_ != NULL) {
5526     old_pointer_space_->TearDown();
5527     delete old_pointer_space_;
5528     old_pointer_space_ = NULL;
5529   }
5530
5531   if (old_data_space_ != NULL) {
5532     old_data_space_->TearDown();
5533     delete old_data_space_;
5534     old_data_space_ = NULL;
5535   }
5536
5537   if (code_space_ != NULL) {
5538     code_space_->TearDown();
5539     delete code_space_;
5540     code_space_ = NULL;
5541   }
5542
5543   if (map_space_ != NULL) {
5544     map_space_->TearDown();
5545     delete map_space_;
5546     map_space_ = NULL;
5547   }
5548
5549   if (cell_space_ != NULL) {
5550     cell_space_->TearDown();
5551     delete cell_space_;
5552     cell_space_ = NULL;
5553   }
5554
5555   if (lo_space_ != NULL) {
5556     lo_space_->TearDown();
5557     delete lo_space_;
5558     lo_space_ = NULL;
5559   }
5560
5561   store_buffer()->TearDown();
5562   incremental_marking()->TearDown();
5563
5564   isolate_->memory_allocator()->TearDown();
5565
5566 #ifdef DEBUG
5567   delete debug_utils_;
5568   debug_utils_ = NULL;
5569 #endif
5570 }
5571
5572
5573 void Heap::Shrink() {
5574   // Try to shrink all paged spaces.
5575   PagedSpaces spaces;
5576   for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
5577     space->ReleaseAllUnusedPages();
5578 }
5579
5580
5581 void Heap::AddGCPrologueCallback(GCPrologueCallback callback, GCType gc_type) {
5582   ASSERT(callback != NULL);
5583   GCPrologueCallbackPair pair(callback, gc_type);
5584   ASSERT(!gc_prologue_callbacks_.Contains(pair));
5585   return gc_prologue_callbacks_.Add(pair);
5586 }
5587
5588
5589 void Heap::RemoveGCPrologueCallback(GCPrologueCallback callback) {
5590   ASSERT(callback != NULL);
5591   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
5592     if (gc_prologue_callbacks_[i].callback == callback) {
5593       gc_prologue_callbacks_.Remove(i);
5594       return;
5595     }
5596   }
5597   UNREACHABLE();
5598 }
5599
5600
5601 void Heap::AddGCEpilogueCallback(GCEpilogueCallback callback, GCType gc_type) {
5602   ASSERT(callback != NULL);
5603   GCEpilogueCallbackPair pair(callback, gc_type);
5604   ASSERT(!gc_epilogue_callbacks_.Contains(pair));
5605   return gc_epilogue_callbacks_.Add(pair);
5606 }
5607
5608
5609 void Heap::RemoveGCEpilogueCallback(GCEpilogueCallback callback) {
5610   ASSERT(callback != NULL);
5611   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
5612     if (gc_epilogue_callbacks_[i].callback == callback) {
5613       gc_epilogue_callbacks_.Remove(i);
5614       return;
5615     }
5616   }
5617   UNREACHABLE();
5618 }
5619
5620
5621 #ifdef DEBUG
5622
5623 class PrintHandleVisitor: public ObjectVisitor {
5624  public:
5625   void VisitPointers(Object** start, Object** end) {
5626     for (Object** p = start; p < end; p++)
5627       PrintF("  handle %p to %p\n",
5628              reinterpret_cast<void*>(p),
5629              reinterpret_cast<void*>(*p));
5630   }
5631 };
5632
5633 void Heap::PrintHandles() {
5634   PrintF("Handles:\n");
5635   PrintHandleVisitor v;
5636   isolate_->handle_scope_implementer()->Iterate(&v);
5637 }
5638
5639 #endif
5640
5641
5642 Space* AllSpaces::next() {
5643   switch (counter_++) {
5644     case NEW_SPACE:
5645       return HEAP->new_space();
5646     case OLD_POINTER_SPACE:
5647       return HEAP->old_pointer_space();
5648     case OLD_DATA_SPACE:
5649       return HEAP->old_data_space();
5650     case CODE_SPACE:
5651       return HEAP->code_space();
5652     case MAP_SPACE:
5653       return HEAP->map_space();
5654     case CELL_SPACE:
5655       return HEAP->cell_space();
5656     case LO_SPACE:
5657       return HEAP->lo_space();
5658     default:
5659       return NULL;
5660   }
5661 }
5662
5663
5664 PagedSpace* PagedSpaces::next() {
5665   switch (counter_++) {
5666     case OLD_POINTER_SPACE:
5667       return HEAP->old_pointer_space();
5668     case OLD_DATA_SPACE:
5669       return HEAP->old_data_space();
5670     case CODE_SPACE:
5671       return HEAP->code_space();
5672     case MAP_SPACE:
5673       return HEAP->map_space();
5674     case CELL_SPACE:
5675       return HEAP->cell_space();
5676     default:
5677       return NULL;
5678   }
5679 }
5680
5681
5682
5683 OldSpace* OldSpaces::next() {
5684   switch (counter_++) {
5685     case OLD_POINTER_SPACE:
5686       return HEAP->old_pointer_space();
5687     case OLD_DATA_SPACE:
5688       return HEAP->old_data_space();
5689     case CODE_SPACE:
5690       return HEAP->code_space();
5691     default:
5692       return NULL;
5693   }
5694 }
5695
5696
5697 SpaceIterator::SpaceIterator()
5698     : current_space_(FIRST_SPACE),
5699       iterator_(NULL),
5700       size_func_(NULL) {
5701 }
5702
5703
5704 SpaceIterator::SpaceIterator(HeapObjectCallback size_func)
5705     : current_space_(FIRST_SPACE),
5706       iterator_(NULL),
5707       size_func_(size_func) {
5708 }
5709
5710
5711 SpaceIterator::~SpaceIterator() {
5712   // Delete active iterator if any.
5713   delete iterator_;
5714 }
5715
5716
5717 bool SpaceIterator::has_next() {
5718   // Iterate until no more spaces.
5719   return current_space_ != LAST_SPACE;
5720 }
5721
5722
5723 ObjectIterator* SpaceIterator::next() {
5724   if (iterator_ != NULL) {
5725     delete iterator_;
5726     iterator_ = NULL;
5727     // Move to the next space
5728     current_space_++;
5729     if (current_space_ > LAST_SPACE) {
5730       return NULL;
5731     }
5732   }
5733
5734   // Return iterator for the new current space.
5735   return CreateIterator();
5736 }
5737
5738
5739 // Create an iterator for the space to iterate.
5740 ObjectIterator* SpaceIterator::CreateIterator() {
5741   ASSERT(iterator_ == NULL);
5742
5743   switch (current_space_) {
5744     case NEW_SPACE:
5745       iterator_ = new SemiSpaceIterator(HEAP->new_space(), size_func_);
5746       break;
5747     case OLD_POINTER_SPACE:
5748       iterator_ = new HeapObjectIterator(HEAP->old_pointer_space(), size_func_);
5749       break;
5750     case OLD_DATA_SPACE:
5751       iterator_ = new HeapObjectIterator(HEAP->old_data_space(), size_func_);
5752       break;
5753     case CODE_SPACE:
5754       iterator_ = new HeapObjectIterator(HEAP->code_space(), size_func_);
5755       break;
5756     case MAP_SPACE:
5757       iterator_ = new HeapObjectIterator(HEAP->map_space(), size_func_);
5758       break;
5759     case CELL_SPACE:
5760       iterator_ = new HeapObjectIterator(HEAP->cell_space(), size_func_);
5761       break;
5762     case LO_SPACE:
5763       iterator_ = new LargeObjectIterator(HEAP->lo_space(), size_func_);
5764       break;
5765   }
5766
5767   // Return the newly allocated iterator.
5768   ASSERT(iterator_ != NULL);
5769   return iterator_;
5770 }
5771
5772
5773 class HeapObjectsFilter {
5774  public:
5775   virtual ~HeapObjectsFilter() {}
5776   virtual bool SkipObject(HeapObject* object) = 0;
5777 };
5778
5779
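// Filter used by HeapIterator::kFilterUnreachable.  On construction it
// marks everything transitively reachable from the roots using the
// mark-compact mark bits; SkipObject() then filters out any object whose
// mark bit is not set, and the destructor clears all mark bits again.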
5780 class UnreachableObjectsFilter : public HeapObjectsFilter {
5781  public:
5782   UnreachableObjectsFilter() {
5783     MarkReachableObjects();
5784   }
5785
5786   ~UnreachableObjectsFilter() {
5787     Isolate::Current()->heap()->mark_compact_collector()->ClearMarkbits();
5788   }
5789
5790   bool SkipObject(HeapObject* object) {
5791     MarkBit mark_bit = Marking::MarkBitFrom(object);
5792     return !mark_bit.Get();
5793   }
5794
5795  private:
5796   class MarkingVisitor : public ObjectVisitor {
5797    public:
5798     MarkingVisitor() : marking_stack_(10) {}
5799
5800     void VisitPointers(Object** start, Object** end) {
5801       for (Object** p = start; p < end; p++) {
5802         if (!(*p)->IsHeapObject()) continue;
5803         HeapObject* obj = HeapObject::cast(*p);
5804         MarkBit mark_bit = Marking::MarkBitFrom(obj);
5805         if (!mark_bit.Get()) {
5806           mark_bit.Set();
5807           marking_stack_.Add(obj);
5808         }
5809       }
5810     }
5811
5812     void TransitiveClosure() {
5813       while (!marking_stack_.is_empty()) {
5814         HeapObject* obj = marking_stack_.RemoveLast();
5815         obj->Iterate(this);
5816       }
5817     }
5818
5819    private:
5820     List<HeapObject*> marking_stack_;
5821   };
5822
5823   void MarkReachableObjects() {
5824     Heap* heap = Isolate::Current()->heap();
5825     MarkingVisitor visitor;
5826     heap->IterateRoots(&visitor, VISIT_ALL);
5827     visitor.TransitiveClosure();
5828   }
5829
5830   AssertNoAllocation no_alloc;
5831 };
5832
5833
5834 HeapIterator::HeapIterator()
5835     : filtering_(HeapIterator::kNoFiltering),
5836       filter_(NULL) {
5837   Init();
5838 }
5839
5840
5841 HeapIterator::HeapIterator(HeapIterator::HeapObjectsFiltering filtering)
5842     : filtering_(filtering),
5843       filter_(NULL) {
5844   Init();
5845 }
5846
5847
5848 HeapIterator::~HeapIterator() {
5849   Shutdown();
5850 }
5851
5852
5853 void HeapIterator::Init() {
5854   // Start the iteration.
5855   space_iterator_ = new SpaceIterator;
5856   switch (filtering_) {
5857     case kFilterUnreachable:
5858       filter_ = new UnreachableObjectsFilter;
5859       break;
5860     default:
5861       break;
5862   }
5863   object_iterator_ = space_iterator_->next();
5864 }
5865
5866
5867 void HeapIterator::Shutdown() {
5868 #ifdef DEBUG
5869   // Assert that in filtering mode we have iterated through all
5870   // objects. Otherwise, the heap will be left in an inconsistent state.
5871   if (filtering_ != kNoFiltering) {
5872     ASSERT(object_iterator_ == NULL);
5873   }
5874 #endif
5875   // Make sure the last iterator is deallocated.
5876   delete space_iterator_;
5877   space_iterator_ = NULL;
5878   object_iterator_ = NULL;
5879   delete filter_;
5880   filter_ = NULL;
5881 }
5882
5883
5884 HeapObject* HeapIterator::next() {
5885   if (filter_ == NULL) return NextObject();
5886
5887   HeapObject* obj = NextObject();
5888   while (obj != NULL && filter_->SkipObject(obj)) obj = NextObject();
5889   return obj;
5890 }
5891
5892
5893 HeapObject* HeapIterator::NextObject() {
5894   // No iterator means we are done.
5895   if (object_iterator_ == NULL) return NULL;
5896
5897   if (HeapObject* obj = object_iterator_->next_object()) {
5898     // If the current iterator has more objects we are fine.
5899     return obj;
5900   } else {
5901     // Go through the spaces looking for one that has objects.
5902     while (space_iterator_->has_next()) {
5903       object_iterator_ = space_iterator_->next();
5904       if (HeapObject* obj = object_iterator_->next_object()) {
5905         return obj;
5906       }
5907     }
5908   }
5909   // Done with the last space.
5910   object_iterator_ = NULL;
5911   return NULL;
5912 }
5913
5914
5915 void HeapIterator::reset() {
5916   // Restart the iterator.
5917   Shutdown();
5918   Init();
5919 }
5920
5921
5922 #if defined(DEBUG) || defined(LIVE_OBJECT_LIST)
5923
5924 Object* const PathTracer::kAnyGlobalObject = reinterpret_cast<Object*>(NULL);
5925
5926 class PathTracer::MarkVisitor: public ObjectVisitor {
5927  public:
5928   explicit MarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
5929   void VisitPointers(Object** start, Object** end) {
5930     // Scan all HeapObject pointers in [start, end)
5931     for (Object** p = start; !tracer_->found() && (p < end); p++) {
5932       if ((*p)->IsHeapObject())
5933         tracer_->MarkRecursively(p, this);
5934     }
5935   }
5936
5937  private:
5938   PathTracer* tracer_;
5939 };
5940
5941
5942 class PathTracer::UnmarkVisitor: public ObjectVisitor {
5943  public:
5944   explicit UnmarkVisitor(PathTracer* tracer) : tracer_(tracer) {}
5945   void VisitPointers(Object** start, Object** end) {
5946     // Scan all HeapObject pointers in [start, end)
5947     for (Object** p = start; p < end; p++) {
5948       if ((*p)->IsHeapObject())
5949         tracer_->UnmarkRecursively(p, this);
5950     }
5951   }
5952
5953  private:
5954   PathTracer* tracer_;
5955 };
5956
5957
5958 void PathTracer::VisitPointers(Object** start, Object** end) {
5959   bool done = ((what_to_find_ == FIND_FIRST) && found_target_);
5960   // Visit all HeapObject pointers in [start, end)
5961   for (Object** p = start; !done && (p < end); p++) {
5962     if ((*p)->IsHeapObject()) {
5963       TracePathFrom(p);
5964       done = ((what_to_find_ == FIND_FIRST) && found_target_);
5965     }
5966   }
5967 }
5968
5969
5970 void PathTracer::Reset() {
5971   found_target_ = false;
5972   object_stack_.Clear();
5973 }
5974
5975
5976 void PathTracer::TracePathFrom(Object** root) {
5977   ASSERT((search_target_ == kAnyGlobalObject) ||
5978          search_target_->IsHeapObject());
5979   found_target_in_trace_ = false;
5980   object_stack_.Clear();
5981
5982   MarkVisitor mark_visitor(this);
5983   MarkRecursively(root, &mark_visitor);
5984
5985   UnmarkVisitor unmark_visitor(this);
5986   UnmarkRecursively(root, &unmark_visitor);
5987
5988   ProcessResults();
5989 }
5990
5991
5992 static bool SafeIsGlobalContext(HeapObject* obj) {
5993   return obj->map() == obj->GetHeap()->raw_unchecked_global_context_map();
5994 }
5995
5996
5997 void PathTracer::MarkRecursively(Object** p, MarkVisitor* mark_visitor) {
5998   if (!(*p)->IsHeapObject()) return;
5999
6000   HeapObject* obj = HeapObject::cast(*p);
6001
6002   Object* map = obj->map();
6003
6004   if (!map->IsHeapObject()) return;  // visited before
6005
6006   if (found_target_in_trace_) return;  // stop if target found
6007   object_stack_.Add(obj);
6008   if (((search_target_ == kAnyGlobalObject) && obj->IsJSGlobalObject()) ||
6009       (obj == search_target_)) {
6010     found_target_in_trace_ = true;
6011     found_target_ = true;
6012     return;
6013   }
6014
6015   bool is_global_context = SafeIsGlobalContext(obj);
6016
6017   // not visited yet
6018   Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
6019
6020   Address map_addr = map_p->address();
6021
6022   obj->set_map(reinterpret_cast<Map*>(map_addr + kMarkTag));
6023
6024   // Scan the object body.
6025   if (is_global_context && (visit_mode_ == VISIT_ONLY_STRONG)) {
6026     // This is specialized to scan Contexts properly.
6027     Object** start = reinterpret_cast<Object**>(obj->address() +
6028                                                 Context::kHeaderSize);
6029     Object** end = reinterpret_cast<Object**>(obj->address() +
6030         Context::kHeaderSize + Context::FIRST_WEAK_SLOT * kPointerSize);
6031     mark_visitor->VisitPointers(start, end);
6032   } else {
6033     obj->IterateBody(map_p->instance_type(),
6034                      obj->SizeFromMap(map_p),
6035                      mark_visitor);
6036   }
6037
6038   // Scan the map after the body because the body is a lot more interesting
6039   // when doing leak detection.
6040   MarkRecursively(&map, mark_visitor);
6041
6042   if (!found_target_in_trace_)  // don't pop if found the target
6043     object_stack_.RemoveLast();
6044 }
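
// Descriptive note on the marking scheme used by MarkRecursively above and
// UnmarkRecursively below: an object is "marked" by overwriting its map word
// with the map's address plus kMarkTag. The tagged value no longer passes
// IsHeapObject(), which is exactly what the "visited before" and "unmarked
// already" checks test for; unmarking subtracts kMarkTag again to restore
// the original map pointer.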
6045
6046
6047 void PathTracer::UnmarkRecursively(Object** p, UnmarkVisitor* unmark_visitor) {
6048   if (!(*p)->IsHeapObject()) return;
6049
6050   HeapObject* obj = HeapObject::cast(*p);
6051
6052   Object* map = obj->map();
6053
6054   if (map->IsHeapObject()) return;  // unmarked already
6055
6056   Address map_addr = reinterpret_cast<Address>(map);
6057
6058   map_addr -= kMarkTag;
6059
6060   ASSERT_TAG_ALIGNED(map_addr);
6061
6062   HeapObject* map_p = HeapObject::FromAddress(map_addr);
6063
6064   obj->set_map(reinterpret_cast<Map*>(map_p));
6065
6066   UnmarkRecursively(reinterpret_cast<Object**>(&map_p), unmark_visitor);
6067
6068   obj->IterateBody(Map::cast(map_p)->instance_type(),
6069                    obj->SizeFromMap(Map::cast(map_p)),
6070                    unmark_visitor);
6071 }
6072
6073
6074 void PathTracer::ProcessResults() {
6075   if (found_target_) {
6076     PrintF("=====================================\n");
6077     PrintF("====        Path to object       ====\n");
6078     PrintF("=====================================\n\n");
6079
6080     ASSERT(!object_stack_.is_empty());
6081     for (int i = 0; i < object_stack_.length(); i++) {
6082       if (i > 0) PrintF("\n     |\n     |\n     V\n\n");
6083       Object* obj = object_stack_[i];
6084 #ifdef OBJECT_PRINT
6085       obj->Print();
6086 #else
6087       obj->ShortPrint();
6088 #endif
6089     }
6090     PrintF("=====================================\n");
6091   }
6092 }
6093 #endif  // DEBUG || LIVE_OBJECT_LIST
6094
6095
6096 #ifdef DEBUG
6097 // Triggers a depth-first traversal of reachable objects from the roots,
6098 // finds a path to a specific heap object, and prints it.
6099 void Heap::TracePathToObject(Object* target) {
6100   PathTracer tracer(target, PathTracer::FIND_ALL, VISIT_ALL);
6101   IterateRoots(&tracer, VISIT_ONLY_STRONG);
6102 }
6103
6104
6105 // Triggers a depth-first traversal of reachable objects from the roots,
6106 // finds a path to any global object, and prints it. Useful for
6107 // determining the source of leaks of global objects.
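// Illustrative usage note (an assumption, not documented in this file): both
// tracers are normally invoked by hand from a debugger while hunting leaks,
// for example:
//   (gdb) call v8::internal::Isolate::Current()->heap()->TracePathToGlobal()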
6108 void Heap::TracePathToGlobal() {
6109   PathTracer tracer(PathTracer::kAnyGlobalObject,
6110                     PathTracer::FIND_ALL,
6111                     VISIT_ALL);
6112   IterateRoots(&tracer, VISIT_ONLY_STRONG);
6113 }
6114 #endif
6115
6116
6117 static intptr_t CountTotalHolesSize() {
6118   intptr_t holes_size = 0;
6119   OldSpaces spaces;
6120   for (OldSpace* space = spaces.next();
6121        space != NULL;
6122        space = spaces.next()) {
6123     holes_size += space->Waste() + space->Available();
6124   }
6125   return holes_size;
6126 }
6127
6128
6129 GCTracer::GCTracer(Heap* heap)
6130     : start_time_(0.0),
6131       start_size_(0),
6132       gc_count_(0),
6133       full_gc_count_(0),
6134       allocated_since_last_gc_(0),
6135       spent_in_mutator_(0),
6136       promoted_objects_size_(0),
6137       heap_(heap) {
6138   if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6139   start_time_ = OS::TimeCurrentMillis();
6140   start_size_ = heap_->SizeOfObjects();
6141
6142   for (int i = 0; i < Scope::kNumberOfScopes; i++) {
6143     scopes_[i] = 0;
6144   }
6145
6146   in_free_list_or_wasted_before_gc_ = CountTotalHolesSize();
6147
6148   allocated_since_last_gc_ =
6149       heap_->SizeOfObjects() - heap_->alive_after_last_gc_;
6150
6151   if (heap_->last_gc_end_timestamp_ > 0) {
6152     spent_in_mutator_ = Max(start_time_ - heap_->last_gc_end_timestamp_, 0.0);
6153   }
6154
6155   steps_count_ = heap_->incremental_marking()->steps_count();
6156   steps_took_ = heap_->incremental_marking()->steps_took();
6157   longest_step_ = heap_->incremental_marking()->longest_step();
6158   steps_count_since_last_gc_ =
6159       heap_->incremental_marking()->steps_count_since_last_gc();
6160   steps_took_since_last_gc_ =
6161       heap_->incremental_marking()->steps_took_since_last_gc();
6162 }
6163
6164
6165 GCTracer::~GCTracer() {
6166   // Print ONE line iff a trace flag is set.
6167   if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
6168
6169   bool first_gc = (heap_->last_gc_end_timestamp_ == 0);
6170
6171   heap_->alive_after_last_gc_ = heap_->SizeOfObjects();
6172   heap_->last_gc_end_timestamp_ = OS::TimeCurrentMillis();
6173
6174   int time = static_cast<int>(heap_->last_gc_end_timestamp_ - start_time_);
6175
6176   // Update cumulative GC statistics if required.
6177   if (FLAG_print_cumulative_gc_stat) {
6178     heap_->max_gc_pause_ = Max(heap_->max_gc_pause_, time);
6179     heap_->max_alive_after_gc_ = Max(heap_->max_alive_after_gc_,
6180                                      heap_->alive_after_last_gc_);
6181     if (!first_gc) {
6182       heap_->min_in_mutator_ = Min(heap_->min_in_mutator_,
6183                                    static_cast<int>(spent_in_mutator_));
6184     }
6185   }
6186
6187   if (!FLAG_trace_gc_nvp) {
6188     int external_time = static_cast<int>(scopes_[Scope::EXTERNAL]);
6189
6190     PrintF("%s %.1f -> %.1f MB, ",
6191            CollectorString(),
6192            static_cast<double>(start_size_) / MB,
6193            SizeOfHeapObjects());
6194
6195     if (external_time > 0) PrintF("%d / ", external_time);
6196     PrintF("%d ms", time);
6197     if (steps_count_ > 0) {
6198       if (collector_ == SCAVENGER) {
6199         PrintF(" (+ %d ms in %d steps since last GC)",
6200                static_cast<int>(steps_took_since_last_gc_),
6201                steps_count_since_last_gc_);
6202       } else {
6203         PrintF(" (+ %d ms in %d steps since start of marking, "
6204                    "biggest step %f ms)",
6205                static_cast<int>(steps_took_),
6206                steps_count_,
6207                longest_step_);
6208       }
6209     }
6210     PrintF(".\n");
6211   } else {
6212     PrintF("pause=%d ", time);
6213     PrintF("mutator=%d ",
6214            static_cast<int>(spent_in_mutator_));
6215
6216     PrintF("gc=");
6217     switch (collector_) {
6218       case SCAVENGER:
6219         PrintF("s");
6220         break;
6221       case MARK_COMPACTOR:
6222         PrintF("ms");
6223         break;
6224       default:
6225         UNREACHABLE();
6226     }
6227     PrintF(" ");
6228
6229     PrintF("external=%d ", static_cast<int>(scopes_[Scope::EXTERNAL]));
6230     PrintF("mark=%d ", static_cast<int>(scopes_[Scope::MC_MARK]));
6231     PrintF("sweep=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP]));
6232     PrintF("sweepns=%d ", static_cast<int>(scopes_[Scope::MC_SWEEP_NEWSPACE]));
6233     PrintF("compact=%d ", static_cast<int>(scopes_[Scope::MC_COMPACT]));
6234
6235     PrintF("total_size_before=%" V8_PTR_PREFIX "d ", start_size_);
6236     PrintF("total_size_after=%" V8_PTR_PREFIX "d ", heap_->SizeOfObjects());
6237     PrintF("holes_size_before=%" V8_PTR_PREFIX "d ",
6238            in_free_list_or_wasted_before_gc_);
6239     PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", CountTotalHolesSize());
6240
6241     PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc_);
6242     PrintF("promoted=%" V8_PTR_PREFIX "d ", promoted_objects_size_);
6243
6244     if (collector_ == SCAVENGER) {
6245       PrintF("stepscount=%d ", steps_count_since_last_gc_);
6246       PrintF("stepstook=%d ", static_cast<int>(steps_took_since_last_gc_));
6247     } else {
6248       PrintF("stepscount=%d ", steps_count_);
6249       PrintF("stepstook=%d ", static_cast<int>(steps_took_));
6250     }
6251
6252     PrintF("\n");
6253   }
6254
6255   heap_->PrintShortHeapStatistics();
6256 }
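
// Example of the short (non-nvp) trace line produced above, with purely
// illustrative values:
//
//   Mark-sweep 42.7 -> 30.2 MB, 5 / 120 ms (+ 40 ms in 12 steps since start
//   of marking, biggest step 8.000000 ms).
//
// where 5 is the time spent in external callbacks and 120 is the total
// pause. With FLAG_trace_gc_nvp the same data is emitted as key=value pairs
// (pause=, mutator=, gc=, external=, mark=, sweep=, ...) instead.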
6257
6258
6259 const char* GCTracer::CollectorString() {
6260   switch (collector_) {
6261     case SCAVENGER:
6262       return "Scavenge";
6263     case MARK_COMPACTOR:
6264       return "Mark-sweep";
6265   }
6266   return "Unknown GC";
6267 }
6268
6269
6270 int KeyedLookupCache::Hash(Map* map, String* name) {
6271   // Uses only the lower 32 bits if pointers are larger.
6272   uintptr_t addr_hash =
6273       static_cast<uint32_t>(reinterpret_cast<uintptr_t>(map)) >> kMapHashShift;
6274   return static_cast<uint32_t>((addr_hash ^ name->Hash()) & kCapacityMask);
6275 }
6276
6277
6278 int KeyedLookupCache::Lookup(Map* map, String* name) {
6279   int index = Hash(map, name);
6280   Key& key = keys_[index];
6281   if ((key.map == map) && key.name->Equals(name)) {
6282     return field_offsets_[index];
6283   }
6284   return kNotFound;
6285 }
6286
6287
6288 void KeyedLookupCache::Update(Map* map, String* name, int field_offset) {
6289   String* symbol;
6290   if (HEAP->LookupSymbolIfExists(name, &symbol)) {
6291     int index = Hash(map, symbol);
6292     Key& key = keys_[index];
6293     key.map = map;
6294     key.name = symbol;
6295     field_offsets_[index] = field_offset;
6296   }
6297 }
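
// Typical use of this cache (illustrative sketch; the isolate accessor name
// is assumed from the surrounding code base, not defined in this file):
//
//   KeyedLookupCache* cache = isolate->keyed_lookup_cache();
//   int offset = cache->Lookup(receiver_map, name);
//   if (offset == KeyedLookupCache::kNotFound) {
//     // ... perform the slow property lookup, then cache the result:
//     cache->Update(receiver_map, name, field_offset);
//   }
//
// Update() only stores entries for names that already exist as symbols, so
// lookups with non-symbol strings never populate the cache.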
6298
6299
6300 void KeyedLookupCache::Clear() {
6301   for (int index = 0; index < kLength; index++) keys_[index].map = NULL;
6302 }
6303
6304
6305 void DescriptorLookupCache::Clear() {
6306   for (int index = 0; index < kLength; index++) keys_[index].array = NULL;
6307 }
6308
6309
6310 #ifdef DEBUG
6311 void Heap::GarbageCollectionGreedyCheck() {
6312   ASSERT(FLAG_gc_greedy);
6313   if (isolate_->bootstrapper()->IsActive()) return;
6314   if (disallow_allocation_failure()) return;
6315   CollectGarbage(NEW_SPACE);
6316 }
6317 #endif
6318
6319
6320 TranscendentalCache::SubCache::SubCache(Type t)
6321   : type_(t),
6322     isolate_(Isolate::Current()) {
6323   uint32_t in0 = 0xffffffffu;  // Bit-pattern for a NaN that isn't
6324   uint32_t in1 = 0xffffffffu;  // generated by the FPU.
6325   for (int i = 0; i < kCacheSize; i++) {
6326     elements_[i].in[0] = in0;
6327     elements_[i].in[1] = in1;
6328     elements_[i].output = NULL;
6329   }
6330 }
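
// Descriptive note: cache lookups compare the two 32-bit halves of the
// double argument against in[0] and in[1], so seeding every element with a
// NaN bit pattern that the FPU never produces guarantees that an empty slot
// cannot accidentally match a real input.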
6331
6332
6333 void TranscendentalCache::Clear() {
6334   for (int i = 0; i < kNumberOfCaches; i++) {
6335     if (caches_[i] != NULL) {
6336       delete caches_[i];
6337       caches_[i] = NULL;
6338     }
6339   }
6340 }
6341
6342
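// Compacts both backing lists: entries that were cleared to the null
// sentinel are dropped, surviving new-space strings are kept in
// new_space_strings_, and strings that have left new space are moved to
// old_space_strings_.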
6343 void ExternalStringTable::CleanUp() {
6344   int last = 0;
6345   for (int i = 0; i < new_space_strings_.length(); ++i) {
6346     if (new_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
6347     if (heap_->InNewSpace(new_space_strings_[i])) {
6348       new_space_strings_[last++] = new_space_strings_[i];
6349     } else {
6350       old_space_strings_.Add(new_space_strings_[i]);
6351     }
6352   }
6353   new_space_strings_.Rewind(last);
6354   last = 0;
6355   for (int i = 0; i < old_space_strings_.length(); ++i) {
6356     if (old_space_strings_[i] == heap_->raw_unchecked_null_value()) continue;
6357     ASSERT(!heap_->InNewSpace(old_space_strings_[i]));
6358     old_space_strings_[last++] = old_space_strings_[i];
6359   }
6360   old_space_strings_.Rewind(last);
6361   if (FLAG_verify_heap) {
6362     Verify();
6363   }
6364 }
6365
6366
6367 void ExternalStringTable::TearDown() {
6368   new_space_strings_.Free();
6369   old_space_strings_.Free();
6370 }
6371
6372
6373 void Heap::QueueMemoryChunkForFree(MemoryChunk* chunk) {
6374   chunk->set_next_chunk(chunks_queued_for_free_);
6375   chunks_queued_for_free_ = chunk;
6376 }
6377
6378
6379 void Heap::FreeQueuedChunks() {
6380   if (chunks_queued_for_free_ == NULL) return;
6381   MemoryChunk* next;
6382   MemoryChunk* chunk;
6383   for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6384     next = chunk->next_chunk();
6385     chunk->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
6386
6387     if (chunk->owner()->identity() == LO_SPACE) {
5888       // StoreBuffer::Filter relies on MemoryChunk::FromAnyPointerAddress.
5889       // If FromAnyPointerAddress encounters a slot that belongs to a large
5890       // chunk queued for deletion, it will fail to find the chunk because
5891       // it searches the list of pages owned by the large object space, and
5892       // queued chunks have already been detached from that list.
5893       // To work around this we split the large chunk into normal, kPageSize-
5894       // aligned pieces and initialize the owner field and flags of each piece.
5895       // If FromAnyPointerAddress encounters a slot that belongs to one of
5896       // these smaller pieces it will treat it as a slot on a normal Page.
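      //
      // Illustrative layout after the split, for a large chunk spanning
      // three pages (sketch only):
      //
      //   | LO chunk header | fake hdr @ +1 page | fake hdr @ +2 pages |
      //
      // Each fake header is flagged ABOUT_TO_BE_FREED and owned by
      // lo_space(), so FromAnyPointerAddress can resolve addresses inside
      // the queued chunk to a valid MemoryChunk header.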
6397       MemoryChunk* inner = MemoryChunk::FromAddress(
6398           chunk->address() + Page::kPageSize);
6399       MemoryChunk* inner_last = MemoryChunk::FromAddress(
6400           chunk->address() + chunk->size() - 1);
6401       while (inner <= inner_last) {
6402         // The size of a large chunk is always a multiple of
6403         // OS::AllocationAlignment(), so there is always
6404         // enough space for a fake MemoryChunk header.
6405         inner->set_owner(lo_space());
6406         inner->SetFlag(MemoryChunk::ABOUT_TO_BE_FREED);
6407         inner = MemoryChunk::FromAddress(
6408             inner->address() + Page::kPageSize);
6409       }
6410     }
6411   }
6412   isolate_->heap()->store_buffer()->Compact();
6413   isolate_->heap()->store_buffer()->Filter(MemoryChunk::ABOUT_TO_BE_FREED);
6414   for (chunk = chunks_queued_for_free_; chunk != NULL; chunk = next) {
6415     next = chunk->next_chunk();
6416     isolate_->memory_allocator()->Free(chunk);
6417   }
6418   chunks_queued_for_free_ = NULL;
6419 }
6420
6421 } }  // namespace v8::internal