// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

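// Note on the two visitor classes defined below: StaticNewSpaceVisitor's
// visit functions return the object size (hence the int specializations) and
// are used when iterating objects in new space, while StaticMarkingVisitor's
// visit functions return void and are used during mark-compact marking (they
// record slots and reloc slots with the mark-compact collector).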
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

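  // The two VisitPointers calls below deliberately leave out kWeakNextOffset
  // and (as the STATIC_ASSERT checks, the adjacent) kWeakFirstViewOffset, so
  // these weak list links are not visited as strong references here. The
  // JSTypedArray and JSDataView visitors below skip their weak-next field in
  // the same way.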
  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


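// Dispatch table used during marking. Kinds registered with DataObjectVisitor
// below (byte arrays, sequential strings, fixed double/typed arrays, free
// space) are treated as pointer-free data and are not scanned for references.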
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

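  // The native context's weak slots (FIRST_WEAK_SLOT up to
  // NATIVE_CONTEXT_SLOTS) are only slot-recorded below, not marked through,
  // so the objects they reference stay weakly held while the recorded slots
  // can still be updated if their targets move.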
  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue weak cell in linked list of encountered weak collections.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


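// Default handling for JSRegExp: every field, from the properties slot up to
// the end of the in-object properties, is visited as a strong reference.
// Concrete visitors may register their own handler for kVisitJSRegExp instead
// (see the note in Initialize above).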
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsSimpleTransition(raw_transitions)) {
    StaticVisitor::VisitPointer(
        heap, HeapObject::RawField(map, Map::kTransitionsOffset));
  }
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveReferences.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


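// Iterates the Code object's header fields that hold heap pointers and then
// every RelocInfo entry whose mode is included in mode_mask below.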
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_