// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {


template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}
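
// Dispatch is data-driven: every Map stores a VisitorId, and GetVisitor()
// above turns that id into a callback specialized for the object's layout,
// so visiting an object is one table load plus an indirect call. A rough
// usage sketch (not the literal call site; the static visitors declared in
// objects-visiting.h wrap this in their IterateBody()):
//
//   // int size = table_.GetVisitor(map)(map, obj);  // new-space visitors
//   // table_.GetVisitor(map)(map, obj);             // marking visitors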


template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
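
// The RegisterSpecializations calls above (declared in objects-visiting.h)
// fill the whole id range between kVisit*Object and the corresponding
// *Generic id: the ids in between encode common instance sizes, so small
// fixed-size objects are handled by a size-specialized visit function while
// the *Generic id falls back to the visitor's generic Visit.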


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  JSArrayBuffer::JSArrayBufferIterateBody<
      StaticNewSpaceVisitor<StaticVisitor> >(heap, object);
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}
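
// Note: the bytecode array visitor above only walks the tagged fields in
// [kConstantPoolOffset, kHeaderSize); the bytecodes behind the header are
// raw data with no heap pointers, while the returned size still covers the
// whole BytecodeArray.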


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
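
// Compared to the new-space table above, the marking table treats sequential
// strings, byte arrays and other pointer-free objects as plain data
// (DataObjectVisitor) and routes maps, code, SharedFunctionInfos, weak cells
// and friends to dedicated visitors that record slots with the mark-compact
// collector and treat selected references weakly. Marking visitors also
// return void, whereas the new-space visitors return the object size to
// their caller.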


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(object, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
                                                           *slot);
    } else {
      // If we do not know about liveness of values of weak cells, we have to
      // process them when we know the liveness of the whole transitive
      // closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if ((FLAG_flush_optimized_code_cache ||
       heap->isolate()->serializer_enabled()) &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      MarkOptimizedCodeMap(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}
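
// The WeakCode/StrongCode split used above: VisitJSFunctionWeakCode and
// VisitSharedFunctionInfoWeakCode (defined below) deliberately skip the code
// slot, so marking alone does not keep a flushing candidate's unoptimized
// code alive; after marking, the code flusher decides (roughly) whether to
// flush that code or keep it. The *StrongCode variants visit the code slot
// like any other strong reference.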


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->GetInObjectProperties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  JSArrayBuffer::JSArrayBufferIterateBody<StaticVisitor>(heap, object);
  if (!JSArrayBuffer::cast(object)->is_external() &&
      !heap->InNewSpace(object)) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited.  Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
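
// MarkObjectWithoutPush marks the descriptor array without pushing it onto
// the marking deque, so its slots are not scanned generically; only the
// ranges explicitly visited above keep descriptors alive. Descriptor entries
// belonging to other maps are left for their owning map to mark.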


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, transitions,
                                transitions->GetPrototypeTransitionsSlot());
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending on
  // the age of the code object, we treat it as a strong or a weak reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* const literals = data->LiteralArray();
  int const inlined_count = data->InlinedFunctionCount()->value();
  for (int i = 0; i < inlined_count; ++i) {
    StaticVisitor::MarkObject(
        heap, SharedFunctionInfo::cast(literals->get(i))->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  VisitCodeEntry(heap, object,
                 object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));


  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_