// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {

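// Populates the dispatch table used by the scavenger (new-space) visitor.
// Each instance type is mapped to a body visitor that returns the object's
// size in bytes so the caller can advance to the next object.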
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


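// New-space visitor for JSArrayBuffer. In addition to visiting the pointer
// fields, a non-external buffer registers its backing store as live with the
// heap (the boolean argument appears to distinguish this scavenge path from
// the mark-compact path further below, which passes false).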
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->RegisterLiveArrayBuffer(true,
                                  JSArrayBuffer::cast(object)->backing_store());
  }
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


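// Populates the dispatch table used during full (mark-compact) marking.
// These visitors return void; object sizes are not needed on this path.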
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


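// Marks the Code object referenced from a code entry field (e.g. a
// JSFunction's code entry) and records the slot for later pointer updating.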
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


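// Native contexts are visited with a body descriptor that covers the strong
// slots; the weak slots are only recorded here so the collector can update
// them later, and the objects they reference are not marked at this point.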
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(object, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells(),
                        UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_weak_cells(weak_cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if ((FLAG_flush_optimized_code_cache ||
       heap->isolate()->serializer_enabled()) &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      MarkOptimizedCodeMap(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing;
      // this is needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


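// When code flushing is enabled, a flushable function is handed to the code
// flusher and its code reference is treated weakly; otherwise the function's
// unoptimized code (and, for optimized code, the code of inlined functions)
// is marked to keep it from being flushed.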
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


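// JSRegExp objects are visited in full, including in-object properties, with
// all fields treated as strong references. Registration of this visitor is
// left to the concrete StaticVisitor (see Initialize above).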
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


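// Mark-compact counterpart of the new-space JSArrayBuffer visitor above; it
// registers the backing store of a non-external buffer with false as the
// first argument.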
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->RegisterLiveArrayBuffer(false,
                                  JSArrayBuffer::cast(object)->backing_store());
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields
  // of this map are visited. Prototype maps don't keep track of transitions,
  // so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


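// Marks a transition array without pushing it on the marking deque. Only the
// prototype-transitions slot and the transition keys are visited strongly;
// transition targets are not visited here, in keeping with the weak
// treatment of transitions described in VisitMap above.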
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, transitions,
                                transitions->GetPrototypeTransitionsSlot());
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending on
  // the age of the code object, we treat it as a strong or a weak reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // For an optimized function we should retain both the non-optimized version
  // of its code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* const literals = data->LiteralArray();
  int const inlined_count = data->InlinedFunctionCount()->value();
  for (int i = 0; i < inlined_count; ++i) {
    StaticVisitor::MarkObject(
        heap, SharedFunctionInfo::cast(literals->get(i))->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


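// Determines whether this function's unoptimized code may be flushed. The
// checks below are conservative: any reason to keep the code (already
// marked, builtin context, optimized code, code that is still young) makes
// it non-flushable.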
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


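// Visits the SharedFunctionInfo body from kStartOffset to kEndOffset,
// keeping the code object (and everything else) strongly reachable.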
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


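// Visits a JSFunction with its code entry treated as a strong reference;
// compare VisitJSFunctionWeakCode below, which skips the code entry slot.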
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  VisitCodeEntry(heap, object,
                 object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_