// deps/v8/src/heap/objects-visiting-inl.h
// From the nodejs.git import "deps: upgrade v8 to 3.31.74.1".

// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

// Assumed include: this inline header defines members of the visitor
// templates declared in objects-visiting.h. In-tree builds may instead rely
// on the including file pulling that header in first.
#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {

template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
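  // table_ is the class's static dispatch table: it maps each visitor id
  // (cached on the object's Map as its visitor_id) to a callback of type
  // int (*)(Map*, HeapObject*). Every callback registered here returns the
  // visited object's size, which lets the scavenger step object by object
  // through new space.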
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
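
// Usage sketch (hypothetical caller; the actual entry point is
// StaticNewSpaceVisitor::IterateBody in objects-visiting.h): once
// Initialize() has populated the table, visiting an object is a single
// indirect call that yields the object's size, roughly:
//
//   int size = table_.GetVisitor(map)(map, obj);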


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

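  // Visit the body in two chunks so that the two weak fields (weak_next and
  // weak_first_view, asserted adjacent below) are skipped: they thread
  // heap-internal weak lists of live array buffers and their views and must
  // not keep their targets alive.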
  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
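  // As in VisitJSArrayBuffer above (and VisitJSDataView below), the body is
  // split into two pointer ranges so that the weak_next field is skipped.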
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
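  // Unlike the new-space table above, these callbacks return void; the
  // marker does not need object sizes. Objects without heap pointers in
  // their bodies (byte arrays, sequential strings, free space, fixed
  // double/typed arrays) share the no-op DataObjectVisitor.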
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
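  // entry_address is the address of a slot holding a raw pointer into a Code
  // object's instruction area rather than a tagged pointer, so it uses a
  // dedicated recording API that lets the collector fix the entry up if the
  // code object moves during compaction.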
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here, and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() &&
      ((heap->flush_monomorphic_ics() && !target->embeds_maps_weakly()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
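  // Record the native context's weak slots so that the collector updates
  // them if the objects they point to are moved during compaction.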
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array, but do not push it onto
    // the marking stack; this keeps the references from it weak. Dead code
    // entries are cleaned out when property cells are iterated in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue the weak cell in the linked list of weak cells encountered so
  // far. Weak cells with cleared values can be ignored because they always
  // contain Smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array, but do not push it
    // onto the marking stack; this keeps the references from it weak. Dead
    // code entries are cleaned out when allocation sites are iterated in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is still enqueued, but its table
  // is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
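  // Advance the code object's age on every full GC (skipped while
  // serializing); IsOld() later feeds the IsFlushable() checks that decide
  // whether unoptimized code may be dropped and lazily recompiled.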
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs when code flushing is
      // disabled; this is needed because cached code doesn't contain
      // breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
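  // The constant pool keeps raw code-entry addresses and tagged heap
  // pointers in separate sections: code entries go through VisitCodeEntry,
  // while heap entries are recorded as ordinary slots and marked strongly
  // unless the pool's weak-object state says optimized code embeds them
  // weakly.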
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* heap_object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, heap_object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(heap_object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, heap_object);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit the shared function info immediately to avoid double-checking
      // its flushability later. This is just an optimization; the shared
      // function info would eventually be visited anyway.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
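  // A JSRegExp's body extends past JSRegExp::kSize when its map declares
  // in-object properties, so the real end offset is computed from the map
  // rather than taken from a fixed body descriptor.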
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the map's dependent code array, but do not push it onto the marking
  // stack; this keeps the references from it weak. Dead code entries are
  // cleaned out when maps are iterated in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this keeps the references from it weak. Dead prototype
    // transitions are cleaned in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For an optimized function we should retain both the non-optimized
  // version of its code and the non-optimized versions of all inlined
  // functions. This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code; young code is never flushed.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have its source code available so that
  // it can be recompiled if it is needed again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the
  // heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
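  // The mode mask selects which kinds of relocation entries the
  // RelocIterator below stops at; each matching entry dispatches to the
  // corresponding ObjectVisitor callback via RelocInfo::Visit.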
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_