// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

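// Static visitor used while iterating objects in new space (scavenging).
// Initialize() fills the dispatch table with one visit function per visitor
// id, and each visit function returns the size of the visited object so the
// caller can advance to the next object. Dispatch goes through the object's
// map, roughly table_.GetVisitor(map)(map, object); see IterateBody in
// objects-visiting.h.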
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


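// A JSArrayBuffer has two consecutive weak fields (weak next and weak first
// view) that are skipped here; only the pointer fields before and after them
// are visited. The returned size lets the scavenger step over the object.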
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


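// Static visitor used by the mark-compact collector's marking phase. Unlike
// the new-space visitor above, the visit functions return void, treat a
// number of references weakly, and record slots with the collector so they
// can be updated during compaction.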
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


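// Marks the Code object reachable through a code entry field (for example a
// JSFunction's code entry) and records the slot for pointer updating.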
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


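// Marks an object embedded in code via relocation info. The slot is always
// recorded; the object itself is only marked if the host code does not treat
// it as weak.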
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


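// Marks the Code object referenced by a code-target relocation. Inline cache
// stubs may first be cleared (reset to an uninitialized state) so that they
// do not keep contexts or maps alive across the GC.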
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC ||
       (heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


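// Visits a native context. The strong slots are handled by the body
// descriptor; the weak slots (FIRST_WEAK_SLOT up to NATIVE_CONTEXT_SLOTS) are
// not marked through here, only recorded with the collector so they can be
// relocated.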
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


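// Visits a Map. With map collection enabled, transitions, back pointers and
// descriptors are marked through MarkMapContents so that those links stay
// weak; otherwise all pointer fields are visited strongly.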
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


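// Visits a PropertyCell. When map collection is enabled its dependent code
// array is marked without being pushed onto the marking deque, keeping those
// references weak; the remaining pointer fields are visited strongly.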
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array but do not push it onto
    // the marking stack; this makes references from it weak. Dead code is
    // cleaned up when we iterate over property cells in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}


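// Visits an AllocationSite. As with property cells, its dependent code array
// is marked weakly when map collection is enabled; all other pointer fields
// are visited strongly.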
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array but do not push it onto
    // the marking stack; this makes references from it weak. Dead code is
    // cleaned up when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


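// Visits a JSWeakCollection (WeakMap/WeakSet). The collection is enqueued for
// post-processing, its backing table is marked without being pushed so the
// key/value entries stay weak, and the next-link slot is skipped entirely.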
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


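// Visits a Code object: ages it (when --age-code is enabled and we are not
// serializing) and then iterates its body, including relocation info.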
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


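// Visits a SharedFunctionInfo. Besides resetting per-context IC information
// and (optionally) the optimized code map, this decides whether the
// function's code may be flushed: flushable candidates are registered with
// the code flusher and their code reference is treated weakly.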
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


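// Visits a JSFunction. If code flushing is enabled and the function looks
// flushable, it is registered as a candidate and its code is treated weakly;
// otherwise the unoptimized code (and the code of any inlined functions) is
// marked strongly to keep it from being flushed.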
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


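// Marking counterparts of the new-space array-buffer/typed-array/data-view
// visitors above: the weak next (and, for array buffers, first-view) fields
// are skipped so the corresponding lists stay weak.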
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


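// Marks the parts of a Map whose links must stay weak: the back pointer, the
// transition array, the map's own descriptors and its dependent code array,
// followed by the regular pointer fields.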
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the prototype dependent code array but do not push it onto the
  // marking stack; this makes references from it weak. Dead code is cleaned
  // up when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array but do not push it onto the
    // marking stack; this makes references from it weak. Dead prototype
    // transitions are cleaned up in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


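// Code-flushing heuristics: a JSFunction (or a SharedFunctionInfo, below) is
// flushable only if its code is not already marked, it is an ordinary lazily
// compilable function with source available, and its code is old enough.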
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


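// The "strong code" / "weak code" visitor pairs below visit the same objects
// but differ in whether the code field (SharedFunctionInfo::kCodeOffset,
// JSFunction::kCodeEntryOffset) is visited strongly or skipped so that the
// code flusher can treat it weakly.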
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


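// Iterates the body of a Code object: the pointer-typed header fields first,
// then every relocation entry whose mode is in mode_mask. The dynamic
// (ObjectVisitor) version and the templated static version below must be kept
// in sync.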
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_