// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

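// This file wires up the static visitor dispatch tables declared in
// objects-visiting.h: StaticNewSpaceVisitor is used when iterating objects
// in new space (its callbacks return the object size as an int), while
// StaticMarkingVisitor drives marking for mark-compact (its callbacks
// return void and record slots with the collector where needed).
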
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


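// Usage sketch (hypothetical caller; an IterateBody-style entry point is
// assumed to be declared in objects-visiting.h): iteration dispatches on the
// map's visitor id, roughly
//   int size = StaticNewSpaceVisitor<SomeVisitor>::IterateBody(map, object);
// Each registered callback returns the object's size, which lets the caller
// walk a new-space page object by object.

// The three buffer-view visitors below visit the object body on either side
// of the weak fields (kWeakNextOffset, and for array buffers also
// kWeakFirstViewOffset), leaving those weak links to be processed separately.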
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


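// The marking visitor's table mirrors the new-space table above, with two
// differences visible below: callbacks return void instead of a size, and
// pointer-free objects (byte arrays, sequential strings, free space, the
// fixed typed arrays) are handled by DataObjectVisitor since there is
// nothing to mark inside them.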
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


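// The RelocInfo visitors below mark objects reachable from generated code.
// Each records the slot with the mark-compact collector before marking, so
// embedded pointers can be updated if their targets move during compaction;
// cells are the exception, since cell space is never compacted.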
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


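// Native contexts hold both strong slots and weak slots (everything from
// Context::FIRST_WEAK_SLOT onwards). The strong part is visited through the
// body descriptor; the weak slots are only recorded with the collector so
// they can be updated, and are cleared separately if their targets die.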
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


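// Maps are visited specially: when map collection is enabled, transitions,
// back pointers and descriptor arrays are marked through MarkMapContents
// below so those links stay weak; otherwise all pointer fields are simply
// marked strongly.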
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


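// Code objects age across GCs: when code aging is on (and we are not
// serializing), each full GC makes the code object "older", which is what
// eventually lets unused code become a flushing candidate (see IsFlushable
// below, which checks Code::IsOld()).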
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


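// Shared function infos are the pivot of code flushing: when flushing is
// enabled, a flushable candidate is registered with the code flusher and its
// code field is visited weakly (VisitSharedFunctionInfoWeakCode); otherwise
// the code is kept alive via the strong variant at the end of the function.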
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


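// Visits a JSRegExp's fields from the properties slot up to the last
// in-object property. Registration for kVisitJSRegExp is left to the
// StaticVisitor subclass (see the note in Initialize above).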
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsSimpleTransition(raw_transitions)) {
    StaticVisitor::VisitPointer(
        heap, HeapObject::RawField(map, Map::kTransitionsOffset));
  }
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveReferences.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


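// Helper predicates for the code-flushing decision below. A function is only
// a flushing candidate if its context is still a valid, non-builtin context
// and its source is available for lazy recompilation.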
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


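// The "strong" and "weak" body visitors below differ only in how they treat
// the code field: the strong variants visit every pointer field, while the
// weak variants skip SharedFunctionInfo::kCodeOffset respectively
// JSFunction::kCodeEntryOffset, leaving the code reference for the code
// flusher to process after marking.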
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


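// The two CodeIterateBody variants below walk a Code object's body: first
// the raw pointer fields (relocation info, handler table, deoptimization
// data, type feedback info, next code link, constant pool), then every
// RelocInfo entry selected by mode_mask. One takes a dynamic ObjectVisitor,
// the other dispatches statically; they must be kept in sync.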
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_