// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {

template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}

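// Dispatch note: Map::visitor_id() is a small enum value assigned once when
// the map is created, so visitor selection is a single indexed load from the
// callback table rather than a type switch. A minimal usage sketch
// (hypothetical call site, assuming a table already populated by
// Initialize()):
//
//   Callback visit = table_.GetVisitor(object->map());
//   visit(object->map(), object);
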
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

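// The RegisterSpecializations calls above are assumed to fill the whole id
// range [kVisitDataObject, kVisitDataObjectGeneric) (and likewise for the
// JSObject and Struct ranges) with size-specialized variants of one visitor,
// so small objects of a known instance size dispatch to a visit function
// with a compile-time size constant and only the generic id falls back to a
// runtime size lookup.
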
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  JSArrayBuffer::JSArrayBufferIterateBody<
      StaticNewSpaceVisitor<StaticVisitor> >(heap, object);
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
  return JSArrayBuffer::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}

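// The slot range visited above covers exactly one word: the constant pool
// pointer, which is the only tagged field in a BytecodeArray header. The
// bytecodes following kHeaderSize are raw bytes, so the scavenger only needs
// the returned total size to advance past them.
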
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}

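// A code-entry slot is special: it holds the address of the first instruction
// of a Code object, not a tagged pointer to the object itself.
// GetObjectFromEntryAddress recovers the Code object from that interior
// address, and RecordCodeEntrySlot tells the compactor to rewrite the slot as
// an entry address rather than as a tagged word.
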
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

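// ICUtility::Clear patches the call site in place, so the target is re-read
// from the relocation info afterwards: the slot now refers to the cleared
// (uninitialized) IC stub, and it is that stub which must be recorded and
// marked, not the one just flushed.
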
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(object, slot, *slot);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
                                                           *slot);
    } else {
      // If we do not know about the liveness of the values of weak cells, we
      // have to process them when we know the liveness of the whole
      // transitive closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}

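// The deferred branch threads the cell onto a singly linked list rooted at
// Heap::encountered_weak_cells(); once marking has reached a fixed point, the
// collector walks that list and clears every cell whose value was never
// marked. The weak write barrier variant is used so that this bookkeeping
// link itself does not keep the rest of the list strongly alive.
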
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

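// The two STATIC_ASSERTs pin down the layout this visitor depends on: table
// and next must be the last two words of a JSWeakCollection, so the pointer
// walk over [kPropertiesOffset, kTableOffset) covers every other tagged
// field. MarkObjectWithoutPush then marks the backing table live without
// tracing into it, which is what keeps the key/value entries weak until the
// collector post-processes the tables.
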
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if ((FLAG_flush_optimized_code_cache ||
       heap->isolate()->serializer_enabled()) &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      MarkOptimizedCodeMap(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing;
      // this is needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

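// Code-flushing protocol in brief: a flushable candidate gets only its
// non-code fields visited (the weak-code path above), leaving the code object
// unmarked for now. After marking, the CodeFlusher revisits each candidate
// and either keeps the code, if something else marked it, or replaces it with
// the lazy-compile stub so it can be regenerated on demand.
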
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit the shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

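// Note the asymmetry with the flushable path: a function whose code is kept
// must also keep the unoptimized code of every function inlined into its
// optimized code (MarkInlinedFunctionsCode), because deoptimization may bail
// out into any of those inlined frames.
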
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->GetInObjectProperties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  JSArrayBuffer::JSArrayBufferIterateBody<StaticVisitor>(heap, object);
  if (!JSArrayBuffer::cast(object)->is_external() &&
      !heap->InNewSpace(object)) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kHeaderSize));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields of
  // this map are visited. Prototype maps don't keep track of transitions, so
  // just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

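// Descriptor arrays are shared along transition chains, and each map owns
// only a prefix of the entries (NumberOfOwnDescriptors). Marking just the
// owned slice, instead of the whole array, allows descriptors owned solely
// by dead transition targets to be reclaimed together with those maps.
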
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, transitions,
                                transitions->GetPrototypeTransitionsSlot());
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending
  // on the age of the code object, we treat it as a strong or a weak
  // reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // For an optimized function we should retain both the non-optimized version
  // of its code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* const literals = data->LiteralArray();
  int const inlined_count = data->InlinedFunctionCount()->value();
  for (int i = 0; i < inlined_count; ++i) {
    StaticVisitor::MarkObject(
        heap, SharedFunctionInfo::cast(literals->get(i))->code());
  }
}

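// This relies on the deoptimization data layout: the first
// InlinedFunctionCount() entries of the literal array are the
// SharedFunctionInfos of the inlined functions, so the loop above marks
// exactly the unoptimized code of every function inlined into this code
// object.
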
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}

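// Read as a whole, the predicate is a conjunction: the code is not already
// marked, the function is compiled with its source still available, it is
// not an API function, its code is ordinary FUNCTION-kind, it may be lazily
// recompiled, it is neither a generator nor a top-level script wrapper, it
// was not installed via %SetCode, and (with code aging enabled) the code is
// old. A single failing clause keeps the code alive for this GC cycle.
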
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  VisitCodeEntry(heap, object,
                 object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

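// The Strong/Weak visitor pairs differ in exactly one slot: the strong
// variants visit the code reference (kCodeOffset on SharedFunctionInfo, the
// code entry on JSFunction) while the weak variants step over it, using the
// STATIC_ASSERTed layout to resume immediately after the skipped word. The
// code flusher later either restores or replaces the skipped reference.
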
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}

template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

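// mode_mask selects every RelocInfo kind that can carry a reference the
// visitor needs to see (code targets, embedded objects, cells, external and
// internal references, runtime entries, debug break slots). RelocIterator
// then dispatches each entry to the mode-specific hook; an EMBEDDED_OBJECT
// entry, for example, ends up in StaticMarkingVisitor::VisitEmbeddedPointer
// above.
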
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_