// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

namespace v8 {
namespace internal {
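
// This header provides the inline bodies of the two table-driven visitor
// templates declared in objects-visiting.h: StaticNewSpaceVisitor, used by
// the scavenger, and StaticMarkingVisitor, used by the mark-compact
// collector. Each Initialize() below fills the per-visitor-id callback
// table that is consulted when an object of the corresponding map is
// visited.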

template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
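
// A minimal sketch of how the table built above is consumed. The dispatch
// helper lives in objects-visiting.h, so the exact names here are assumed:
//
//   template <typename StaticVisitor>
//   int StaticNewSpaceVisitor<StaticVisitor>::IterateBody(Map* map,
//                                                         HeapObject* obj) {
//     // The callback registered for the map's visitor id returns the
//     // object's size, as required by the int template parameter above.
//     return table_.GetVisitor(map)(map, obj);
//   }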

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}
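
// The two VisitPointers calls above deliberately step around the weak-next
// and first-view fields, which link array buffers into a list that the GC
// processes separately. The assumed layout, implied by the STATIC_ASSERT:
//
//   [BodyDescriptor::kStartOffset, kWeakNextOffset)        visited strongly
//   [kWeakNextOffset, kWeakNextOffset + 2 * kPointerSize)  weak list links
//   [..., kSizeWithInternalFields)                         visited strongly
//
// VisitJSTypedArray and VisitJSDataView below follow the same pattern with
// a single weak-next field.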

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
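
// Unlike the new-space table above, this table routes objects that contain
// weak or specially treated slots (maps, code, contexts, property and weak
// cells, shared function infos, ...) to the dedicated visitors defined
// below, and leaves kVisitJSRegExp to the concrete StaticVisitor.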

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}
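
// Note that the weak slots of the native context are only recorded above so
// the compactor can update them if their targets move; marking through them
// is deliberately skipped, which is what keeps these context links weak.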

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue weak cell in linked list of encountered weak collections.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}
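
// The list built here is consumed after marking completes. A sketch of the
// assumed traversal (the real processing lives in the mark-compact
// collector, not in this header):
//
//   Object* cur = heap->encountered_weak_cells();
//   while (cur != heap->undefined_value()) {
//     WeakCell* cell = WeakCell::cast(cur);
//     cur = cell->next();
//     // ... clear the cell's value if its target was not marked ...
//   }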

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}
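
// As with weak cells, the encountered_weak_collections list is walked once
// marking is done; marking the table without pushing it lets the collector
// treat the table's entries as ephemerons rather than strong references.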

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}
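
// To summarize the paths above: with code flushing enabled, a flushable
// SharedFunctionInfo is queued on the code flusher and its code field is
// treated weakly; in every other case the code field is kept alive via
// VisitSharedFunctionInfoStrongCode.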

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}
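
// This is the visitor a concrete StaticVisitor can register for
// kVisitJSRegExp (see the note in Initialize above); it visits every field
// up to and including the in-object properties strongly.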

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}

inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}

inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}
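
// Taken together: code is flushable only for an unmarked, compiled, lazily
// recompilable, sufficiently old plain function whose source is still
// available and that is not an API, generator, toplevel or
// %SetCode-initialized function.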

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
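
// The only difference from VisitJSFunctionStrongCode above is that the code
// entry field is skipped rather than visited; that skipped slot is exactly
// what makes a flushing candidate's reference to its code weak.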

void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}

template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_