// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

namespace v8 {
namespace internal {
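
// Dispatch-table setup for the new-space (scavenge) visitor. Each body
// visitor is instantiated with an int result type: visiting an object
// returns its size, as the VisitJS* helpers below make explicit.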
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitFreeSpace, &VisitFreeSpace);
  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
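
// The two pointer ranges visited below bracket the buffer's weak fields:
// kWeakNextOffset and (per the STATIC_ASSERT in the body) the adjacent
// kWeakFirstViewOffset are skipped, which keeps the views list weak.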
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}
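
// As in VisitJSArrayBuffer above, but a typed array has a single weak field
// (kWeakNextOffset), so only one pointer-sized hole is skipped.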
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}
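
// Dispatch-table setup for the full (mark-compact) marking visitor. In
// contrast to the new-space table above, the body visitors are instantiated
// with a void result type: marking does not need object sizes.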
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
  table_.Register(kVisitNativeContext, &VisitNativeContext);
  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);
  table_.Register(kVisitCode, &VisitCode);
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);
  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
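
// Marks the Code object that a code-entry field points to, and records the
// slot so the entry address can be updated if the code object is relocated.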
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here, and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}
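
// When the debugger has patched a return sequence or a debug break slot (see
// the DCHECK below), the patched-in call target is a Code object that must be
// marked and have its reloc slot recorded like any other code target.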
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC ||
       (heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
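
// The strong fields of a native context are handled by the body visitor; the
// weak slots (FIRST_WEAK_SLOT and up) are only recorded, so they are updated
// on compaction rather than marked here.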
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the property cell's dependent code array but do not push it onto
    // the marking stack; this makes references from it weak. Dead code is
    // cleaned out when we iterate over property cells in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  Object* undefined = heap->undefined_value();
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next() == undefined && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells());
    heap->set_encountered_weak_cells(weak_cell);
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the allocation site's dependent code array but do not push it
    // onto the marking stack; this makes references from it weak. Dead code
    // is cleaned out when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}
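
// Ages the code object (so code that stays unused across GCs can eventually
// be flushed) and then visits the code body, including its relocation info.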
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo, because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing;
      // this is needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}
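
// Code pointers in the constant pool are visited as code entries; heap
// pointers have their slots recorded and are marked strongly unless the
// pool's weak-object state classifies them as weak.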
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo, because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit the shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization, because
      // the shared function info would eventually be visited anyway.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}
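
// A JSRegExp is visited like a plain JSObject here: everything from the
// properties field up to the last in-object property is treated as strong.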
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the array of prototype-dependent code but do not push it onto the
  // marking stack; this makes references from it weak. Dead code is cleaned
  // out when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array but do not push it onto the
    // marking stack; this makes references from it weak. Dead prototype
    // transitions are cleaned in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}
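
// Predicates used by the code-flushing heuristics (IsFlushable) below.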
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // so that it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must allow lazy compilation.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}
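
// "Strong code" visitation: the entire SharedFunctionInfo body, including
// the code field, is visited, which keeps the compiled code alive.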
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
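
// The strong variant below visits the code entry via VisitCodeEntry; the
// weak variant after it skips that slot, leaving the code reference weak so
// the code-flushing machinery above can reclaim it.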
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
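
// Iterates a Code object's header pointer fields, then every pointer
// reachable through relocation entries matching mode_mask.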
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}
template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_