// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

namespace v8 {
namespace internal {

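// Fills the dispatch table used by the new-space (scavenge) visitor: each
// visitor id is mapped to the routine that knows how to walk the pointer
// fields of objects of that kind.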
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitFreeSpace, &VisitFreeSpace);
  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}
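
// Fills the dispatch table used by the mark-compact marking visitor. Unlike
// the new-space table above, several kinds get dedicated handlers that treat
// some of their fields weakly (maps, code, property cells, allocation sites).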
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);
  table_.Register(kVisitNativeContext, &VisitNativeContext);
  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);
  table_.Register(kVisitCode, &VisitCode);
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}
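
// Marks the Code object referenced from a code-entry field and records the
// slot so that it can be updated if the code object is relocated.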
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded object during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC ||
       (heap->flush_monomorphic_ics() && !target->is_weak_stub()) ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}
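
// Maps get special treatment: when map collection is enabled, transition and
// back-pointer links are marked weakly via MarkMapContents; otherwise all
// pointer fields of the map are visited strongly.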
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark property cell dependent codes array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // codes when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark allocation site dependent codes array but do not push it onto
    // marking stack, this will make references from it weak. We will clean
    // dead codes when we iterate over allocation sites in
    // ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue weak collection in linked list of encountered weak collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections, both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // Partially initialized weak collection is enqueued, but table is ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}
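
// Visits a SharedFunctionInfo and, when code flushing is enabled, registers
// it with the code flusher; in that case the code and optimized code map
// references are treated weakly.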
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}
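
// Visits a JSFunction. If the closure is a flushing candidate its code
// reference is treated weakly; otherwise its unoptimized code (and the code
// of any inlined functions) is marked to keep it from being flushed.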
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}
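
// Marks the contents of a map while keeping the transition, descriptor and
// dependent-code links weak, so that dead transitions and code dependencies
// can be cleared after marking.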
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}

inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}

inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}
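
// Returns true if the code attached to this closure may be flushed: the code
// is unmarked and unoptimized, the context is a valid non-builtin context,
// the code is old enough, and the underlying SharedFunctionInfo qualifies.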
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }
  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }
  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }
  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }
  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }
  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }
  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }
  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }
  // Check age of code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }
  return true;
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
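
// Non-templated code-body iteration used with dynamic ObjectVisitors; the
// templated version below is the static-visitor counterpart and the two must
// be kept in sync.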
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}

template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_