// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

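// StaticNewSpaceVisitor is used by the scavenger. Initialize() fills in the
// static dispatch table so that each object is visited by a routine
// specialized for its map's visitor id, without any virtual dispatch.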
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


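// JSArrayBuffer keeps two weak pointer fields, the weak-next list link and
// the first-view link (consecutive, per the STATIC_ASSERT below); the two
// visited pointer ranges deliberately step around them.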
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  VisitPointers(heap, HeapObject::RawField(
                          object, JSArrayBuffer::BodyDescriptor::kStartOffset),
                HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  return JSArrayBuffer::kSizeWithInternalFields;
}


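// JSTypedArray and JSDataView each carry only the single weak-next field,
// so the two visited ranges below skip exactly one pointer slot.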
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}


template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}


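// StaticMarkingVisitor implements the marking phase of mark-compact. In
// contrast to the scavenger table above, several types get custom visitors
// that treat selected fields weakly (maps, code, shared function infos,
// property cells, weak collections).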
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);

  table_.Register(kVisitConstantPoolArray, &VisitConstantPoolArray);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);

  table_.Register(kVisitJSDataView, &VisitJSDataView);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


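// The RelocInfo-based visitors below record visited slots with the
// mark-compact collector wherever the target might be relocated, so that
// the slots can be updated after objects move.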
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  // No need to record slots because the cell space is not compacted during GC.
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
       target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
       heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age() ||
       target->is_invalidated_weak_stub())) {
    IC::Clear(heap->isolate(), rinfo->pc(), rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


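// The fixed body visit below covers the strong slots of a native context;
// the weak slots (FIRST_WEAK_SLOT onwards) are not marked through, only
// recorded so that they can be updated if their targets move.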
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(slot, slot, *slot);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent codes array, but do not push it onto the marking
    // stack; this makes references from it weak. Dead code is cleaned up
    // when we iterate over property cells in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  Object** slot =
      HeapObject::RawField(object, AllocationSite::kDependentCodeOffset);
  if (FLAG_collect_maps) {
    // Mark the dependent codes array, but do not push it onto the marking
    // stack; this makes references from it weak. Dead code is cleaned up
    // when we iterate over allocation sites in ClearNonLiveReferences.
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  } else {
    StaticVisitor::VisitPointer(heap, slot);
  }

  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


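// If code aging is enabled, every full GC ages the code object; code that
// has become old is a candidate for flushing (see IsFlushable below).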
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfo();
  }
  if (FLAG_cache_optimized_code && FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by the flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing.
      // This is needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


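// Constant pools hold both raw code entry addresses and tagged heap
// pointers. Code entries are visited via VisitCodeEntry; heap pointers are
// marked strongly unless the pool's weak object state declares them weak.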
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitConstantPoolArray(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  ConstantPoolArray* array = ConstantPoolArray::cast(object);
  ConstantPoolArray::Iterator code_iter(array, ConstantPoolArray::CODE_PTR);
  while (!code_iter.is_finished()) {
    Address code_entry = reinterpret_cast<Address>(
        array->RawFieldOfElementAt(code_iter.next_index()));
    StaticVisitor::VisitCodeEntry(heap, code_entry);
  }

  ConstantPoolArray::Iterator heap_iter(array, ConstantPoolArray::HEAP_PTR);
  while (!heap_iter.is_finished()) {
    Object** slot = array->RawFieldOfElementAt(heap_iter.next_index());
    HeapObject* object = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, object);
    bool is_weak_object =
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE &&
         Code::IsWeakObjectInOptimizedCode(object)) ||
        (array->get_weak_object_state() ==
             ConstantPoolArray::WEAK_OBJECTS_IN_IC &&
         Code::IsWeakObjectInIC(object));
    if (!is_weak_object) {
      StaticVisitor::MarkObject(heap, object);
    }
  }
}


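// Mirrors the SharedFunctionInfo case: a flushable function is queued as a
// candidate and its code entry is treated weakly; otherwise the unoptimized
// code is marked to keep it alive.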
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  STATIC_ASSERT(JSArrayBuffer::kWeakFirstViewOffset ==
                JSArrayBuffer::kWeakNextOffset + kPointerSize);
  StaticVisitor::VisitPointers(
      heap,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(
                object, JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(), HeapObject::RawField(
                          object, JSTypedArray::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset));
  StaticVisitor::VisitPointers(
      map->GetHeap(),
      HeapObject::RawField(object, JSDataView::kWeakNextOffset + kPointerSize),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}


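// Called from VisitMap when map collection is enabled. Transitions,
// descriptors, and dependent code are treated specially so that dead
// transitions can be cleared after marking.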
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    MarkTransitionArray(heap, transitions);
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap, descriptors->GetFirstElementAddress(),
                                 descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
                                 descriptors->GetDescriptorStartSlot(start),
                                 descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the prototype dependent codes array, but do not push it onto the
  // marking stack; this makes references from it weak. Dead code is cleaned
  // up when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this makes references from it weak. Dead prototype
    // transitions are cleaned in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // Skip in absence of inlining.
  // TODO(turbofan): Revisit once we support inlining.
  if (code->is_turbofanned()) return;
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value(); i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


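// Helpers used by the code flushing heuristics below.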
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


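// A JSFunction is flushable only if its code is not otherwise reachable, it
// lives in a valid non-builtin context, it has no optimized code installed,
// and (with code aging) its code has grown old; the SharedFunctionInfo
// checks below must pass as well.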
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code; young code is never flushed.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }
  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }
  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must allow lazy compilation.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


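// The Visit*StrongCode/Visit*WeakCode pairs below differ only in whether the
// code field (kCodeOffset resp. kCodeEntryOffset) is visited like any other
// pointer or skipped, leaving it to the code flusher to process.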
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


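// Code body iteration visits the code object's tagged header fields and
// every reference embedded in the instruction stream, as enumerated by the
// relocation modes selected in mode_mask.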
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);
  IteratePointer(v, kConstantPoolOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}


template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));
  StaticVisitor::VisitPointer(
      heap, reinterpret_cast<Object**>(this->address() + kConstantPoolOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_