// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {

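// Populates the static dispatch table that maps visitor ids to the body
// visitors used when scavenging objects in new space.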
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(kVisitFixedTypedArray, &VisitFixedTypedArray);
  table_.Register(kVisitFixedFloat64Array, &VisitFixedTypedArray);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  table_.Register(kVisitFreeSpace, &VisitFreeSpace);
  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);
  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

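// Visits the pointer fields of a new-space JSArrayBuffer and, if the buffer
// is not externally managed, reports its backing store as live to the heap.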
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->RegisterLiveArrayBuffer(true,
                                  JSArrayBuffer::cast(object)->backing_store());
  }
  return JSArrayBuffer::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
  return JSTypedArray::kSizeWithInternalFields;
}

template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
  return JSDataView::kSizeWithInternalFields;
}

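// Populates the static dispatch table used by the mark-compact collector's
// marking visitor.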
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedTypedArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFixedFloat64Array, &DataObjectVisitor::Visit);
  table_.Register(kVisitNativeContext, &VisitNativeContext);
  table_.Register(kVisitAllocationSite, &VisitAllocationSite);
  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitBytecodeArray, &DataObjectVisitor::Visit);
  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);
  table_.Register(kVisitCode, &VisitCode);
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
  table_.Register(kVisitJSFunction, &VisitJSFunction);
  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
  table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
  table_.Register(kVisitJSDataView, &VisitJSDataView);
  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);
  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

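// Marks the Code object referenced through a code entry field and records
// the slot so it can be updated if the code object moves.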
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}

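// Visits a heap object embedded in a code object's relocation info.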
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here and record slots for weakly embedded objects during clearing
  // of non-live references in mark-compact.
  if (!rinfo->host()->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, cell);
  if (!rinfo->host()->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

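// Visits the strong part of a native context; the weak slots past
// FIRST_WEAK_SLOT are only recorded so they can be updated during compaction.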
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT; idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot = Context::cast(object)->RawFieldOfElementAt(idx);
    collector->RecordSlot(object, slot, *slot);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

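// Defers processing of a WeakCell by chaining it into the heap's list of
// encountered weak cells; the cell's value is deliberately not marked here.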
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    weak_cell->set_next(heap->encountered_weak_cells(),
                        UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_weak_cells(weak_cell);
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSWeakCollection::kPropertiesOffset),
      HeapObject::RawField(object, JSWeakCollection::kTableOffset));
  STATIC_ASSERT(JSWeakCollection::kTableOffset + kPointerSize ==
                JSWeakCollection::kNextOffset);
  STATIC_ASSERT(JSWeakCollection::kNextOffset + kPointerSize ==
                JSWeakCollection::kSize);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

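// Visits a Code object, ageing it when code ageing is enabled, and then
// iterates its body.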
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}

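// Visits a SharedFunctionInfo and decides whether its code should be treated
// as a code-flushing candidate (weak code reference) or kept alive strongly.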
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if ((FLAG_flush_optimized_code_cache ||
       heap->isolate()->serializer_enabled()) &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat some references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      MarkOptimizedCodeMap(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (!shared->optimized_code_map()->IsSmi()) {
      // Flush the optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

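// Visits a JSFunction, either registering it as a code-flushing candidate or
// keeping its code (and the code of any inlined functions) strongly reachable.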
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit the shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
  if (!JSArrayBuffer::cast(object)->is_external()) {
    heap->RegisterLiveArrayBuffer(false,
                                  JSArrayBuffer::cast(object)->backing_store());
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSTypedArray::kSizeWithInternalFields));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(Map* map,
                                                          HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
      HeapObject::RawField(object, JSDataView::kSizeWithInternalFields));
}

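// Marks the contents of a Map: only the descriptors owned by the map are
// visited, and the transition array is marked without being pushed on the
// marking deque.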
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  Object* raw_transitions = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(raw_transitions)) {
    MarkTransitionArray(heap, TransitionArray::cast(raw_transitions));
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a non-empty
  // descriptor array is marked, its header is also visited. The slot holding
  // the descriptor array will be implicitly recorded when the pointer fields
  // of this map are visited. Prototype maps don't keep track of transitions,
  // so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  if (transitions->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, transitions,
                                transitions->GetPrototypeTransitionsSlot());
  }

  int num_transitions = TransitionArray::NumberOfTransitions(transitions);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, transitions, transitions->GetKeySlot(i));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkOptimizedCodeMap(
    Heap* heap, FixedArray* code_map) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, code_map)) return;

  // Mark the context-independent entry in the optimized code map. Depending on
  // the age of the code object, we treat it as a strong or a weak reference.
  Object* shared_object = code_map->get(SharedFunctionInfo::kSharedCodeIndex);
  if (FLAG_turbo_preserve_shared_code && shared_object->IsCode() &&
      FLAG_age_code && !Code::cast(shared_object)->IsOld()) {
    StaticVisitor::VisitPointer(
        heap, code_map,
        code_map->RawFieldOfElementAt(SharedFunctionInfo::kSharedCodeIndex));
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(Heap* heap,
                                                                   Code* code) {
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* const data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* const literals = data->LiteralArray();
  int const inlined_count = data->InlinedFunctionCount()->value();
  for (int i = 0; i < inlined_count; ++i) {
    StaticVisitor::MarkObject(
        heap, SharedFunctionInfo::cast(literals->get(i))->code());
  }
}

inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
         !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}

inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

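// Returns true if the code attached to this JSFunction may be flushed, i.e.
// discarded and later recompiled lazily from source when needed again.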
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Code is either on the stack, in the compilation cache, or referenced by
  // an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on the stack, in the compilation cache, or referenced by
  // an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }
  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }
  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }
  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }
  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }
  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }
  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }
  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }
  // Check the age of the code. If code ageing is disabled we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }
  return true;
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  VisitCodeEntry(heap, object,
                 object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot = HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

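// Iterates the pointer fields and relocation information of a Code object
// using a dynamic ObjectVisitor.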
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);
  IterateNextCodeLink(v, kNextCodeLinkOffset);

  RelocIterator it(this, mode_mask);
  Isolate* isolate = this->GetIsolate();
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(isolate, v);
  }
}

template <typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY) |
                  RelocInfo::kDebugBreakSlotMask;

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap, this,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
  StaticVisitor::VisitNextCodeLink(
      heap, reinterpret_cast<Object**>(this->address() + kNextCodeLinkOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}

}
}  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_