// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "src/allocation.h"
#include "src/layout-descriptor.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject's body with a normal ObjectVisitor requires
// performing two switches on the object's instance type (to determine the
// object's size and layout) and one or more virtual method calls on the
// visitor itself.
// A static visitor is different: it provides a dispatch table that contains
// pointers to specialized visit functions. Each map has a visitor_id field
// that contains the index of the specialized visitor to use.
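//
// For illustration, dispatch at a visit site amounts to one table lookup and
// one indirect call (cf. StaticNewSpaceVisitor::IterateBody below); this is
// only a sketch:
//
//   Callback visit = table_.GetVisitor(object->map());  // index: visitor_id
//   int object_size = visit(object->map(), object);     // no type switches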

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(ConstantPoolArray)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(WeakCell)              \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs, along with the generic visitor
  // that can visit an object of any size, we provide visitors specialized by
  // object size in words.
  // The ids of specialized visitors are declared in linear order (without
  // holes), starting from the id of the visitor specialized for 2-word
  // objects (the base visitor id) and ending with the id of the generic
  // visitor.
  // The method GetVisitorIdForSize depends on this ordering to calculate the
  // visitor id of a specialized visitor from a given instance size, the base
  // visitor id and the generic visitor's id.
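  // As a worked example of that calculation (assuming 8-byte pointers, so
  // kPointerSizeLog2 == 3): a 4-word (32-byte) struct yields
  //   kVisitStruct + (32 >> 3) - kMinObjectSizeInWords
  //     == kVisitStruct2 + 4 - 2 == kVisitStruct4,
  // and any size beyond 9 words is clamped to kVisitStructGeneric by Min().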
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for the given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);

  // Determine which specialized visitor should be used for the given map.
  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size(),
                        FLAG_unbox_double_fields &&
                            !map->layout_descriptor()->IsFastPointerLayout());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // based on object size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    if (has_unboxed_fields) return generic;

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};


template <typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy to guarantee that during the update every
    // element of the callbacks_ array remains a correct pointer (memcpy
    // might be implemented as a byte copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
  }

  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic, int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
             &Visitor::template VisitSpecialized<size>);
  }

  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
                  10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};
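
// A minimal usage sketch (with a hypothetical SomeVisitor): a static
// visitor's Initialize() typically fills its table by mixing size-specialized
// and single-purpose registrations:
//
//   table_.RegisterSpecializations<SomeVisitor, kVisitDataObject,
//                                  kVisitDataObjectGeneric>();
//   table_.Register(kVisitByteArray, &SomeVisitor::VisitByteArray);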


template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    DCHECK(!FLAG_unbox_double_fields ||
           object->map()->layout_descriptor()->IsFastPointerLayout());
    IterateRawPointers(heap, object, start_offset, end_offset);
  }

  INLINE(static void IterateBody(Heap* heap, HeapObject* object,
                                 int start_offset, int end_offset)) {
    if (!FLAG_unbox_double_fields ||
        object->map()->layout_descriptor()->IsFastPointerLayout()) {
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }

 private:
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
    StaticVisitor::VisitPointers(heap,
                                 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }

  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
    DCHECK(FLAG_unbox_double_fields);
    DCHECK(IsAligned(start_offset, kPointerSize) &&
           IsAligned(end_offset, kPointerSize));

    InobjectPropertiesHelper helper(object->map());
    DCHECK(!helper.all_fields_tagged());

    for (int offset = start_offset; offset < end_offset;
         offset += kPointerSize) {
      // Visit tagged fields only.
      if (helper.IsTagged(offset)) {
        // TODO(ishell): call this once for contiguous region of tagged fields.
        IterateRawPointers(heap, object, offset, offset + kPointerSize);
      }
    }
  }
};


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IterateBody(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }

  template <int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
                                                BodyDescriptor::kStartOffset,
                                                BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
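
// Concrete body visitors are obtained by instantiating one of the two
// templates above with a body descriptor, e.g. (mirroring the typedefs that
// appear further below):
//
//   typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
//       StructVisitor;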


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by the dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap, HeapObject::RawField(object,
                                   JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is another example of the Curiously Recurring Template Pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weakly.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;


class WeakObjectRetainer;


// A weak list is a singly-linked list where each element has a weak pointer
// to the next element. Given the head of the list, this function removes dead
// elements from the list and, if requested, records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
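
// A sketch of the WeakListVisitor contract that VisitWeakList relies on
// (member names here are illustrative; the concrete specializations live in
// the corresponding .cc file):
//
//   template <class T>
//   struct WeakListVisitor {
//     static void SetWeakNext(T* obj, Object* next);
//     static Object* WeakNext(T* obj);
//     static int WeakNextOffset();
//     static void VisitLiveObject(Heap* heap, T* obj,
//                                 WeakObjectRetainer* retainer);
//     static void VisitPhantomObject(Heap* heap, T* obj);
//   };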

} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_H_