1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_OBJECTS_VISITING_H_
6 #define V8_OBJECTS_VISITING_H_
8 #include "src/allocation.h"
9 #include "src/heap/heap.h"
10 #include "src/heap/spaces.h"
11 #include "src/layout-descriptor.h"
13 // This file provides base classes and auxiliary methods for defining
14 // static object visitors used during GC.
15 // Visiting a HeapObject body with a normal ObjectVisitor requires performing
16 // two switches on the object's instance type to determine object size and layout
17 // and one or more virtual method calls on the visitor itself.
18 // Static visitor is different: it provides a dispatch table which contains
19 // pointers to specialized visit functions. Each map has the visitor_id
20 // field which contains an index of specialized visitor to use.
26 // Base class for all static visitors.
// NOTE(review): this chunk is elided — the full VISITOR_ID_LIST, the
// VisitorId enum opener, access specifiers and the closing braces fall
// outside the visible lines; comments below describe only what is shown.
27 class StaticVisitorBase : public AllStatic {
// X-macro list of visitor kinds; every entry becomes a kVisit<Name>
// enumerator below (only a handful of entries are visible here).
29 #define VISITOR_ID_LIST(V) \
32 V(ShortcutCandidate) \
39 V(FixedFloat64Array) \
50 V(DataObjectGeneric) \
78 V(SharedFunctionInfo) \
86 // For data objects, JS objects and structs along with generic visitor which
87 // can visit object of any size we provide visitors specialized by
88 // object size in words.
89 // Ids of specialized visitors are declared in a linear order (without
90 // holes) starting from the id of visitor specialized for 2 words objects
91 // (base visitor id) and ending with the id of generic visitor.
92 // Method GetVisitorIdForSize depends on this ordering to calculate visitor
93 // id of specialized visitor from given instance size, base visitor id and
94 // generic visitor's id.
// Expand the list into kVisit<Name> enumerators (the enclosing enum
// declaration line is elided from this view).
96 #define VISITOR_ID_ENUM_DECL(id) kVisit##id,
97 VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
98 #undef VISITOR_ID_ENUM_DECL
// The 2-word specialized ids double as the "base" ids consumed by
// GetVisitorIdForSize below.
100 kVisitDataObject = kVisitDataObject2,
101 kVisitJSObject = kVisitJSObject2,
102 kVisitStruct = kVisitStruct2,
105 // Visitor ID should fit in one byte.
106 STATIC_ASSERT(kVisitorIdCount <= 256);
108 // Determine which specialized visitor should be used for given instance type
109 // and instance size.
110 static VisitorId GetVisitorId(int instance_type, int instance_size,
111 bool has_unboxed_fields);
113 // Determine which specialized visitor should be used for given map.
114 static VisitorId GetVisitorId(Map* map);
116 // For visitors that allow specialization by size calculate VisitorId based
117 // on size, base visitor id and generic visitor id.
118 static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
// NOTE(review): the `int object_size,` parameter line is elided from this
// view — the DCHECKs and the formula below read it.
120 bool has_unboxed_fields) {
121 DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
122 (base == kVisitJSObject));
123 DCHECK(IsAligned(object_size, kPointerSize));
124 DCHECK(Heap::kMinObjectSizeInWords * kPointerSize <= object_size);
125 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
126 DCHECK(!has_unboxed_fields || (base == kVisitJSObject));
// Unboxed (double) fields need the layout-descriptor-aware generic visitor.
128 if (has_unboxed_fields) return generic;
// Specialized ids are laid out linearly by size in words, so the id is
// base + (size_in_words - min_size_in_words), clamped to the generic id
// for objects too large to have a specialized visitor.
130 int visitor_id = Min(
131 base + (object_size >> kPointerSizeLog2) - Heap::kMinObjectSizeInWords,
132 static_cast<int>(generic));
134 return static_cast<VisitorId>(visitor_id);
// Fixed-size table mapping VisitorId -> visit callback. Entries are stored
// as base::AtomicWord so the table can be swapped while other threads read
// it (see CopyFrom). Closing braces of some members are elided in this view.
139 template <typename Callback>
140 class VisitorDispatchTable {
// Copies every entry from |other| element-by-element with atomic stores.
142 void CopyFrom(VisitorDispatchTable* other) {
143 // We are not using memcpy to guarantee that during update
144 // every element of callbacks_ array will remain correct
145 // pointer (memcpy might be implemented as a byte copying loop).
146 for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
147 base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
// Looks up the callback for |map|'s visitor_id (defined elsewhere).
151 inline Callback GetVisitor(Map* map);
// Direct lookup by id; the stored AtomicWord is cast back to a callback.
153 inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
154 return reinterpret_cast<Callback>(callbacks_[id]);
// Installs |callback| as the handler for |id|.
157 void Register(StaticVisitorBase::VisitorId id, Callback callback) {
158 DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
159 callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
// Registers Visitor::VisitSpecialized<size> for the id computed from
// (base, generic, object_size_in_words); never the unboxed-fields path.
162 template <typename Visitor, StaticVisitorBase::VisitorId base,
163 StaticVisitorBase::VisitorId generic, int object_size_in_words>
164 void RegisterSpecialization() {
165 static const int size = object_size_in_words * kPointerSize;
166 Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
167 &Visitor::template VisitSpecialized<size>);
// Registers specialized visitors for all sizes from 2 to 9 words and the
// generic visitor for everything larger. The STATIC_ASSERT pins the
// expected span of ids between |base| and |generic|.
171 template <typename Visitor, StaticVisitorBase::VisitorId base,
172 StaticVisitorBase::VisitorId generic>
173 void RegisterSpecializations() {
174 STATIC_ASSERT((generic - base + Heap::kMinObjectSizeInWords) == 10);
175 RegisterSpecialization<Visitor, base, generic, 2>();
176 RegisterSpecialization<Visitor, base, generic, 3>();
177 RegisterSpecialization<Visitor, base, generic, 4>();
178 RegisterSpecialization<Visitor, base, generic, 5>();
179 RegisterSpecialization<Visitor, base, generic, 6>();
180 RegisterSpecialization<Visitor, base, generic, 7>();
181 RegisterSpecialization<Visitor, base, generic, 8>();
182 RegisterSpecialization<Visitor, base, generic, 9>();
183 Register(generic, &Visitor::Visit);
// One slot per visitor id; written/read via atomic ops above.
187 base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
// Shared helpers for iterating an object's body, either as raw tagged
// pointers or region-by-region via the map's LayoutDescriptor when double
// fields are unboxed. Some closing braces/else-branches are elided here.
191 template <typename StaticVisitor>
192 class BodyVisitorBase : public AllStatic {
// Fast path: the caller guarantees all fields in the range are tagged.
194 INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
195 int start_offset, int end_offset)) {
196 DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
197 IterateRawPointers(heap, object, start_offset, end_offset);
// General path: chooses raw iteration when the layout is all-tagged,
// otherwise (elided `else`) falls back to the layout-descriptor walk.
200 INLINE(static void IterateBody(Heap* heap, HeapObject* object,
201 int start_offset, int end_offset)) {
202 if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
203 IterateRawPointers(heap, object, start_offset, end_offset);
205 IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
// Visits [start_offset, end_offset) as a contiguous run of tagged slots.
210 INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
211 int start_offset, int end_offset)) {
212 StaticVisitor::VisitPointers(heap, object,
213 HeapObject::RawField(object, start_offset),
214 HeapObject::RawField(object, end_offset));
// Walks the body region-by-region, visiting only the tagged regions
// reported by the map's LayoutDescriptor (untagged/double regions are
// skipped). Parameter lines for start/end offsets are elided in this view.
217 static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
220 DCHECK(FLAG_unbox_double_fields);
221 DCHECK(IsAligned(start_offset, kPointerSize) &&
222 IsAligned(end_offset, kPointerSize));
224 LayoutDescriptorHelper helper(object->map());
225 DCHECK(!helper.all_fields_tagged());
226 for (int offset = start_offset; offset < end_offset;) {
227 int end_of_region_offset;
228 if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
229 IterateRawPointers(heap, object, offset, end_of_region_offset);
// Advance past the region whether it was tagged or skipped.
231 offset = end_of_region_offset;
// Visitor for objects whose size is computed per-instance by
// BodyDescriptor::SizeOf. Returns the object size cast to ReturnType.
237 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
238 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
// Generic entry: size comes from the descriptor; body may contain
// unboxed fields, so use the layout-aware IterateBody.
240 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
241 int object_size = BodyDescriptor::SizeOf(map, object);
242 BodyVisitorBase<StaticVisitor>::IterateBody(
243 map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
244 return static_cast<ReturnType>(object_size);
// Size-specialized entry registered via RegisterSpecialization: the size
// is a compile-time constant and the body is known to be all-tagged, so
// the raw IteratePointers fast path is safe.
247 template <int object_size>
248 static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
249 DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
250 BodyVisitorBase<StaticVisitor>::IteratePointers(
251 map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
252 return static_cast<ReturnType>(object_size);
// Visitor for objects whose body span and size are compile-time constants
// supplied by the BodyDescriptor (kStartOffset/kEndOffset/kSize).
257 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
258 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
260 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
261 BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
262 BodyDescriptor::kStartOffset,
263 BodyDescriptor::kEndOffset);
264 return static_cast<ReturnType>(BodyDescriptor::kSize);
269 // Base class for visitors used for a linear new space iteration.
270 // IterateBody returns size of visited object.
271 // Certain types of objects (e.g. Code objects) are not handled
272 // by dispatch table of this visitor because they cannot appear
275 // This class is intended to be used in the following way:
277 // class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
281 // This is an example of Curiously recurring template pattern
282 // (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
283 // We use CRTP to guarantee aggressive compile time optimizations (i.e.
284 // inlining and specialization of StaticVisitor::VisitPointers methods).
// CRTP visitor used for linear new-space iteration; each Visit* handler
// returns the visited object's size in bytes. Access specifiers and several
// closing braces are elided from this view.
285 template <typename StaticVisitor>
286 class StaticNewSpaceVisitor : public StaticVisitorBase {
// Populates table_ with the handlers below (defined elsewhere).
288 static void Initialize();
// Dispatches on the map's visitor id and returns the object size.
290 INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
291 return table_.GetVisitor(map)(map, obj);
// Visits each slot in [start, end) via the derived visitor's VisitPointer.
294 INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
295 Object** start, Object** end)) {
296 for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
// Visits a JSFunction's pointer fields in two runs, skipping the code
// entry slot in between (a second VisitPointers call line is elided).
300 INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
301 Heap* heap = map->GetHeap();
302 VisitPointers(heap, object,
303 HeapObject::RawField(object, JSFunction::kPropertiesOffset),
304 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
306 // Don't visit code entry. We are using this visitor only during scavenges.
309 heap, object, HeapObject::RawField(
310 object, JSFunction::kCodeEntryOffset + kPointerSize),
311 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
312 return JSFunction::kSize;
// Pointer-free payloads below: only the size is computed, nothing visited.
315 INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
316 return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
319 INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
320 int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
321 return FixedDoubleArray::SizeFor(length);
324 INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
325 return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
// Delegates to the FlexibleBodyVisitor typedef'd as JSObjectVisitor below.
328 INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
329 return JSObjectVisitor::Visit(map, object);
332 INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
333 return SeqOneByteString::cast(object)
334 ->SeqOneByteStringSize(map->instance_type());
337 INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
338 return SeqTwoByteString::cast(object)
339 ->SeqTwoByteStringSize(map->instance_type());
342 INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
343 return FreeSpace::cast(object)->Size();
// Handlers with non-trivial bodies defined elsewhere.
346 INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
347 INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
348 INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
349 INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));
// Data objects carry no pointers: visiting is a no-op, only size matters.
351 class DataObjectVisitor {
353 template <int object_size>
354 static inline int VisitSpecialized(Map* map, HeapObject* object) {
358 INLINE(static int Visit(Map* map, HeapObject* object)) {
359 return map->instance_size();
// Body visitors for structs and JS objects (typedef names are elided).
363 typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
366 typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
369 typedef int (*Callback)(Map* map, HeapObject* object);
371 static VisitorDispatchTable<Callback> table_;
// Out-of-class definition of the per-instantiation dispatch table.
375 template <typename StaticVisitor>
376 VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
377 StaticNewSpaceVisitor<StaticVisitor>::table_;
380 // Base class for visitors used to transitively mark the entire heap.
381 // IterateBody returns nothing.
382 // Certain types of objects might not be handled by this base class and
383 // no visitor function is registered by the generic initialization. A
384 // specialized visitor function needs to be provided by the inheriting
385 // class itself for those cases.
387 // This class is intended to be used in the following way:
389 // class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
393 // This is an example of Curiously recurring template pattern.
// CRTP visitor used to transitively mark the heap; handlers return void.
// Most members here are declarations whose bodies live elsewhere; access
// specifiers and some closing braces are elided from this view.
394 template <typename StaticVisitor>
395 class StaticMarkingVisitor : public StaticVisitorBase {
// Populates table_ with the handlers below (defined elsewhere).
397 static void Initialize();
// Dispatches on the map's visitor id; no size is returned when marking.
399 INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
400 table_.GetVisitor(map)(map, obj);
// Handlers for objects with weak or code-related fields.
403 INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
404 INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
405 INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
406 Address entry_address));
// RelocInfo-based handlers for pointers embedded in code objects.
407 INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
408 INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
409 INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
410 INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
411 INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
// References outside the JS heap are intentionally ignored while marking.
412 INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
413 INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
414 INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
415 // Skip the weak next code link in a code object.
416 INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
418 // Mark non-optimize code for functions inlined into the given optimized
419 // code. This will prevent it from being flushed.
420 static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
// Per-type marking handlers (defined elsewhere).
423 INLINE(static void VisitMap(Map* map, HeapObject* object));
424 INLINE(static void VisitCode(Map* map, HeapObject* object));
425 INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
426 INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
427 INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
428 INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
429 INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
430 INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
431 INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
432 INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
433 INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
434 INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
436 // Mark pointers in a Map and its TransitionArray together, possibly
437 // treating transitions or back pointers weak.
438 static void MarkMapContents(Heap* heap, Map* map);
439 static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
441 // Mark pointers in the optimized code map that should act as strong
442 // references, possibly treating some entries weak.
443 static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);
445 // Code flushing support.
446 INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
447 INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));
449 // Helpers used by code flushing support that visit pointer fields and treat
450 // references to code objects either strongly or weakly.
451 static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
452 static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
453 static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
454 static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
// Data objects carry no pointers: marking them is a no-op.
456 class DataObjectVisitor {
459 static inline void VisitSpecialized(Map* map, HeapObject* object) {}
461 INLINE(static void Visit(Map* map, HeapObject* object)) {}
// Body visitors for fixed arrays, JS objects and structs (typedef names
// are elided from this view).
464 typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
467 typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
470 typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
473 typedef void (*Callback)(Map* map, HeapObject* object);
475 static VisitorDispatchTable<Callback> table_;
// Out-of-class definition of the per-instantiation dispatch table.
479 template <typename StaticVisitor>
480 VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
481 StaticMarkingVisitor<StaticVisitor>::table_;
484 class WeakObjectRetainer;
487 // A weak list is single linked list where each element has a weak pointer to
488 // the next element. Given the head of the list, this function removes dead
489 // elements from the list and if requested records slots for next-element
490 // pointers. The template parameter T is a WeakListVisitor that defines how to
491 // access the next-element pointers.
493 Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
495 } // namespace v8::internal
497 #endif // V8_OBJECTS_VISITING_H_