1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_OBJECTS_VISITING_H_
6 #define V8_OBJECTS_VISITING_H_
8 #include "src/allocation.h"
9 #include "src/heap/spaces.h"
10 #include "src/layout-descriptor.h"
12 // This file provides base classes and auxiliary methods for defining
13 // static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table which contains
// pointers to specialized visit functions. Each map has a visitor_id
// field which contains the index of the specialized visitor to use.

namespace v8 {
namespace internal {

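// Illustrative sketch only (table_, Callback and GetVisitorById are defined
// later in this file): dispatch through such a table is conceptually
//
//   Callback visit = table_.GetVisitorById(map->visitor_id());
//   visit(map, object);
//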
25 // Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
28 #define VISITOR_ID_LIST(V) \
31 V(ShortcutCandidate) \
38 V(FixedFloat64Array) \
49 V(DataObjectGeneric) \
  V(SharedFunctionInfo)
  // For data objects, JS objects and structs, along with a generic visitor
  // that can visit an object of any size, we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes), starting from the id of the visitor specialized for 2-word
  // objects (the base visitor id) and ending with the id of the generic
  // visitor.
  // The method GetVisitorIdForSize depends on this ordering to calculate the
  // visitor id of a specialized visitor from a given instance size, the base
  // visitor id and the generic visitor id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
99 kVisitDataObject = kVisitDataObject2,
100 kVisitJSObject = kVisitJSObject2,
101 kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };
105 // Visitor ID should fit in one byte.
106 STATIC_ASSERT(kVisitorIdCount <= 256);
  // Determine which specialized visitor should be used for the given instance
  // type and instance size.
110 static VisitorId GetVisitorId(int instance_type, int instance_size,
111 bool has_unboxed_fields);
  // Determine which specialized visitor should be used for the given map.
114 static VisitorId GetVisitorId(Map* map);
  // For visitors that allow specialization by size, calculate the VisitorId
  // based on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
121 DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
122 (base == kVisitJSObject));
123 DCHECK(IsAligned(object_size, kPointerSize));
124 DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
125 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
126 DCHECK(!has_unboxed_fields || (base == kVisitJSObject));
    if (has_unboxed_fields) return generic;

    int visitor_id =
        Min(base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords,
            static_cast<int>(generic));

    return static_cast<VisitorId>(visitor_id);
  }
};
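// Worked example (assumed values, for illustration only): for a struct of
// 4 words with no unboxed fields,
//
//   GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
//                       4 * kPointerSize, false)
//
// yields kVisitStruct + (4 - kMinObjectSizeInWords), i.e. the visitor
// specialized for 4-word structs.
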
139 template <typename Callback>
class VisitorDispatchTable {
 public:
142 void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy in order to guarantee that, throughout the
    // update, every element of the callbacks_ array remains a valid pointer
    // (memcpy might be implemented as a byte-copying loop).
146 for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }
151 inline Callback GetVisitor(Map* map);
153 inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }
157 void Register(StaticVisitorBase::VisitorId id, Callback callback) {
158 DCHECK(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
  }
162 template <typename Visitor, StaticVisitorBase::VisitorId base,
163 StaticVisitorBase::VisitorId generic, int object_size_in_words>
164 void RegisterSpecialization() {
165 static const int size = object_size_in_words * kPointerSize;
166 Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
             &Visitor::template VisitSpecialized<size>);
  }
171 template <typename Visitor, StaticVisitorBase::VisitorId base,
172 StaticVisitorBase::VisitorId generic>
173 void RegisterSpecializations() {
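    // Specializations exist for sizes of 2 through 9 words, and the generic
    // id follows the last specialized id in the enum, so
    // generic - base + kMinObjectSizeInWords must equal 10.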
    STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
                  10);
176 RegisterSpecialization<Visitor, base, generic, 2>();
177 RegisterSpecialization<Visitor, base, generic, 3>();
178 RegisterSpecialization<Visitor, base, generic, 4>();
179 RegisterSpecialization<Visitor, base, generic, 5>();
180 RegisterSpecialization<Visitor, base, generic, 6>();
181 RegisterSpecialization<Visitor, base, generic, 7>();
182 RegisterSpecialization<Visitor, base, generic, 8>();
183 RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }
 private:
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};
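// A minimal registration sketch (SomeVisitor is a hypothetical name; the
// real Initialize() bodies live in objects-visiting-inl.h):
//
//   table_.Register(kVisitByteArray, &SomeVisitor::VisitByteArray);
//   table_.template RegisterSpecializations<SomeVisitor::DataObjectVisitor,
//                                           kVisitDataObject,
//                                           kVisitDataObjectGeneric>();
//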
192 template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
195 INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
196 int start_offset, int end_offset)) {
197 DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
    IterateRawPointers(heap, object, start_offset, end_offset);
  }
201 INLINE(static void IterateBody(Heap* heap, HeapObject* object,
202 int start_offset, int end_offset)) {
203 if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }
 private:
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
213 StaticVisitor::VisitPointers(heap, object,
214 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }
  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
221 DCHECK(FLAG_unbox_double_fields);
222 DCHECK(IsAligned(start_offset, kPointerSize) &&
223 IsAligned(end_offset, kPointerSize));
225 LayoutDescriptorHelper helper(object->map());
226 DCHECK(!helper.all_fields_tagged());
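    // The loop below walks the body as alternating tagged and untagged
    // regions. As an assumed example, a layout of {tagged, unboxed double,
    // tagged} visits the first word, skips the double, then visits the
    // trailing tagged region.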
227 for (int offset = start_offset; offset < end_offset;) {
228 int end_of_region_offset;
229 if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
        IterateRawPointers(heap, object, offset, end_of_region_offset);
      }
      offset = end_of_region_offset;
    }
  }
};
238 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
241 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
242 int object_size = BodyDescriptor::SizeOf(map, object);
243 BodyVisitorBase<StaticVisitor>::IterateBody(
244 map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
248 template <int object_size>
249 static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
250 DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
251 BodyVisitorBase<StaticVisitor>::IteratePointers(
252 map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};
258 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
261 INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
262 BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
263 BodyDescriptor::kStartOffset,
264 BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled by the
// dispatch table of this visitor because they cannot appear in new space.
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   };
//
// This is an example of the Curiously Recurring Template Pattern (see
// http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
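//
// A minimal sketch of such a subclass (SomeVisitor and its VisitPointer body
// are illustrative assumptions, not part of this header):
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//    public:
//     INLINE(static void VisitPointer(Heap* heap, Object** p)) {
//       // Process one pointer slot, e.g. scavenge the object *p refers to.
//     }
//   };
//
//   // Usage: SomeVisitor::Initialize(); then for each object:
//   //   SomeVisitor::IterateBody(object->map(), object);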
286 template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
289 static void Initialize();
291 INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }
295 INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
296 Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
301 INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
302 Heap* heap = map->GetHeap();
303 VisitPointers(heap, object,
304 HeapObject::RawField(object, JSFunction::kPropertiesOffset),
305 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
307 // Don't visit code entry. We are using this visitor only during scavenges.
    VisitPointers(
        heap, object, HeapObject::RawField(
311 object, JSFunction::kCodeEntryOffset + kPointerSize),
312 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }
316 INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }
320 INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
321 int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }
325 INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }
329 INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }
333 INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
334 return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }
338 INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
339 return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }
343 INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }
347 INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
348 INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
349 INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
350 INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));
  class DataObjectVisitor {
   public:
354 template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }
359 INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };
  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;
  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;
370 typedef int (*Callback)(Map* map, HeapObject* object);
  static VisitorDispatchTable<Callback> table_;
};
376 template <typename StaticVisitor>
377 VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
378 StaticNewSpaceVisitor<StaticVisitor>::table_;
381 // Base class for visitors used to transitively mark the entire heap.
382 // IterateBody returns nothing.
383 // Certain types of objects might not be handled by this base class and
384 // no visitor function is registered by the generic initialization. A
385 // specialized visitor function needs to be provided by the inheriting
386 // class itself for those cases.
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   };
//
// This is an example of the Curiously Recurring Template Pattern.
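//
// A hypothetical subclass (names are illustrative only) supplies the pointer
// callbacks that the registered body visitors dispatch to:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//    public:
//     INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
//                                      Object** start, Object** end)) {
//       // Mark every heap object reachable from the slots in [start, end).
//     }
//   };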
395 template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
398 static void Initialize();
400 INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }
404 INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
405 INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
406 INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
407 Address entry_address));
408 INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
409 INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
410 INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
411 INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
412 INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
413 INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
414 INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
415 INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
416 // Skip the weak next code link in a code object.
417 INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}
  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent them from being flushed.
421 static void MarkInlinedFunctionsCode(Heap* heap, Code* code);
 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
425 INLINE(static void VisitCode(Map* map, HeapObject* object));
426 INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
427 INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
428 INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
429 INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
430 INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
431 INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
432 INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
433 INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
434 INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
435 INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));
  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers as weak.
439 static void MarkMapContents(Heap* heap, Map* map);
440 static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);
  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries as weak.
444 static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);
446 // Code flushing support.
447 INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
448 INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));
450 // Helpers used by code flushing support that visit pointer fields and treat
451 // references to code objects either strongly or weakly.
452 static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
453 static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
454 static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
455 static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);
  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}
    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };
  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;
  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;
  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;
474 typedef void (*Callback)(Map* map, HeapObject* object);
  static VisitorDispatchTable<Callback> table_;
};
480 template <typename StaticVisitor>
481 VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
482 StaticMarkingVisitor<StaticVisitor>::table_;
485 class WeakObjectRetainer;
// A weak list is a singly linked list where each element has a weak pointer
// to the next element. Given the head of the list, this function removes dead
// elements from the list and, if requested, records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
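//
// A sketch of the contract a WeakListVisitor is assumed to provide (treat the
// exact signatures as illustrative; the real specializations live in
// objects-visiting.cc):
//
//   template <class T>
//   struct WeakListVisitor {
//     static void SetWeakNext(T* obj, Object* next);
//     static Object* WeakNext(T* obj);
//     static int WeakNextOffset();
//     static void VisitLiveObject(Heap* heap, T* obj,
//                                 WeakObjectRetainer* retainer);
//     static void VisitPhantomObject(Heap* heap, T* obj);
//   };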
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
}
}  // namespace v8::internal
498 #endif // V8_OBJECTS_VISITING_H_