// Source: platform/upstream/v8.git — src/heap/objects-visiting.h
// (revision 1eba88731b38acfc643708be2c0b82ed3f1d3fac)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_OBJECTS_VISITING_H_
6 #define V8_OBJECTS_VISITING_H_
7
8 #include "src/allocation.h"
9 #include "src/heap/heap.h"
10 #include "src/heap/spaces.h"
11 #include "src/layout-descriptor.h"
12
13 // This file provides base classes and auxiliary methods for defining
14 // static object visitors used during GC.
15 // Visiting HeapObject body with a normal ObjectVisitor requires performing
16 // two switches on object's instance type to determine object size and layout
17 // and one or more virtual method calls on visitor itself.
18 // Static visitor is different: it provides a dispatch table which contains
19 // pointers to specialized visit functions. Each map has the visitor_id
20 // field which contains an index of specialized visitor to use.
21
22 namespace v8 {
23 namespace internal {
24
25
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
// The full list of visitor ids.  Used to generate the VisitorId enum below
// and, in the visitor implementations, the dispatch-table entries.
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(BytecodeArray)         \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(WeakCell)              \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    // Aliases for the first (2-word) specialization of each sized family.
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
  };

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);

  // Determine which specialized visitor should be used for given map.
  static VisitorId GetVisitorId(Map* map);

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(Heap::kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    // Unboxed double fields need the layout-descriptor-aware generic visitor;
    // the size-specialized visitors iterate raw tagged slots only.
    if (has_unboxed_fields) return generic;

    // Specialized ids are laid out contiguously from |base| (2 words) up to
    // |generic|; clamp oversized objects to the generic visitor.
    int visitor_id = Min(
        base + (object_size >> kPointerSizeLog2) - Heap::kMinObjectSizeInWords,
        static_cast<int>(generic));

    return static_cast<VisitorId>(visitor_id);
  }
};
137
138
139 template <typename Callback>
140 class VisitorDispatchTable {
141  public:
142   void CopyFrom(VisitorDispatchTable* other) {
143     // We are not using memcpy to guarantee that during update
144     // every element of callbacks_ array will remain correct
145     // pointer (memcpy might be implemented as a byte copying loop).
146     for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
147       base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
148     }
149   }
150
151   inline Callback GetVisitor(Map* map);
152
153   inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
154     return reinterpret_cast<Callback>(callbacks_[id]);
155   }
156
157   void Register(StaticVisitorBase::VisitorId id, Callback callback) {
158     DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
159     callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
160   }
161
162   template <typename Visitor, StaticVisitorBase::VisitorId base,
163             StaticVisitorBase::VisitorId generic, int object_size_in_words>
164   void RegisterSpecialization() {
165     static const int size = object_size_in_words * kPointerSize;
166     Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
167              &Visitor::template VisitSpecialized<size>);
168   }
169
170
171   template <typename Visitor, StaticVisitorBase::VisitorId base,
172             StaticVisitorBase::VisitorId generic>
173   void RegisterSpecializations() {
174     STATIC_ASSERT((generic - base + Heap::kMinObjectSizeInWords) == 10);
175     RegisterSpecialization<Visitor, base, generic, 2>();
176     RegisterSpecialization<Visitor, base, generic, 3>();
177     RegisterSpecialization<Visitor, base, generic, 4>();
178     RegisterSpecialization<Visitor, base, generic, 5>();
179     RegisterSpecialization<Visitor, base, generic, 6>();
180     RegisterSpecialization<Visitor, base, generic, 7>();
181     RegisterSpecialization<Visitor, base, generic, 8>();
182     RegisterSpecialization<Visitor, base, generic, 9>();
183     Register(generic, &Visitor::Visit);
184   }
185
186  private:
187   base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
188 };
189
190
// CRTP helper that iterates the pointer fields of an object body, either as
// raw tagged slots or — when double fields are unboxed — according to the
// map's layout descriptor.
template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  // Visits [start_offset, end_offset) as raw tagged pointer slots.  Only
  // valid when the object is known to contain exclusively tagged values
  // (enforced by the DCHECK when double-field unboxing is enabled).
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
    IterateRawPointers(heap, object, start_offset, end_offset);
  }

  // Visits [start_offset, end_offset), consulting the map's layout
  // descriptor if it may contain unboxed double fields.
  INLINE(static void IterateBody(Heap* heap, HeapObject* object,
                                 int start_offset, int end_offset)) {
    if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }

 private:
  // Forwards the whole range to StaticVisitor::VisitPointers as two raw
  // field addresses.
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
    StaticVisitor::VisitPointers(heap, object,
                                 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }

  // Walks the body region by region, visiting only the regions the layout
  // descriptor marks as tagged; untagged (unboxed double) regions are
  // skipped.
  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
    DCHECK(FLAG_unbox_double_fields);
    DCHECK(IsAligned(start_offset, kPointerSize) &&
           IsAligned(end_offset, kPointerSize));

    LayoutDescriptorHelper helper(object->map());
    DCHECK(!helper.all_fields_tagged());
    for (int offset = start_offset; offset < end_offset;) {
      int end_of_region_offset;
      if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
        IterateRawPointers(heap, object, offset, end_of_region_offset);
      }
      // NOTE(review): loop progress relies on IsTagged() setting
      // *end_of_region_offset even when it returns false — confirm against
      // LayoutDescriptorHelper's contract.
      offset = end_of_region_offset;
    }
  }
};
235
236
237 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
238 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
239  public:
240   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
241     int object_size = BodyDescriptor::SizeOf(map, object);
242     BodyVisitorBase<StaticVisitor>::IterateBody(
243         map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
244     return static_cast<ReturnType>(object_size);
245   }
246
247   template <int object_size>
248   static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
249     DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
250     BodyVisitorBase<StaticVisitor>::IteratePointers(
251         map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
252     return static_cast<ReturnType>(object_size);
253   }
254 };
255
256
257 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
258 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
259  public:
260   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
261     BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
262                                                 BodyDescriptor::kStartOffset,
263                                                 BodyDescriptor::kEndOffset);
264     return static_cast<ReturnType>(BodyDescriptor::kSize);
265   }
266 };
267
268
// Base class for visitors used for a linear new space iteration.
// IterateBody returns size of visited object.
// Certain types of objects (i.e. Code objects) are not handled
// by dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  // Dispatches to the visitor registered for |map| and returns the size of
  // the visited object.
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits every slot in [start, end) via StaticVisitor::VisitPointer.
  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
                                   Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  // Visits a JSFunction's fields in two ranges so the code entry slot in
  // between is skipped.
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap, object,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap, object, HeapObject::RawField(
                          object, JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  // The visitors below handle data-only objects: no pointers to visit, only
  // the object's size is reported for the linear iteration.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  // Defined in the corresponding -inl file (not visible here).
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));

  // Visitor for objects that contain no heap pointers at all; just reports
  // the instance size.
  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  // Signature of all entries in the dispatch table below.
  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
373
374
// Definition of the per-instantiation static dispatch table declared in
// StaticNewSpaceVisitor.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
378
379
// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  // Dispatches to the visitor registered for |map|.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
                                    Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // The reference kinds below carry no heap pointers to mark, hence the
  // empty bodies.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  // Type-specific visitors; implementations live in the corresponding -inl
  // file (not visible here).
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
  INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries weak.
  static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  // Visitor for objects containing no heap pointers: marking them is a
  // no-op.
  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  // Signature of all entries in the dispatch table below.
  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
477
478
// Definition of the per-instantiation static dispatch table declared in
// StaticMarkingVisitor.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
482
483
class WeakObjectRetainer;


// A weak list is single linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and if requested records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8
496
497 #endif  // V8_OBJECTS_VISITING_H_