Change RecordSlot interface. Make it more robust by replacing anchor slot with actual...
[platform/upstream/v8.git] / src / heap / objects-visiting.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_OBJECTS_VISITING_H_
6 #define V8_OBJECTS_VISITING_H_
7
8 #include "src/allocation.h"
9 #include "src/layout-descriptor.h"
10
11 // This file provides base classes and auxiliary methods for defining
12 // static object visitors used during GC.
13 // Visiting HeapObject body with a normal ObjectVisitor requires performing
14 // two switches on object's instance type to determine object size and layout
15 // and one or more virtual method calls on visitor itself.
16 // Static visitor is different: it provides a dispatch table which contains
17 // pointers to specialized visit functions. Each map has the visitor_id
18 // field which contains an index of specialized visitor to use.
19
20 namespace v8 {
21 namespace internal {
22
23
// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
// List of all visitor kinds. Expanded below both to declare the VisitorId
// enum and (elsewhere) to build dispatch tables, so order matters: the
// size-specialized ids (DataObject2..Generic, JSObject2..Generic,
// Struct2..Generic) must stay contiguous for GetVisitorIdForSize().
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(BytecodeArray)         \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(WeakCell)              \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    // Aliases for the first (smallest-size) id of each specialized family.
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte (Map stores it in a byte-sized field).
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance type
  // and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);

  // Determine which specialized visitor should be used for given map.
  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(
        map->instance_type(), map->instance_size(),
        FLAG_unbox_double_fields && !map->HasFastPointerLayout());
  }

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    // Objects with unboxed (raw double) fields cannot use the simple
    // size-specialized visitors; fall back to the generic one.
    if (has_unboxed_fields) return generic;

    // Specialized ids are laid out contiguously from |base| (2 words) up to
    // |generic|; clamp at |generic| for sizes beyond the specialized range.
    int visitor_id =
        Min(base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords,
            static_cast<int>(generic));

    return static_cast<VisitorId>(visitor_id);
  }
};
140
141
142 template <typename Callback>
143 class VisitorDispatchTable {
144  public:
145   void CopyFrom(VisitorDispatchTable* other) {
146     // We are not using memcpy to guarantee that during update
147     // every element of callbacks_ array will remain correct
148     // pointer (memcpy might be implemented as a byte copying loop).
149     for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
150       base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
151     }
152   }
153
154   inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
155     return reinterpret_cast<Callback>(callbacks_[id]);
156   }
157
158   inline Callback GetVisitor(Map* map) {
159     return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
160   }
161
162   void Register(StaticVisitorBase::VisitorId id, Callback callback) {
163     DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
164     callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
165   }
166
167   template <typename Visitor, StaticVisitorBase::VisitorId base,
168             StaticVisitorBase::VisitorId generic, int object_size_in_words>
169   void RegisterSpecialization() {
170     static const int size = object_size_in_words * kPointerSize;
171     Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
172              &Visitor::template VisitSpecialized<size>);
173   }
174
175
176   template <typename Visitor, StaticVisitorBase::VisitorId base,
177             StaticVisitorBase::VisitorId generic>
178   void RegisterSpecializations() {
179     STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
180                   10);
181     RegisterSpecialization<Visitor, base, generic, 2>();
182     RegisterSpecialization<Visitor, base, generic, 3>();
183     RegisterSpecialization<Visitor, base, generic, 4>();
184     RegisterSpecialization<Visitor, base, generic, 5>();
185     RegisterSpecialization<Visitor, base, generic, 6>();
186     RegisterSpecialization<Visitor, base, generic, 7>();
187     RegisterSpecialization<Visitor, base, generic, 8>();
188     RegisterSpecialization<Visitor, base, generic, 9>();
189     Register(generic, &Visitor::Visit);
190   }
191
192  private:
193   base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
194 };
195
196
// Common pointer-iteration helpers shared by the flexible/fixed body
// visitors below. StaticVisitor supplies the actual VisitPointers callback.
template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  // Visits [start_offset, end_offset) assuming every word in the range is a
  // tagged pointer. Must not be used for maps with unboxed double fields;
  // the DCHECK guards against that.
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
    IterateRawPointers(heap, object, start_offset, end_offset);
  }

  // Visits [start_offset, end_offset), consulting the map's layout
  // descriptor when the object may contain unboxed (raw double) fields.
  INLINE(static void IterateBody(Heap* heap, HeapObject* object,
                                 int start_offset, int end_offset)) {
    if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }

 private:
  // Unconditionally treats every word in the range as a tagged pointer and
  // forwards it to StaticVisitor::VisitPointers.
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
    StaticVisitor::VisitPointers(heap, object,
                                 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }

  // Walks the range region by region, visiting only the regions the layout
  // descriptor marks as tagged; unboxed-double regions are skipped.
  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
    DCHECK(FLAG_unbox_double_fields);
    DCHECK(IsAligned(start_offset, kPointerSize) &&
           IsAligned(end_offset, kPointerSize));

    LayoutDescriptorHelper helper(object->map());
    DCHECK(!helper.all_fields_tagged());
    for (int offset = start_offset; offset < end_offset;) {
      int end_of_region_offset;
      // NOTE: the loop relies on IsTagged() filling in
      // |end_of_region_offset| whether or not the region is tagged, since
      // |offset| advances unconditionally below.
      if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
        IterateRawPointers(heap, object, offset, end_of_region_offset);
      }
      offset = end_of_region_offset;
    }
  }
};
241
242
243 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
244 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
245  public:
246   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
247     int object_size = BodyDescriptor::SizeOf(map, object);
248     BodyVisitorBase<StaticVisitor>::IterateBody(
249         map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
250     return static_cast<ReturnType>(object_size);
251   }
252
253   template <int object_size>
254   static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
255     DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
256     BodyVisitorBase<StaticVisitor>::IteratePointers(
257         map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
258     return static_cast<ReturnType>(object_size);
259   }
260 };
261
262
263 template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
264 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
265  public:
266   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
267     BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
268                                                 BodyDescriptor::kStartOffset,
269                                                 BodyDescriptor::kEndOffset);
270     return static_cast<ReturnType>(BodyDescriptor::kSize);
271   }
272 };
273
274
275 // Base class for visitors used for a linear new space iteration.
276 // IterateBody returns size of visited object.
277 // Certain types of objects (i.e. Code objects) are not handled
278 // by dispatch table of this visitor because they cannot appear
279 // in the new space.
280 //
281 // This class is intended to be used in the following way:
282 //
283 //   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
284 //     ...
285 //   }
286 //
287 // This is an example of Curiously recurring template pattern
288 // (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
289 // We use CRTP to guarantee aggressive compile time optimizations (i.e.
290 // inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  // Fills in the dispatch table; implemented out of line.
  static void Initialize();

  // Dispatches on |map|'s visitor id and returns the size of the visited
  // object in bytes.
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits each slot in [start, end) via StaticVisitor::VisitPointer.
  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
                                   Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  // Visits a JSFunction's pointer fields in two ranges, deliberately
  // excluding the code entry slot in between.
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap, object,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap, object, HeapObject::RawField(
                          object, JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  // The visitors below contain no pointers to visit; they only compute and
  // return the object's size.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  // Generic JSObject bodies are handled by the flexible body visitor.
  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  // Implemented out of line.
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  // Data objects contain no pointers; visiting them reduces to returning
  // their (map-provided or compile-time) size.
  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  // Signature of the entries stored in the dispatch table.
  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
382
383
// Storage for the per-instantiation dispatch table declared above.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
387
388
389 // Base class for visitors used to transitively mark the entire heap.
390 // IterateBody returns nothing.
391 // Certain types of objects might not be handled by this base class and
392 // no visitor function is registered by the generic initialization. A
393 // specialized visitor function needs to be provided by the inheriting
394 // class itself for those cases.
395 //
396 // This class is intended to be used in the following way:
397 //
398 //   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
399 //     ...
400 //   }
401 //
402 // This is an example of Curiously recurring template pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Fills in the dispatch table; implemented out of line.
  static void Initialize();

  // Dispatches on |map|'s visitor id; unlike the new-space visitor, no size
  // is returned.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  // The following visitors are declared here and defined out of line.
  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
                                    Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // These reloc-info kinds contain nothing to mark.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries weak.
  static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  // Data objects contain no pointers, so marking them is a no-op.
  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  // Signature of the entries stored in the dispatch table.
  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
485
486
// Storage for the per-instantiation dispatch table declared above.
template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
490
491
492 class WeakObjectRetainer;
493
494
495 // A weak list is single linked list where each element has a weak pointer to
496 // the next element. Given the head of the list, this function removes dead
497 // elements from the list and if requested records slots for next-element
498 // pointers. The template parameter T is a WeakListVisitor that defines how to
499 // access the next-element pointers.
// Implemented out of line; T is a WeakListVisitor as described above.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8
504
505 #endif  // V8_OBJECTS_VISITING_H_