// Upstream version 7.36.149.0
// [platform/framework/web/crosswalk.git] / src / v8 / src / objects-visiting.h
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

5 #ifndef V8_OBJECTS_VISITING_H_
6 #define V8_OBJECTS_VISITING_H_
7
8 #include "allocation.h"
9
// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, and one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table which contains
// pointers to specialized visit functions. Each map has the visitor_id
// field which contains an index of the specialized visitor to use.

19 namespace v8 {
20 namespace internal {
21
22
// Base class for all static visitors.
// Declares the VisitorId enumeration shared by all static-visitor dispatch
// tables, plus helpers that map an object's instance type and instance size
// to the id of the specialized visit function to use.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V)    \
  V(SeqOneByteString)         \
  V(SeqTwoByteString)         \
  V(ShortcutCandidate)        \
  V(ByteArray)                \
  V(FreeSpace)                \
  V(FixedArray)               \
  V(FixedDoubleArray)         \
  V(FixedTypedArray)          \
  V(FixedFloat64Array)        \
  V(ConstantPoolArray)        \
  V(NativeContext)            \
  V(AllocationSite)           \
  V(DataObject2)              \
  V(DataObject3)              \
  V(DataObject4)              \
  V(DataObject5)              \
  V(DataObject6)              \
  V(DataObject7)              \
  V(DataObject8)              \
  V(DataObject9)              \
  V(DataObjectGeneric)        \
  V(JSObject2)                \
  V(JSObject3)                \
  V(JSObject4)                \
  V(JSObject5)                \
  V(JSObject6)                \
  V(JSObject7)                \
  V(JSObject8)                \
  V(JSObject9)                \
  V(JSObjectGeneric)          \
  V(Struct2)                  \
  V(Struct3)                  \
  V(Struct4)                  \
  V(Struct5)                  \
  V(Struct6)                  \
  V(Struct7)                  \
  V(Struct8)                  \
  V(Struct9)                  \
  V(StructGeneric)            \
  V(ConsString)               \
  V(SlicedString)             \
  V(Symbol)                   \
  V(Oddball)                  \
  V(Code)                     \
  V(Map)                      \
  V(Cell)                     \
  V(PropertyCell)             \
  V(SharedFunctionInfo)       \
  V(JSFunction)               \
  V(JSWeakMap)                \
  V(JSWeakSet)                \
  V(JSArrayBuffer)            \
  V(JSTypedArray)             \
  V(JSDataView)               \
  V(JSRegExp)

  // For data objects, JS objects and structs along with generic visitor which
  // can visit object of any size we provide visitors specialized by
  // object size in words.
  // Ids of specialized visitors are declared in a linear order (without
  // holes) starting from the id of visitor specialized for 2 words objects
  // (base visitor id) and ending with the id of generic visitor.
  // Method GetVisitorIdForSize depends on this ordering to calculate visitor
  // id of specialized visitor from given instance size, base visitor id and
  // generic visitor's id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id)  kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    // Aliases for the smallest (2-word) specialization of each family.
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };

  // Visitor ID should fit in one byte (it is cached in each map's
  // visitor_id field).
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for given instance
  // type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  // Same as above, reading both inputs from the given map.
  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size calculate VisitorId based
  // on size, base visitor id and generic visitor id.  Sizes beyond the
  // largest specialized id clamp to the generic visitor via Min().
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    ASSERT((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    ASSERT(IsAligned(object_size, kPointerSize));
    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
    ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);

    // Specialized ids are laid out contiguously by size in words (see the
    // VisitorId comment), so the offset from |base| is the object size in
    // words minus the minimum specialized size.
    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};
132
133
// Table mapping each VisitorId to a callback of type Callback.
// Entries are stored as AtomicWords so that the table can be updated
// (see CopyFrom) while every slot remains a valid function pointer at
// all times.
template<typename Callback>
class VisitorDispatchTable {
 public:
  // Copies all entries from |other| into this table, element by element.
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy to guarantee that during update
    // every element of callbacks_ array will remain correct
    // pointer (memcpy might be implemented as a byte copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  // Fetch the callback registered for the given visitor id.
  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  // Fetch the callback for the visitor id cached in the map.
  inline Callback GetVisitor(Map* map) {
    return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
  }

  // Install |callback| as the handler for |id|.
  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    ASSERT(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
  }

  // Registers Visitor::VisitSpecialized<size> for the id derived from
  // |base|, |generic| and the compile-time object size in words.
  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic,
           int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }


  // Registers size-specialized visitors for all sizes from 2 to 9 words
  // and Visitor::Visit for the generic id.  The STATIC_ASSERT checks that
  // the [base, generic] id range covers exactly these eight sizes.
  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT(
        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  // One slot per VisitorId; AtomicWord so updates are word-atomic.
  AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};
190
191
192 template<typename StaticVisitor>
193 class BodyVisitorBase : public AllStatic {
194  public:
195   INLINE(static void IteratePointers(Heap* heap,
196                                      HeapObject* object,
197                                      int start_offset,
198                                      int end_offset)) {
199     Object** start_slot = reinterpret_cast<Object**>(object->address() +
200                                                      start_offset);
201     Object** end_slot = reinterpret_cast<Object**>(object->address() +
202                                                    end_offset);
203     StaticVisitor::VisitPointers(heap, start_slot, end_slot);
204   }
205 };
206
207
208 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
209 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
210  public:
211   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
212     int object_size = BodyDescriptor::SizeOf(map, object);
213     BodyVisitorBase<StaticVisitor>::IteratePointers(
214         map->GetHeap(),
215         object,
216         BodyDescriptor::kStartOffset,
217         object_size);
218     return static_cast<ReturnType>(object_size);
219   }
220
221   template<int object_size>
222   static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
223     ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
224     BodyVisitorBase<StaticVisitor>::IteratePointers(
225         map->GetHeap(),
226         object,
227         BodyDescriptor::kStartOffset,
228         object_size);
229     return static_cast<ReturnType>(object_size);
230   }
231 };
232
233
234 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
235 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
236  public:
237   INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
238     BodyVisitorBase<StaticVisitor>::IteratePointers(
239         map->GetHeap(),
240         object,
241         BodyDescriptor::kStartOffset,
242         BodyDescriptor::kEndOffset);
243     return static_cast<ReturnType>(BodyDescriptor::kSize);
244   }
245 };
246
247
// Base class for visitors used for a linear new space iteration.
// IterateBody returns size of visited object.
// Certain types of objects (i.e. Code objects) are not handled
// by dispatch table of this visitor because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  // Populates table_ with the visit functions below; defined out of line.
  static void Initialize();

  // Dispatches through table_ on the map's visitor id and returns the
  // size of the visited object.
  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  // Visits every slot in [start, end) via StaticVisitor::VisitPointer.
  INLINE(static void VisitPointers(Heap* heap, Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  // Visits a JSFunction's pointer fields in two ranges, skipping the code
  // entry slot in between.
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit code entry. We are using this visitor only during scavenges.

    VisitPointers(
        heap,
        HeapObject::RawField(object,
                             JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object,
                             JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  // The handlers below visit no pointer slots; they only compute and
  // return the object's size.
  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  // JS objects are handled by the generic flexible-body visitor below.
  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)->
        SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  // Defined out of line.
  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));

  // Visitor for objects whose bodies are not iterated: both paths just
  // report the object's size.
  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  // Signature of the entries in table_: visit the object, return its size.
  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
354
355
// Out-of-class definition of the per-instantiation dispatch table.
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
359
360
// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class and
// no visitor function is registered by the generic initialization. A
// specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of Curiously recurring template pattern.
template<typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  // Populates table_ with the visit functions below; defined out of line.
  static void Initialize();

  // Dispatches through table_ on the map's visitor id; returns nothing.
  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  // Object- and RelocInfo-specific marking hooks; defined out of line.
  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  // These relocation kinds are intentionally ignored by marking.
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) { }
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) { }
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) { }

  // TODO(mstarzinger): This should be made protected once refactoring is done.
  // Mark non-optimize code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  // Per-type marking visitors registered in table_; defined out of line.
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitConstantPoolArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weak.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  // Data objects require no marking work: both paths are no-ops.
  class DataObjectVisitor {
   public:
    template<int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {
    }

    INLINE(static void Visit(Map* map, HeapObject* object)) {
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              FixedArray::BodyDescriptor,
                              void> FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              void> JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              void> StructObjectVisitor;

  // Signature of the entries in table_: visit the object, no result.
  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};
456
457
// Out-of-class definition of the per-instantiation dispatch table.
template<typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
461
462
463 class WeakObjectRetainer;
464
465
// A weak list is single linked list where each element has a weak pointer to
// the next element. Given the head of the list, this function removes dead
// elements from the list and if requested records slots for next-element
// pointers. The template parameter T is a WeakListVisitor that defines how to
// access the next-element pointers.
// NOTE(review): the returned Object* is presumably the updated list head —
// confirm against the definition in the corresponding .cc file.
template <class T>
Object* VisitWeakList(Heap* heap,
                      Object* list,
                      WeakObjectRetainer* retainer,
                      bool record_slots);
476
477 } }  // namespace v8::internal
478
479 #endif  // V8_OBJECTS_VISITING_H_