// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_H_
#define V8_OBJECTS_VISITING_H_

#include "src/allocation.h"
#include "src/heap/spaces.h"
#include "src/layout-descriptor.h"

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table containing
// pointers to specialized visit functions. Each map has a visitor_id field
// holding the index of the specialized visitor to use.
namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
#define VISITOR_ID_LIST(V) \
  V(SeqOneByteString)      \
  V(SeqTwoByteString)      \
  V(ShortcutCandidate)     \
  V(ByteArray)             \
  V(BytecodeArray)         \
  V(FreeSpace)             \
  V(FixedArray)            \
  V(FixedDoubleArray)      \
  V(FixedTypedArray)       \
  V(FixedFloat64Array)     \
  V(NativeContext)         \
  V(AllocationSite)        \
  V(DataObject2)           \
  V(DataObject3)           \
  V(DataObject4)           \
  V(DataObject5)           \
  V(DataObject6)           \
  V(DataObject7)           \
  V(DataObject8)           \
  V(DataObject9)           \
  V(DataObjectGeneric)     \
  V(JSObject2)             \
  V(JSObject3)             \
  V(JSObject4)             \
  V(JSObject5)             \
  V(JSObject6)             \
  V(JSObject7)             \
  V(JSObject8)             \
  V(JSObject9)             \
  V(JSObjectGeneric)       \
  V(Struct2)               \
  V(Struct3)               \
  V(Struct4)               \
  V(Struct5)               \
  V(Struct6)               \
  V(Struct7)               \
  V(Struct8)               \
  V(Struct9)               \
  V(StructGeneric)         \
  V(ConsString)            \
  V(SlicedString)          \
  V(Symbol)                \
  V(Oddball)               \
  V(Code)                  \
  V(Map)                   \
  V(Cell)                  \
  V(PropertyCell)          \
  V(WeakCell)              \
  V(SharedFunctionInfo)    \
  V(JSFunction)            \
  V(JSWeakCollection)      \
  V(JSArrayBuffer)         \
  V(JSTypedArray)          \
  V(JSDataView)            \
  V(JSRegExp)

  // For data objects, JS objects and structs we provide, along with a
  // generic visitor that can visit an object of any size, visitors
  // specialized by object size in words.
  // Ids of the specialized visitors are declared in a linear order (without
  // holes), starting from the id of the visitor specialized for 2-word
  // objects (the base visitor id) and ending with the id of the generic
  // visitor.
  // The method GetVisitorIdForSize depends on this ordering to calculate the
  // id of the specialized visitor from a given instance size, base visitor
  // id and generic visitor id.
  enum VisitorId {
#define VISITOR_ID_ENUM_DECL(id) kVisit##id,
    VISITOR_ID_LIST(VISITOR_ID_ENUM_DECL)
#undef VISITOR_ID_ENUM_DECL
    kVisitorIdCount,
    kVisitDataObject = kVisitDataObject2,
    kVisitJSObject = kVisitJSObject2,
    kVisitStruct = kVisitStruct2,
    kMinObjectSizeInWords = 2
  };
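
  // Illustrative note: the macro expansion above produces
  // kVisitSeqOneByteString, kVisitSeqTwoByteString, ..., kVisitJSRegExp in
  // declaration order, so e.g. kVisitStruct2 .. kVisitStructGeneric form the
  // contiguous range that GetVisitorIdForSize below indexes into.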

  // Visitor ID should fit in one byte.
  STATIC_ASSERT(kVisitorIdCount <= 256);

  // Determine which specialized visitor should be used for the given
  // instance type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size,
                                bool has_unboxed_fields);

  // Determine which specialized visitor should be used for the given map.
  static VisitorId GetVisitorId(Map* map);

  // For visitors that allow specialization by size, calculate the VisitorId
  // based on object size, base visitor id and generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base, VisitorId generic,
                                       int object_size,
                                       bool has_unboxed_fields) {
    DCHECK((base == kVisitDataObject) || (base == kVisitStruct) ||
           (base == kVisitJSObject));
    DCHECK(IsAligned(object_size, kPointerSize));
    DCHECK(kMinObjectSizeInWords * kPointerSize <= object_size);
    DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
    DCHECK(!has_unboxed_fields || (base == kVisitJSObject));

    if (has_unboxed_fields) return generic;

    int visitor_id =
        Min(base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords,
            static_cast<int>(generic));

    return static_cast<VisitorId>(visitor_id);
  }
};
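
// A worked example of GetVisitorIdForSize (illustrative only): with
// kMinObjectSizeInWords == 2, a 3-word struct maps to
//   kVisitStruct + 3 - 2 == kVisitStruct3,
// and any struct of 10 or more words is clamped by Min to
// kVisitStructGeneric.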


template <typename Callback>
class VisitorDispatchTable {
 public:
  void CopyFrom(VisitorDispatchTable* other) {
    // We are not using memcpy to guarantee that during the update every
    // element of the callbacks_ array remains a valid pointer (memcpy might
    // be implemented as a byte copying loop).
    for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
      base::NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
    }
  }

  inline Callback GetVisitor(Map* map);

  inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
    return reinterpret_cast<Callback>(callbacks_[id]);
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    DCHECK(id < StaticVisitorBase::kVisitorIdCount);  // id is unsigned.
    callbacks_[id] = reinterpret_cast<base::AtomicWord>(callback);
  }

  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic, int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size, false),
             &Visitor::template VisitSpecialized<size>);
  }

  template <typename Visitor, StaticVisitorBase::VisitorId base,
            StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT((generic - base + StaticVisitorBase::kMinObjectSizeInWords) ==
                  10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  base::AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
};
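
// Usage sketch (hypothetical visitor, for illustration only):
//
//   VisitorDispatchTable<int (*)(Map*, HeapObject*)> table;
//   table.Register(StaticVisitorBase::kVisitByteArray,
//                  &SomeVisitor::VisitByteArray);
//   table.RegisterSpecializations<SomeVisitor,
//                                 StaticVisitorBase::kVisitStruct,
//                                 StaticVisitorBase::kVisitStructGeneric>();
//
// RegisterSpecializations fills the eight size-specialized slots (2..9
// words) plus the generic slot in one call; its STATIC_ASSERT checks that
// base and generic really bound such a range.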


template <typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  INLINE(static void IteratePointers(Heap* heap, HeapObject* object,
                                     int start_offset, int end_offset)) {
    DCHECK(!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout());
    IterateRawPointers(heap, object, start_offset, end_offset);
  }

  INLINE(static void IterateBody(Heap* heap, HeapObject* object,
                                 int start_offset, int end_offset)) {
    if (!FLAG_unbox_double_fields || object->map()->HasFastPointerLayout()) {
      IterateRawPointers(heap, object, start_offset, end_offset);
    } else {
      IterateBodyUsingLayoutDescriptor(heap, object, start_offset, end_offset);
    }
  }

 private:
  INLINE(static void IterateRawPointers(Heap* heap, HeapObject* object,
                                        int start_offset, int end_offset)) {
    StaticVisitor::VisitPointers(heap, object,
                                 HeapObject::RawField(object, start_offset),
                                 HeapObject::RawField(object, end_offset));
  }

  static void IterateBodyUsingLayoutDescriptor(Heap* heap, HeapObject* object,
                                               int start_offset,
                                               int end_offset) {
    DCHECK(FLAG_unbox_double_fields);
    DCHECK(IsAligned(start_offset, kPointerSize) &&
           IsAligned(end_offset, kPointerSize));

    LayoutDescriptorHelper helper(object->map());
    DCHECK(!helper.all_fields_tagged());
    for (int offset = start_offset; offset < end_offset;) {
      int end_of_region_offset;
      if (helper.IsTagged(offset, end_offset, &end_of_region_offset)) {
        IterateRawPointers(heap, object, offset, end_of_region_offset);
      }
      offset = end_of_region_offset;
    }
  }
};
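
// Note on IterateBodyUsingLayoutDescriptor (illustrative): with unboxed
// double fields, the object body is walked as alternating tagged/untagged
// regions; e.g. for a layout |tagged|double|tagged| only the first and
// third regions are passed on to StaticVisitor::VisitPointers.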


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    BodyVisitorBase<StaticVisitor>::IterateBody(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }

  template <int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    DCHECK(BodyDescriptor::SizeOf(map, object) == object_size);
    BodyVisitorBase<StaticVisitor>::IteratePointers(
        map->GetHeap(), object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};
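
// Example instantiation (mirrors the typedefs used further below):
//
//   typedef FlexibleBodyVisitor<SomeStaticVisitor, StructBodyDescriptor, int>
//       StructVisitor;
//
// Visit computes the size from the BodyDescriptor at runtime, while
// VisitSpecialized<N> bakes the size in at compile time for the
// size-specialized dispatch table slots.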


template <typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  INLINE(static ReturnType Visit(Map* map, HeapObject* object)) {
    BodyVisitorBase<StaticVisitor>::IterateBody(map->GetHeap(), object,
                                                BodyDescriptor::kStartOffset,
                                                BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};
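
// FixedBodyVisitor suits objects whose body layout is statically known,
// e.g. (hypothetical instantiation):
//
//   typedef FixedBodyVisitor<SomeStaticVisitor, Oddball::BodyDescriptor, void>
//       OddballVisitor;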


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by the dispatch table of this visitor because they cannot appear
// in new space.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile-time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template <typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static int IterateBody(Map* map, HeapObject* obj)) {
    return table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
                                   Object** start, Object** end)) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
  }

 private:
  INLINE(static int VisitJSFunction(Map* map, HeapObject* object)) {
    Heap* heap = map->GetHeap();
    VisitPointers(heap, object,
                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));

    // Don't visit the code entry. We are using this visitor only during
    // scavenges.

    VisitPointers(
        heap, object, HeapObject::RawField(
                          object, JSFunction::kCodeEntryOffset + kPointerSize),
        HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset));
    return JSFunction::kSize;
  }

  INLINE(static int VisitByteArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  INLINE(static int VisitFixedDoubleArray(Map* map, HeapObject* object)) {
    int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
    return FixedDoubleArray::SizeFor(length);
  }

  INLINE(static int VisitFixedTypedArray(Map* map, HeapObject* object)) {
    return reinterpret_cast<FixedTypedArrayBase*>(object)->size();
  }

  INLINE(static int VisitJSObject(Map* map, HeapObject* object)) {
    return JSObjectVisitor::Visit(map, object);
  }

  INLINE(static int VisitSeqOneByteString(Map* map, HeapObject* object)) {
    return SeqOneByteString::cast(object)
        ->SeqOneByteStringSize(map->instance_type());
  }

  INLINE(static int VisitSeqTwoByteString(Map* map, HeapObject* object)) {
    return SeqTwoByteString::cast(object)
        ->SeqTwoByteStringSize(map->instance_type());
  }

  INLINE(static int VisitFreeSpace(Map* map, HeapObject* object)) {
    return FreeSpace::cast(object)->Size();
  }

  INLINE(static int VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static int VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static int VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static int VisitBytecodeArray(Map* map, HeapObject* object));

  class DataObjectVisitor {
   public:
    template <int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    INLINE(static int Visit(Map* map, HeapObject* object)) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, int>
      StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, int>
      JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;
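
// Usage sketch (hypothetical scavenger, for illustration only): a subclass
// derives via CRTP and supplies the per-pointer hook:
//
//   class ScavengeVisitor : public StaticNewSpaceVisitor<ScavengeVisitor> {
//    public:
//     INLINE(static void VisitPointer(Heap* heap, Object** p)) {
//       // ... update *p if it points into from-space ...
//     }
//   };
//
//   // During a linear new-space walk:
//   int size = ScavengeVisitor::IterateBody(object->map(), object);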


// Base class for visitors used to transitively mark the entire heap.
// IterateBody returns nothing.
// Certain types of objects might not be handled by this base class, and
// no visitor function is registered for them by the generic initialization.
// A specialized visitor function needs to be provided by the inheriting
// class itself for those cases.
//
// This class is intended to be used in the following way:
//
//   class SomeVisitor : public StaticMarkingVisitor<SomeVisitor> {
//     ...
//   }
//
// This is an example of the Curiously Recurring Template Pattern.
template <typename StaticVisitor>
class StaticMarkingVisitor : public StaticVisitorBase {
 public:
  static void Initialize();

  INLINE(static void IterateBody(Map* map, HeapObject* obj)) {
    table_.GetVisitor(map)(map, obj);
  }

  INLINE(static void VisitPropertyCell(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCell(Map* map, HeapObject* object));
  INLINE(static void VisitCodeEntry(Heap* heap, HeapObject* object,
                                    Address entry_address));
  INLINE(static void VisitEmbeddedPointer(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCell(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitDebugTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeTarget(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitCodeAgeSequence(Heap* heap, RelocInfo* rinfo));
  INLINE(static void VisitExternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitInternalReference(RelocInfo* rinfo)) {}
  INLINE(static void VisitRuntimeEntry(RelocInfo* rinfo)) {}
  // Skip the weak next code link in a code object.
  INLINE(static void VisitNextCodeLink(Heap* heap, Object** slot)) {}

  // Mark non-optimized code for functions inlined into the given optimized
  // code. This will prevent it from being flushed.
  static void MarkInlinedFunctionsCode(Heap* heap, Code* code);

 protected:
  INLINE(static void VisitMap(Map* map, HeapObject* object));
  INLINE(static void VisitCode(Map* map, HeapObject* object));
  INLINE(static void VisitSharedFunctionInfo(Map* map, HeapObject* object));
  INLINE(static void VisitAllocationSite(Map* map, HeapObject* object));
  INLINE(static void VisitWeakCollection(Map* map, HeapObject* object));
  INLINE(static void VisitJSFunction(Map* map, HeapObject* object));
  INLINE(static void VisitJSRegExp(Map* map, HeapObject* object));
  INLINE(static void VisitJSArrayBuffer(Map* map, HeapObject* object));
  INLINE(static void VisitJSTypedArray(Map* map, HeapObject* object));
  INLINE(static void VisitJSDataView(Map* map, HeapObject* object));
  INLINE(static void VisitNativeContext(Map* map, HeapObject* object));
  INLINE(static void VisitBytecodeArray(Map* map, HeapObject* object));

  // Mark pointers in a Map and its TransitionArray together, possibly
  // treating transitions or back pointers weakly.
  static void MarkMapContents(Heap* heap, Map* map);
  static void MarkTransitionArray(Heap* heap, TransitionArray* transitions);

  // Mark pointers in the optimized code map that should act as strong
  // references, possibly treating some entries weakly.
  static void MarkOptimizedCodeMap(Heap* heap, FixedArray* code_map);

  // Code flushing support.
  INLINE(static bool IsFlushable(Heap* heap, JSFunction* function));
  INLINE(static bool IsFlushable(Heap* heap, SharedFunctionInfo* shared_info));

  // Helpers used by code flushing support that visit pointer fields and treat
  // references to code objects either strongly or weakly.
  static void VisitSharedFunctionInfoStrongCode(Heap* heap, HeapObject* object);
  static void VisitSharedFunctionInfoWeakCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionStrongCode(Heap* heap, HeapObject* object);
  static void VisitJSFunctionWeakCode(Heap* heap, HeapObject* object);

  class DataObjectVisitor {
   public:
    template <int size>
    static inline void VisitSpecialized(Map* map, HeapObject* object) {}

    INLINE(static void Visit(Map* map, HeapObject* object)) {}
  };

  typedef FlexibleBodyVisitor<StaticVisitor, FixedArray::BodyDescriptor, void>
      FixedArrayVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, JSObject::BodyDescriptor, void>
      JSObjectVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor, StructBodyDescriptor, void>
      StructObjectVisitor;

  typedef void (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template <typename StaticVisitor>
VisitorDispatchTable<typename StaticMarkingVisitor<StaticVisitor>::Callback>
    StaticMarkingVisitor<StaticVisitor>::table_;
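
// Usage sketch (hypothetical marker, for illustration only): a subclass
// supplies the pointer-visiting hook that actually colors objects:
//
//   class MarkingVisitor : public StaticMarkingVisitor<MarkingVisitor> {
//    public:
//     INLINE(static void VisitPointers(Heap* heap, HeapObject* object,
//                                      Object** start, Object** end)) {
//       // ... mark each *p and push newly marked objects on a work list ...
//     }
//   };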


class WeakObjectRetainer;


// A weak list is a singly linked list where each element has a weak pointer
// to the next element. Given the head of the list, this function removes
// dead elements from the list and, if requested, records slots for the
// next-element pointers. The template parameter T is a WeakListVisitor that
// defines how to access the next-element pointers.
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
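
// Sketch of the WeakListVisitor contract that T is expected to provide (the
// actual specializations live in objects-visiting.cc; SomeType is
// illustrative):
//
//   template <>
//   struct WeakListVisitor<SomeType> {
//     static void SetWeakNext(SomeType* obj, Object* next);
//     static Object* WeakNext(SomeType* obj);
//     static int WeakNextOffset();
//     static void VisitLiveObject(Heap* heap, SomeType* obj,
//                                 WeakObjectRetainer* retainer);
//     static void VisitPhantomObject(Heap* heap, SomeType* obj);
//   };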
}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_H_