1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_OBJECTS_VISITING_H_
29 #define V8_OBJECTS_VISITING_H_
31 #include "allocation.h"
33 // This file provides base classes and auxiliary methods for defining
34 // static object visitors used during GC.
35 // Visiting HeapObject body with a normal ObjectVisitor requires performing
36 // two switches on object's instance type to determine object size and layout
37 // and one or more virtual method calls on visitor itself.
38 // Static visitor is different: it provides a dispatch table which contains
39 // pointers to specialized visit functions. Each map has the visitor_id
40 // field which contains an index of specialized visitor to use.
46 // Base class for all static visitors.
// NOTE(review): this listing is elided — gaps in the embedded original line
// numbers (e.g. 53-55, 71-77, 93-107, 131-132) show that enum members, the
// `public:` specifier, parameter lists and closing braces are missing from
// this view; comments below describe only what is visible.
47 class StaticVisitorBase : public AllStatic {
// VisitorId indexes the dispatch table of specialized visit callbacks; each
// Map caches one of these ids (see GetVisitor(Map*) in VisitorDispatchTable)
// so GC dispatch is a table lookup rather than a switch on instance type.
50 kVisitSeqAsciiString = 0,
51 kVisitSeqTwoByteString,
52 kVisitShortcutCandidate,
56 kVisitFixedDoubleArray,
59 // For data objects, JS objects and structs along with generic visitor which
60 // can visit object of any size we provide visitors specialized by
61 // object size in words.
62 // Ids of specialized visitors are declared in a linear order (without
63 // holes) starting from the id of visitor specialized for 2 words objects
64 // (base visitor id) and ending with the id of generic visitor.
65 // Method GetVisitorIdForSize depends on this ordering to calculate visitor
66 // id of specialized visitor from given instance size, base visitor id and
67 // generic visitor's id.
70 kVisitDataObject2 = kVisitDataObject,
78 kVisitDataObjectGeneric,
81 kVisitJSObject2 = kVisitJSObject,
89 kVisitJSObjectGeneric,
92 kVisitStruct2 = kVisitStruct,
108 kVisitSharedFunctionInfo,
// Smallest object size (in words) that gets its own specialized visitor;
// GetVisitorIdForSize subtracts this when computing a specialization id.
114 kMinObjectSizeInWords = 2
117 // Visitor ID should fit in one byte.
118 STATIC_ASSERT(kVisitorIdCount <= 256);
120 // Determine which specialized visitor should be used for given instance type
121 // and instance size.
122 static VisitorId GetVisitorId(int instance_type, int instance_size);
// Convenience overload: derives type and size from the map itself.
124 static VisitorId GetVisitorId(Map* map) {
125 return GetVisitorId(map->instance_type(), map->instance_size());
128 // For visitors that allow specialization by size calculate VisitorId based
129 // on size, base visitor id and generic visitor id.
130 static VisitorId GetVisitorIdForSize(VisitorId base,
// NOTE(review): the remaining parameters (`generic`, `object_size`) are
// declared on lines elided from this view (original lines 131-132).
133 ASSERT((base == kVisitDataObject) ||
134 (base == kVisitStruct) ||
135 (base == kVisitJSObject));
136 ASSERT(IsAligned(object_size, kPointerSize));
137 ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
138 ASSERT(object_size < Page::kMaxNonCodeHeapObjectSize);
// Specialized ids are laid out contiguously after `base`, one id per word
// of object size, so: id = base + size_in_words - kMinObjectSizeInWords.
140 const VisitorId specialization = static_cast<VisitorId>(
141 base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);
// Sizes too large for a specialization clamp to the generic visitor's id.
143 return Min(specialization, generic);
// Fixed-size table mapping StaticVisitorBase::VisitorId -> callback pointer.
// NOTE(review): this listing is elided — gaps in the embedded original line
// numbers (e.g. 150, 157-159, 188, 199-201) show that access specifiers,
// closing braces and the STATIC_ASSERT opener are missing from this view.
148 template<typename Callback>
149 class VisitorDispatchTable {
// Copies another table entry-by-entry using atomic no-barrier stores, so a
// concurrent reader of callbacks_ always observes a valid function pointer.
151 void CopyFrom(VisitorDispatchTable* other) {
152 // We are not using memcpy to guarantee that during update
153 // every element of callbacks_ array will remain correct
154 // pointer (memcpy might be implemented as a byte copying loop).
155 for (int i = 0; i < StaticVisitorBase::kVisitorIdCount; i++) {
156 NoBarrier_Store(&callbacks_[i], other->callbacks_[i]);
// Look up the callback registered for an explicit visitor id.
160 inline Callback GetVisitorById(StaticVisitorBase::VisitorId id) {
161 return reinterpret_cast<Callback>(callbacks_[id]);
// Look up the callback via the visitor id cached in the map.
164 inline Callback GetVisitor(Map* map) {
165 return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
// Install `callback` as the handler for `id`.
168 void Register(StaticVisitorBase::VisitorId id, Callback callback) {
169 ASSERT(id < StaticVisitorBase::kVisitorIdCount); // id is unsigned.
170 callbacks_[id] = reinterpret_cast<AtomicWord>(callback);
// Registers Visitor::VisitSpecialized<size> for one particular object size,
// at the id computed by GetVisitorIdForSize.
173 template<typename Visitor,
174 StaticVisitorBase::VisitorId base,
175 StaticVisitorBase::VisitorId generic,
176 int object_size_in_words>
177 void RegisterSpecialization() {
178 static const int size = object_size_in_words * kPointerSize;
179 Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
180 &Visitor::template VisitSpecialized<size>);
// Registers specializations for every size from 2 to 9 words plus the
// generic fallback — exactly the 10 consecutive ids asserted below.
184 template<typename Visitor,
185 StaticVisitorBase::VisitorId base,
186 StaticVisitorBase::VisitorId generic>
187 void RegisterSpecializations() {
// NOTE(review): the STATIC_ASSERT( opener (original line 188) is elided here.
189 (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
190 RegisterSpecialization<Visitor, base, generic, 2>();
191 RegisterSpecialization<Visitor, base, generic, 3>();
192 RegisterSpecialization<Visitor, base, generic, 4>();
193 RegisterSpecialization<Visitor, base, generic, 5>();
194 RegisterSpecialization<Visitor, base, generic, 6>();
195 RegisterSpecialization<Visitor, base, generic, 7>();
196 RegisterSpecialization<Visitor, base, generic, 8>();
197 RegisterSpecialization<Visitor, base, generic, 9>();
198 Register(generic, &Visitor::Visit);
// Backing storage; AtomicWord entries so CopyFrom's stores are word-atomic.
202 AtomicWord callbacks_[StaticVisitorBase::kVisitorIdCount];
// Shared helper for the body visitors below: converts a byte-offset range
// inside `object` into an Object** slot range and forwards it to
// StaticVisitor::VisitPointers.
206 template<typename StaticVisitor>
207 class BodyVisitorBase : public AllStatic {
209 INLINE(static void IteratePointers(Heap* heap,
// NOTE(review): the remaining parameters (the object and the start/end
// offsets added to object->address() below) are declared on lines elided
// from this view (original lines 210-212), as are the offset operands on
// the two reinterpret_cast lines (original lines 214 and 216).
213 Object** start_slot = reinterpret_cast<Object**>(object->address() +
215 Object** end_slot = reinterpret_cast<Object**>(object->address() +
217 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
// Body visitor for objects whose size is not known statically; the size is
// computed per-object via BodyDescriptor::SizeOf(map, object).
// NOTE(review): several argument lines of the IteratePointers calls
// (original lines 228-229, 231, 239-240, 242) and the closing braces are
// elided from this view.
222 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
223 class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
// Generic entry point: computes the size dynamically, visits the pointer
// fields starting at kStartOffset, and returns the object size.
225 static inline ReturnType Visit(Map* map, HeapObject* object) {
226 int object_size = BodyDescriptor::SizeOf(map, object);
227 BodyVisitorBase<StaticVisitor>::IteratePointers(
230 BodyDescriptor::kStartOffset,
232 return static_cast<ReturnType>(object_size);
// Size-specialized entry point: `object_size` is a compile-time constant
// (registered via RegisterSpecialization); the ASSERT checks it matches the
// dynamically computed size.
235 template<int object_size>
236 static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
237 ASSERT(BodyDescriptor::SizeOf(map, object) == object_size);
238 BodyVisitorBase<StaticVisitor>::IteratePointers(
241 BodyDescriptor::kStartOffset,
243 return static_cast<ReturnType>(object_size);
// Body visitor for objects with a statically known, fixed layout: visits
// the pointer fields in [kStartOffset, kEndOffset) and returns the
// compile-time constant BodyDescriptor::kSize.
// NOTE(review): the heap/object arguments of the IteratePointers call
// (original lines 253-254) and the closing braces are elided from this view.
248 template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
249 class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
251 static inline ReturnType Visit(Map* map, HeapObject* object) {
252 BodyVisitorBase<StaticVisitor>::IteratePointers(
255 BodyDescriptor::kStartOffset,
256 BodyDescriptor::kEndOffset);
257 return static_cast<ReturnType>(BodyDescriptor::kSize);
262 // Base class for visitors used for a linear new space iteration.
263 // IterateBody returns size of visited object.
264 // Certain types of objects (i.e. Code objects) are not handled
265 // by dispatch table of this visitor because they cannot appear
// NOTE(review): the tail of the sentence above and part of the usage example
// (original lines 266-267, 269, 271-273) are elided from this view, as are
// various member lines flagged below.
268 // This class is intended to be used in the following way:
270 // class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
274 // This is an example of Curiously recurring template pattern
275 // (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
276 // We use CRTP to guarantee aggressive compile time optimizations (i.e.
277 // inlining and specialization of StaticVisitor::VisitPointers methods).
278 template<typename StaticVisitor>
279 class StaticNewSpaceVisitor : public StaticVisitorBase {
// Populates table_ with the visit callbacks; definition not in this view.
281 static void Initialize();
// Dispatch: looks up the callback for the object's cached visitor id, runs
// it, and returns the size of the visited object in bytes.
283 static inline int IterateBody(Map* map, HeapObject* obj) {
284 return table_.GetVisitor(map)(map, obj);
// Visits each slot in [start, end) individually via the derived visitor.
287 static inline void VisitPointers(Heap* heap, Object** start, Object** end) {
288 for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(heap, p);
// JSFunction is visited as two pointer ranges that bracket the code entry
// slot, so the code entry itself is skipped (see comment below). The two
// VisitPointers call openers (original lines 294 and 299-301) are elided.
292 static inline int VisitJSFunction(Map* map, HeapObject* object) {
293 Heap* heap = map->GetHeap();
295 HeapObject::RawField(object, JSFunction::kPropertiesOffset),
296 HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
298 // Don't visit code entry. We are using this visitor only during scavenges.
302 HeapObject::RawField(object,
303 JSFunction::kCodeEntryOffset + kPointerSize),
304 HeapObject::RawField(object,
305 JSFunction::kNonWeakFieldsEndOffset));
306 return JSFunction::kSize;
// ByteArray: no pointer fields visited here; just reports its size.
309 static inline int VisitByteArray(Map* map, HeapObject* object) {
310 return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
// FixedDoubleArray: size derived from its element count; no slots visited.
313 static inline int VisitFixedDoubleArray(Map* map, HeapObject* object) {
314 int length = reinterpret_cast<FixedDoubleArray*>(object)->length();
315 return FixedDoubleArray::SizeFor(length);
// Delegates to the FlexibleBodyVisitor typedef'd as JSObjectVisitor below.
318 static inline int VisitJSObject(Map* map, HeapObject* object) {
319 return JSObjectVisitor::Visit(map, object);
// Sequential strings: size computed from the instance type; no slots.
322 static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
323 return SeqAsciiString::cast(object)->
324 SeqAsciiStringSize(map->instance_type());
327 static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
328 return SeqTwoByteString::cast(object)->
329 SeqTwoByteStringSize(map->instance_type());
332 static inline int VisitFreeSpace(Map* map, HeapObject* object) {
333 return FreeSpace::cast(object)->Size();
// Data objects contain no pointers: visiting only reports the size (the
// bodies of VisitSpecialized, original lines 340-342, are elided here).
336 class DataObjectVisitor {
338 template<int object_size>
339 static inline int VisitSpecialized(Map* map, HeapObject* object) {
343 static inline int Visit(Map* map, HeapObject* object) {
344 return map->instance_size();
// NOTE(review): the ReturnType argument and typedef name of this first
// typedef (original lines 350-351) are elided from this view.
348 typedef FlexibleBodyVisitor<StaticVisitor,
349 StructBodyDescriptor,
352 typedef FlexibleBodyVisitor<StaticVisitor,
353 JSObject::BodyDescriptor,
354 int> JSObjectVisitor;
// Signature of an entry in the dispatch table: visits one object, returns
// its size in bytes.
356 typedef int (*Callback)(Map* map, HeapObject* object);
358 static VisitorDispatchTable<Callback> table_;
// Out-of-class definition of the static dispatch table; one instance per
// StaticNewSpaceVisitor<StaticVisitor> instantiation.
362 template<typename StaticVisitor>
363 VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
364 StaticNewSpaceVisitor<StaticVisitor>::table_;
367 } } // namespace v8::internal
369 #endif // V8_OBJECTS_VISITING_H_