Reland of "Remove ExternalArray, derived types, and element kinds"
platform/upstream/v8.git: src/heap/objects-visiting.cc
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {

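// Maps an instance type (together with the instance size and whether the
// object's map may carry unboxed double fields) to the id of the static
// visitor specialized for that kind of object.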
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize, has_unboxed_fields);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize, has_unboxed_fields);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize, has_unboxed_fields);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize, has_unboxed_fields);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case FLOAT32X4_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}

// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}

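// WeakListVisitor<T> describes how objects of type T are chained into a weak
// list. Each specialization below provides the following contract (sketch):
//
//   static void SetWeakNext(T* obj, Object* next);  // write the link field
//   static Object* WeakNext(T* obj);                // read the link field
//   static int WeakNextOffset();                    // byte offset of the field
//   static void VisitLiveObject(Heap*, T*, WeakObjectRetainer*);
//   static void VisitPhantomObject(Heap*, T*);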
template <class T>
struct WeakListVisitor;

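// Walks a weak list whose elements are chained through the next-element field
// described by WeakListVisitor<T>. Elements dropped by the retainer are
// unlinked; surviving elements are relinked, and the updated next-element
// slots are recorded with the collector when compaction requires it. Returns
// the new head of the list.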
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // The tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it contains one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}

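// Severs every link in the given weak list, leaving each element with an
// undefined next-element field.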
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}

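// JSFunctions are linked through their next_function_link field; no extra
// work is needed for live or phantom functions.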
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};

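// Code objects are linked through their next_code_link field.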
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};

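// Contexts are linked through their NEXT_CONTEXT_LINK slot. A live context in
// turn owns weak lists of optimized functions and of optimized and
// deoptimized code, which are pruned here as well; a dead context's lists are
// simply cleared.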
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    // Code objects are always allocated in Code space, so we do not have to
    // visit them during scavenges.
    if (heap->gc_state() == Heap::MARK_COMPACT) {
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};

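// AllocationSites are linked through their weak_next field; no extra work is
// needed for live or phantom sites.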
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};

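// Explicit instantiations for the weak list types traversed from outside this
// file; the JSFunction and Code lists are reached only through the Context
// visitor above.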
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8