deps: update v8 to 4.3.61.21
deps/v8/src/heap/objects-visiting.cc
// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {


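// Maps an object's instance type (and, for fixed-size layouts, its instance
// size) to the VisitorId used to index the static visitor dispatch tables.
// When |has_unboxed_fields| is set, GetVisitorIdForSize falls back to the
// generic visitor, since the size-specialized visitors assume every field is
// a tagged pointer.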
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize, has_unboxed_fields);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize, has_unboxed_fields);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize, has_unboxed_fields);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize, has_unboxed_fields);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so in that case slots have to be recorded manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


template <class T>
struct WeakListVisitor;


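// Generic traversal of a weak list threaded through objects of type T. The
// WeakListVisitor<T> specializations below supply the accessors for each
// type's weak-next field. Elements for which |retainer| returns NULL are
// unlinked; live elements are re-linked (recording slots when compacting)
// and handed to VisitLiveObject. Returns the new list head. Note that
// |stop_after_young| is accepted for the instantiations at the end of this
// file but is not consulted by this version of the traversal.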
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
                      bool stop_after_young) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if it contains at least one element.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


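// Walks the weak list of array buffer views allocated in new space. Views
// that have been promoted to old space are unlinked from this list and
// prepended to their buffer's weak_first_view list; views still in new space
// are kept. Returns the new head of the new-space list.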
Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
                                         WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* previous = undefined;
  Object* head = undefined;
  Object* next;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  for (Object* o = list; o != undefined;) {
    JSArrayBufferView* view = JSArrayBufferView::cast(o);
    next = view->weak_next();
    if (!heap->InNewSpace(view)) {
      if (previous != undefined) {
        // We are in the middle of the list; skip the old-space element.
        JSArrayBufferView* previous_view = JSArrayBufferView::cast(previous);
        previous_view->set_weak_next(next);
        if (record_slots) {
          Object** next_slot = HeapObject::RawField(
              previous_view, JSArrayBufferView::kWeakNextOffset);
          collector->RecordSlot(next_slot, next_slot, next);
        }
      }
      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
      view->set_weak_next(buffer->weak_first_view());
      if (record_slots) {
        Object** next_slot =
            HeapObject::RawField(view, JSArrayBufferView::kWeakNextOffset);
        collector->RecordSlot(next_slot, next_slot, buffer->weak_first_view());
      }
      buffer->set_weak_first_view(view);
      if (record_slots) {
        Object** slot =
            HeapObject::RawField(buffer, JSArrayBuffer::kWeakFirstViewOffset);
        collector->RecordSlot(slot, slot, view);
      }
    } else {
      // We found a valid new-space view; remember it.
      previous = view;
      if (head == undefined) {
        // We are at the list head.
        head = view;
      }
    }
    o = next;
  }
  return head;
}


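// Severs a weak list by resetting every element's weak-next field to
// undefined, without visiting or retaining the elements.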
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


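// WeakListVisitor specializations. Each one tells the generic traversal how
// to read and write the weak-next field of one list element type, and what
// to do when an element is retained (VisitLiveObject) or dropped
// (VisitPhantomObject).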
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


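// Contexts keep their weak next pointer in the NEXT_CONTEXT_LINK slot of the
// underlying FixedArray, so the field offset is computed with
// FixedArray::SizeFor rather than read from a named field constant.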
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    // Code objects are always allocated in Code space, so we do not have to
    // visit them during scavenges.
    if (heap->gc_state() == Heap::MARK_COMPACT) {
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head =
        VisitWeakList<T>(heap, context->get(index), retainer, false);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


template <>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


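// A live JSArrayBuffer in turn owns the weak list of its views, which is
// pruned in VisitLiveObject; a dead buffer releases its backing store via
// Runtime::FreeArrayBuffer.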
template <>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }

  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
        heap, array_buffer->weak_first_view(), retainer, false);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(array_buffer,
                                           JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


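// Explicit instantiations for the element types whose weak lists the heap
// processes after garbage collection. A minimal sketch of a call site, as it
// might appear in heap.cc (the exact caller is not part of this file):
//
//   Object* head = VisitWeakList<Context>(
//       heap, heap->native_contexts_list(), retainer,
//       false /* stop_after_young */);
//   heap->set_native_contexts_list(head);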
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer,
                                        bool stop_after_young);


template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
                                              WeakObjectRetainer* retainer,
                                              bool stop_after_young);

template Object* VisitWeakList<JSArrayBufferView>(Heap* heap, Object* list,
                                                  WeakObjectRetainer* retainer,
                                                  bool stop_after_young);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer,
                                               bool stop_after_young);
}  // namespace internal
}  // namespace v8