// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/ic-inl.h"
#include "src/objects-visiting.h"

namespace v8 {
namespace internal {


static inline bool IsShortcutCandidate(int type) {
  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
}


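// Maps an instance type (plus the instance size for variable-sized objects)
// to the VisitorId used by the static visitor dispatch tables. Strings are
// dispatched on their representation and encoding bits; all other objects are
// dispatched on their instance type, with fixed-size categories further
// specialized by size via GetVisitorIdForSize().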
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type,
    int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject,
                                   kVisitDataObjectGeneric,
                                   instance_size);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSSet::kSize);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSMap::kSize);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSProxy::kSize);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 JSFunctionProxy::kSize);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 Foreign::kSize);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case FLOAT32x4_TYPE:
    case FLOAT64x2_TYPE:
    case INT32x4_TYPE:
      return GetVisitorIdForSize(kVisitJSObject,
                                 kVisitJSObjectGeneric,
                                 instance_size);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size)                     \
    case EXTERNAL_##TYPE##_ARRAY_TYPE:

    TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 instance_size);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_INT32x4_ARRAY_TYPE:
    case FIXED_FLOAT32x4_ARRAY_TYPE:
    case FIXED_FLOAT64x2_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE:
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so in that case slots have to be recorded manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


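// WeakListVisitor<T> is a traits template. Each specialization below tells
// the generic weak-list routines how to read and write T's weak-next link
// (WeakNext/SetWeakNext/WeakNextOffset) and what extra work to do for list
// elements that stay alive (VisitLiveObject) or are about to be dropped
// (VisitPhantomObject).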
template <class T>
struct WeakListVisitor;


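// Walks the weak list starting at |list|, asking |retainer| which elements
// should be kept. Surviving elements are re-linked into a new list (with the
// updated weak-next slots recorded when the collector is compacting) and
// passed to VisitLiveObject(); dropped elements are passed to
// VisitPhantomObject(). Returns the new list head, or undefined if nothing
// survived.
//
// A typical call site (a sketch only; the real callers live in the heap's
// weak-reference processing code, and the accessor names below are assumed):
//
//   Object* head =
//       VisitWeakList<Context>(heap, heap->native_contexts_list(), retainer);
//   heap->set_native_contexts_list(head);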
template <class T>
Object* VisitWeakList(Heap* heap,
                      Object* list,
                      WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        ASSERT(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // Retained object is the new tail.
      ASSERT(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


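// Unlinks every element of the given weak list by resetting its weak-next
// field to undefined, without invoking the retainer or any visitor hooks.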
template <class T>
static void ClearWeakList(Heap* heap,
                          Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


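// JSFunctions are chained through their next-function-link field; no extra
// work is needed for live or phantom functions.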
template<>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() {
    return JSFunction::kNextFunctionLinkOffset;
  }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


template<>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) {
    return code->next_code_link();
  }

  static int WeakNextOffset() {
    return Code::kNextCodeLinkOffset;
  }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


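// Contexts carry three embedded weak lists (optimized functions, optimized
// code and deoptimized code). A live context walks and re-links each of them;
// a phantom context simply clears them.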
template<>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK,
                 next,
                 UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap,
                              Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
        Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  template<class T>
  static void DoWeakList(Heap* heap,
                         Context* context,
                         WeakObjectRetainer* retainer,
                         int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot = HeapObject::RawField(
          context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(
          head_slot, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
        context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


template<>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBufferView::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


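// A live JSArrayBuffer additionally walks its weak list of views; a phantom
// one has its backing store released via Runtime::FreeArrayBuffer().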
template<>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return JSArrayBuffer::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap* heap,
                              JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj =
        VisitWeakList<JSArrayBufferView>(
            heap,
            array_buffer->weak_first_view(),
            retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(
          array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


template<>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) {
    return obj->weak_next();
  }

  static int WeakNextOffset() {
    return AllocationSite::kWeakNextOffset;
  }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


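// Explicit instantiations of VisitWeakList for the weak list element types
// used by the GC, so the template definition can stay in this translation
// unit while callers elsewhere link against these instances.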
template Object* VisitWeakList<Code>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(
    Heap* heap, Object* list, WeakObjectRetainer* retainer);

} }  // namespace v8::internal