// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {
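

// Selects the visitor id for an object of the given instance type and size.
// The id is used to index the static visitors' dispatch tables, picking a
// body visitor specialized for the object's layout.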
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize, has_unboxed_fields);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize, has_unboxed_fields);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize, has_unboxed_fields);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize, has_unboxed_fields);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize, has_unboxed_fields);

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, in which case we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}
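

// Traits class: each specialization below describes how a given type T embeds
// its weak-next link (getter, setter, and field offset) and how live and
// phantom list elements are visited.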
template <class T>
struct WeakListVisitor;
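

// Walks a weak list of T, asking the WeakObjectRetainer which elements to
// keep. Dead elements are unlinked; the weak-next slots of retained elements
// are rewired and recorded when the collector is compacting.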
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
                      bool stop_after_young) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // Retained object is new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}
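

// Walks the weak list of new space array buffer views. Views that have left
// new space are unlinked from this list and prepended to the weak_first_view
// list of their backing JSArrayBuffer; views still in new space are kept on
// the list.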
Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
                                         WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* previous = undefined;
  Object* head = undefined;
  Object* next;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  for (Object* o = list; o != undefined;) {
    JSArrayBufferView* view = JSArrayBufferView::cast(o);
    next = view->weak_next();
    if (!heap->InNewSpace(view)) {
      if (previous != undefined) {
        // We are in the middle of the list, skip the old space element.
        JSArrayBufferView* previous_view = JSArrayBufferView::cast(previous);
        previous_view->set_weak_next(next);
        if (record_slots) {
          Object** next_slot = HeapObject::RawField(
              previous_view, JSArrayBufferView::kWeakNextOffset);
          collector->RecordSlot(next_slot, next_slot, next);
        }
      }
      // Add the view to the list of views of its buffer.
      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
      view->set_weak_next(buffer->weak_first_view());
      if (record_slots) {
        Object** next_slot =
            HeapObject::RawField(view, JSArrayBufferView::kWeakNextOffset);
        collector->RecordSlot(next_slot, next_slot, buffer->weak_first_view());
      }
      buffer->set_weak_first_view(view);
      if (record_slots) {
        Object** slot =
            HeapObject::RawField(buffer, JSArrayBuffer::kWeakFirstViewOffset);
        heap->mark_compact_collector()->RecordSlot(slot, slot, view);
      }
    } else {
      // We found a valid new space view, remember it.
      previous = view;
      if (head == undefined) {
        // We are at the list head.
        head = view;
      }
    }
    o = next;
  }
  return head;
}
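

// Resets the weak-next field of every list element to undefined, used when
// the owner of the whole list is itself dead.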
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}
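

// Specializations of WeakListVisitor for each type that is chained into a
// weak list.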
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    // Code objects are always allocated in Code space; we do not have to
    // visit them during scavenges.
    if (heap->gc_state() == Heap::MARK_COMPACT) {
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer,
                       Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head =
        VisitWeakList<T>(heap, context->get(index), retainer, false);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


template <>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


template <>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }

  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    // Visit the buffer's weak list of views, dropping dead views.
    Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
        heap, array_buffer->weak_first_view(), retainer, false);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(array_buffer,
                                           JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};
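

// Explicit instantiations of VisitWeakList for the weak list types traversed
// by the GC.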
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer,
                                        bool stop_after_young);

template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
                                              WeakObjectRetainer* retainer,
                                              bool stop_after_young);

template Object* VisitWeakList<JSArrayBufferView>(Heap* heap, Object* list,
                                                  WeakObjectRetainer* retainer,
                                                  bool stop_after_young);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer,
                                               bool stop_after_young);
}
}  // namespace v8::internal