dst->address() + BytecodeArray::kConstantPoolOffset;
RecordMigratedSlot(Memory::Object_at(constant_pool_slot),
constant_pool_slot);
+ } else if (src->IsJSArrayBuffer()) {
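+ // An ArrayBuffer keeps a raw backing store pointer between its tagged
+ // fields, so record migrated slots only for the ranges around it.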
+ heap()->MoveBlock(dst->address(), src->address(), size);
+
+ // Visit the inherited JSObject properties and the ArrayBuffer's byte length
+ Address regular_slot =
+ dst->address() + JSArrayBuffer::BodyDescriptor::kStartOffset;
+ Address regular_slots_end =
+ dst->address() + JSArrayBuffer::kByteLengthOffset + kPointerSize;
+ while (regular_slot < regular_slots_end) {
+ RecordMigratedSlot(Memory::Object_at(regular_slot), regular_slot);
+ regular_slot += kPointerSize;
+ }
+
+ // Skip the backing store pointer and visit only the internal fields
+ Address internal_field_slot = dst->address() + JSArrayBuffer::kSize;
+ Address internal_fields_end =
+ dst->address() + JSArrayBuffer::kSizeWithInternalFields;
+ while (internal_field_slot < internal_fields_end) {
+ RecordMigratedSlot(Memory::Object_at(internal_field_slot),
+ internal_field_slot);
+ internal_field_slot += kPointerSize;
+ }
} else if (FLAG_unbox_double_fields) {
Address dst_addr = dst->address();
Address src_addr = src->address();
} else if (object->IsBytecodeArray()) {
return static_cast<int>(slot - object->address()) ==
BytecodeArray::kConstantPoolOffset;
+ } else if (object->IsJSArrayBuffer()) {
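+ // A slot is valid only within the two tagged ranges: the inherited
+ // JSObject fields through the byte length, and the internal fields.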
+ int off = static_cast<int>(slot - object->address());
+ return (off >= JSArrayBuffer::BodyDescriptor::kStartOffset &&
+ off <= JSArrayBuffer::kByteLengthOffset) ||
+ (off >= JSArrayBuffer::kSize &&
+ off < JSArrayBuffer::kSizeWithInternalFields);
} else if (FLAG_unbox_double_fields) {
// Filter out slots that happen to point to unboxed double fields.
LayoutDescriptorHelper helper(object->map());
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- VisitPointers(
- heap, object,
- HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
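+ // Delegate to the shared body iterator, which skips the raw backing
+ // store pointer.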
+ JSArrayBuffer::JSArrayBufferIterateBody<
+ StaticNewSpaceVisitor<StaticVisitor> >(heap, object);
if (!JSArrayBuffer::cast(object)->is_external()) {
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
}
Map* map, HeapObject* object) {
Heap* heap = map->GetHeap();
- StaticVisitor::VisitPointers(
- heap, object,
- HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
- HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
+ JSArrayBuffer::JSArrayBufferIterateBody<StaticVisitor>(heap, object);
if (!JSArrayBuffer::cast(object)->is_external() &&
!heap->InNewSpace(object)) {
heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
case JS_VALUE_TYPE:
case JS_DATE_TYPE:
case JS_ARRAY_TYPE:
- case JS_ARRAY_BUFFER_TYPE:
case JS_TYPED_ARRAY_TYPE:
case JS_DATA_VIEW_TYPE:
case JS_SET_TYPE:
case JS_MESSAGE_OBJECT_TYPE:
JSObject::BodyDescriptor::IterateBody(this, object_size, v);
break;
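+ // ArrayBuffers need a dedicated body visitor that skips the raw
+ // backing store pointer.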
+ case JS_ARRAY_BUFFER_TYPE:
+ JSArrayBuffer::JSArrayBufferIterateBody(this, v);
+ break;
case JS_FUNCTION_TYPE:
reinterpret_cast<JSFunction*>(this)
->JSFunctionIterateBody(object_size, v);
obj_address + BytecodeArray::kConstantPoolOffset,
obj_address + BytecodeArray::kHeaderSize,
slot_callback);
+ } else if (heap_object->IsJSArrayBuffer()) {
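+ // Scan the two tagged ranges separately so the raw backing store
+ // pointer between them is never treated as a new-space slot.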
+ FindPointersToNewSpaceInRegion(
+ obj_address +
+ JSArrayBuffer::BodyDescriptor::kStartOffset,
+ obj_address + JSArrayBuffer::kByteLengthOffset +
+ kPointerSize,
+ slot_callback);
+ FindPointersToNewSpaceInRegion(
+ obj_address + JSArrayBuffer::kSize,
+ obj_address + JSArrayBuffer::kSizeWithInternalFields,
+ slot_callback);
} else if (FLAG_unbox_double_fields) {
LayoutDescriptorHelper helper(heap_object->map());
DCHECK(!helper.all_fields_tagged());
} else if (type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return HeapObjectContents::kMixedValues;
+ } else if (type == JS_ARRAY_BUFFER_TYPE) {
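+ // ArrayBuffers mix tagged fields with the raw backing store pointer.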
+ return HeapObjectContents::kMixedValues;
} else if (type <= LAST_DATA_TYPE) {
// TODO(jochen): Why do we claim that Code and Map contain only raw values?
return HeapObjectContents::kRawValues;
}
+// static
+template <typename StaticVisitor>
+void JSArrayBuffer::JSArrayBufferIterateBody(Heap* heap, HeapObject* obj) {
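+ // Visit the tagged fields up to and including the byte length; the
+ // range ends just before the backing store pointer.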
+ StaticVisitor::VisitPointers(
+ heap, obj,
+ HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset),
+ HeapObject::RawField(obj,
+ JSArrayBuffer::kByteLengthOffset + kPointerSize));
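+ // Resume after the backing store pointer and bit field, visiting the
+ // embedder's internal fields.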
+ StaticVisitor::VisitPointers(
+ heap, obj, HeapObject::RawField(obj, JSArrayBuffer::kSize),
+ HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields));
+}
+
+
+void JSArrayBuffer::JSArrayBufferIterateBody(HeapObject* obj,
+ ObjectVisitor* v) {
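+ // Non-template version: visit the same two tagged ranges, skipping
+ // the backing store pointer.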
+ v->VisitPointers(
+ HeapObject::RawField(obj, JSArrayBuffer::BodyDescriptor::kStartOffset),
+ HeapObject::RawField(obj,
+ JSArrayBuffer::kByteLengthOffset + kPointerSize));
+ v->VisitPointers(
+ HeapObject::RawField(obj, JSArrayBuffer::kSize),
+ HeapObject::RawField(obj, JSArrayBuffer::kSizeWithInternalFields));
+}
+
+
Object* JSArrayBufferView::byte_offset() const {
if (WasNeutered()) return Smi::FromInt(0);
return Object::cast(READ_FIELD(this, kByteOffsetOffset));
DECLARE_PRINTER(JSArrayBuffer)
DECLARE_VERIFIER(JSArrayBuffer)
- static const int kBackingStoreOffset = JSObject::kHeaderSize;
- static const int kByteLengthOffset = kBackingStoreOffset + kPointerSize;
- static const int kBitFieldSlot = kByteLengthOffset + kPointerSize;
+ static const int kByteLengthOffset = JSObject::kHeaderSize;
+
+ // NOTE: the GC visits the object's fields in two ranges:
+ // 1. From JSObject::BodyDescriptor::kStartOffset to
+ //    kByteLengthOffset + kPointerSize, i.e. everything up to but not
+ //    including the backing store.
+ // 2. From the start of the internal fields to their end.
+ static const int kBackingStoreOffset = kByteLengthOffset + kPointerSize;
+ static const int kBitFieldSlot = kBackingStoreOffset + kPointerSize;
#if V8_TARGET_LITTLE_ENDIAN || !V8_HOST_ARCH_64_BIT
static const int kBitFieldOffset = kBitFieldSlot;
#else
static const int kSizeWithInternalFields =
kSize + v8::ArrayBuffer::kInternalFieldCount * kPointerSize;
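+ // Body iterators that visit only tagged fields, skipping the raw
+ // backing store pointer (see the NOTE above kBackingStoreOffset).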
+ template <typename StaticVisitor>
+ static inline void JSArrayBufferIterateBody(Heap* heap, HeapObject* obj);
+
+ static inline void JSArrayBufferIterateBody(HeapObject* obj,
+ ObjectVisitor* v);
+
class IsExternal : public BitField<bool, 1, 1> {};
class IsNeuterable : public BitField<bool, 2, 1> {};
class WasNeutered : public BitField<bool, 3, 1> {};
}
+THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
+ LocalContext env;
+ v8::Isolate* isolate = env->GetIsolate();
+ v8::HandleScope handle_scope(isolate);
+
+ // Use a value with the heap object tag: if the GC ever visited this
+ // slot, it would treat the value as a (bogus) heap object.
+ uint8_t* store_ptr = reinterpret_cast<uint8_t*>(i::kHeapObjectTag);
+
+ // Create an ArrayBuffer whose backing store holds this unvisitable
+ // pointer.
+ Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+ // Should not crash
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
+ CcTest::heap()->CollectAllGarbage();
+ CcTest::heap()->CollectAllGarbage();
+
+ // The GC must not have moved or updated the backing store pointer
+ CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
THREADED_TEST(SharedUint8Array) {
i::FLAG_harmony_sharedarraybuffer = true;
TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::FixedUint8Array,