// for pointers to from semispace instead of looking for pointers
// to new space.
DCHECK(!target->IsMap());
- Address obj_address = target->address();
-
- // We are not collecting slots on new space objects during mutation
- // thus we have to scan for pointers to evacuation candidates when we
- // promote objects. But we should not record any slots in non-black
- // objects. Grey object's slots would be rescanned.
- // White object might not survive until the end of collection
- // it would be a violation of the invariant to record it's slots.
- bool record_slots = false;
- if (incremental_marking()->IsCompacting()) {
- MarkBit mark_bit = Marking::MarkBitFrom(target);
- record_slots = Marking::IsBlack(mark_bit);
- }
-#if V8_DOUBLE_FIELDS_UNBOXING
- LayoutDescriptorHelper helper(target->map());
- bool has_only_tagged_fields = helper.all_fields_tagged();
-
- if (!has_only_tagged_fields) {
- for (int offset = 0; offset < size;) {
- int end_of_region_offset;
- if (helper.IsTagged(offset, size, &end_of_region_offset)) {
- IterateAndMarkPointersToFromSpace(
- record_slots, obj_address + offset,
- obj_address + end_of_region_offset, &ScavengeObject);
- }
- offset = end_of_region_offset;
- }
- } else {
-#endif
- IterateAndMarkPointersToFromSpace(
- record_slots, obj_address, obj_address + size, &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
- }
-#endif
+
+ IteratePointersToFromSpace(target, size, &ScavengeObject);
}
}
}
+// Scavenge-time iteration over a promoted object's body, invoking `callback`
+// only on slots that may legitimately hold tagged heap pointers (and thus
+// pointers into from-space). Raw-data regions -- typed-array element data,
+// the JSArrayBuffer backing-store word, unboxed double fields -- are skipped
+// so their bit patterns are never misread as heap object pointers.
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. Grey object's slots would be rescanned.
+  // White object might not survive until the end of collection
+  // it would be a violation of the invariant to record it's slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      // Every word in the body is a tagged slot; scan the whole object.
+      IterateAndMarkPointersToFromSpace(record_slots, obj_address,
+                                        obj_address + size, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        // Only the region between the base-pointer field and the end of the
+        // header is tagged; the element data after the header is raw.
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        // Visit the fields before and after the backing store, skipping the
+        // backing-store pointer itself: it is an external (off-heap) address
+        // and must not be treated as a tagged pointer.
+        // NOTE(review): this assumes the only raw word lies between
+        // kByteLengthOffset + kPointerSize and kSize -- confirm against the
+        // JSArrayBuffer field layout.
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            callback);
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + JSArrayBuffer::kSize,
+            obj_address + size, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        // Walk the layout descriptor region by region, visiting only the
+        // tagged regions; unboxed double fields are skipped.
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                record_slots, obj_address + offset,
+                obj_address + end_of_region_offset, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      // Body contains no tagged slots at all; nothing to visit.
+      break;
+    }
+  }
+}
+
+
// Visits all GC roots: strong roots first, then weak roots, passing the
// same visitor and mode to both.
void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
  IterateStrongRoots(v, mode);
  IterateWeakRoots(v, mode);
}
+// Regression test: the scavenger must not interpret a JSArrayBuffer's
+// external backing-store pointer as a tagged heap pointer. We deliberately
+// construct a backing store whose address looks like a from-space object and
+// check that two scavenges complete without crashing.
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
THREADED_TEST(SharedUint8Array) {
i::FLAG_harmony_sharedarraybuffer = true;
TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::ExternalUint8Array,