deps: backport 8d6a228 from v8 upstream
author    Fedor Indutny <fedor@indutny.com>
          Tue, 27 Oct 2015 16:54:42 +0000 (12:54 -0400)
committer James M Snell <jasnell@gmail.com>
          Thu, 29 Oct 2015 15:38:43 +0000 (08:38 -0700)
Original commit message:

    [heap] fix crash during the scavenge of ArrayBuffer

    Scavenger should not attempt to visit ArrayBuffer's storage; it is a
    user-supplied pointer that may have any alignment. Visiting it may
    result in a crash.

    BUG=
    R=jochen

    Review URL: https://codereview.chromium.org/1406133003

    Cr-Commit-Position: refs/heads/master@{#31611}

PR-URL: https://github.com/nodejs/node/pull/3549
Reviewed-By: Trevor Norris <trev.norris@gmail.com>
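
The hazard is easiest to see from the embedder's side: `v8::ArrayBuffer::New()`
accepts an arbitrary, caller-owned backing store, so the scavenger has no
alignment or tagging guarantees about that pointer. A minimal sketch of such
embedder code (hypothetical, not part of this patch):

    // Hypothetical embedder code (not from the patch) showing why the
    // backing store must be skipped: the pointer handed to V8 is raw,
    // caller-owned memory with no heap-object alignment or tagging.
    #include "include/v8.h"

    void MakeExternalBuffer(v8::Isolate* isolate) {
      v8::HandleScope scope(isolate);
      static uint8_t storage[64];  // any address is legal here
      v8::Local<v8::ArrayBuffer> ab =
          v8::ArrayBuffer::New(isolate, storage, sizeof(storage));
      // Before this fix, a scavenge could walk the JSArrayBuffer's body
      // blindly and try to interpret `storage` as a tagged heap pointer.
      (void)ab;
    }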
deps/v8/src/heap/heap.cc
deps/v8/src/heap/heap.h
deps/v8/test/cctest/test-api.cc
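
The heart of the fix is the new `Heap::IteratePointersToFromSpace()` in the
heap.cc hunk below: for a `JSArrayBuffer` it visits the tagged slots up
through `kByteLengthOffset`, skips the region holding the raw `backing_store`
pointer, and resumes at `JSArrayBuffer::kSize`. A condensed sketch of that
skip pattern (placeholder offsets and names, not V8's real layout constants):

    // Sketch of the two-range visit used in the hunk below. Offsets are
    // illustrative placeholders, not the real JSArrayBuffer constants.
    #include <cstdint>

    using Address = uint8_t*;
    using SlotCallback = void (*)(Address slot);

    void VisitSkippingRawRegion(Address obj, int object_size,
                                int raw_begin, int raw_end,
                                SlotCallback visit) {
      // Tagged slots before the raw backing-store field.
      for (Address s = obj; s < obj + raw_begin; s += sizeof(Address))
        visit(s);
      // [raw_begin, raw_end) holds a user-supplied pointer; never visit it.
      // Tagged slots after the fixed-size header.
      for (Address s = obj + raw_end; s < obj + object_size;
           s += sizeof(Address))
        visit(s);
    }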

diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 5bcc909..6bc200a 100644
@@ -2079,40 +2079,8 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
         // for pointers to from semispace instead of looking for pointers
         // to new space.
         DCHECK(!target->IsMap());
-        Address obj_address = target->address();
-
-        // We are not collecting slots on new space objects during mutation
-        // thus we have to scan for pointers to evacuation candidates when we
-        // promote objects. But we should not record any slots in non-black
-        // objects. Grey object's slots would be rescanned.
-        // White object might not survive until the end of collection
-        // it would be a violation of the invariant to record it's slots.
-        bool record_slots = false;
-        if (incremental_marking()->IsCompacting()) {
-          MarkBit mark_bit = Marking::MarkBitFrom(target);
-          record_slots = Marking::IsBlack(mark_bit);
-        }
-#if V8_DOUBLE_FIELDS_UNBOXING
-        LayoutDescriptorHelper helper(target->map());
-        bool has_only_tagged_fields = helper.all_fields_tagged();
-
-        if (!has_only_tagged_fields) {
-          for (int offset = 0; offset < size;) {
-            int end_of_region_offset;
-            if (helper.IsTagged(offset, size, &end_of_region_offset)) {
-              IterateAndMarkPointersToFromSpace(
-                  record_slots, obj_address + offset,
-                  obj_address + end_of_region_offset, &ScavengeObject);
-            }
-            offset = end_of_region_offset;
-          }
-        } else {
-#endif
-          IterateAndMarkPointersToFromSpace(
-              record_slots, obj_address, obj_address + size, &ScavengeObject);
-#if V8_DOUBLE_FIELDS_UNBOXING
-        }
-#endif
+
+        IteratePointersToFromSpace(target, size, &ScavengeObject);
       }
     }
 
@@ -5263,6 +5231,67 @@ void Heap::IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
 }
 
 
+void Heap::IteratePointersToFromSpace(HeapObject* target, int size,
+                                      ObjectSlotCallback callback) {
+  Address obj_address = target->address();
+
+  // We are not collecting slots on new space objects during mutation,
+  // thus we have to scan for pointers to evacuation candidates when we
+  // promote objects. But we should not record any slots in non-black
+  // objects. A grey object's slots would be rescanned anyway. A white
+  // object might not survive until the end of the collection, so it
+  // would be a violation of the invariant to record its slots.
+  bool record_slots = false;
+  if (incremental_marking()->IsCompacting()) {
+    MarkBit mark_bit = Marking::MarkBitFrom(target);
+    record_slots = Marking::IsBlack(mark_bit);
+  }
+
+  // Do not scavenge JSArrayBuffer's contents.
+  switch (target->ContentType()) {
+    case HeapObjectContents::kTaggedValues: {
+      IterateAndMarkPointersToFromSpace(record_slots, obj_address,
+          obj_address + size, callback);
+      break;
+    }
+    case HeapObjectContents::kMixedValues: {
+      if (target->IsFixedTypedArrayBase()) {
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + FixedTypedArrayBase::kBasePointerOffset,
+            obj_address + FixedTypedArrayBase::kHeaderSize, callback);
+      } else if (target->IsJSArrayBuffer()) {
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address,
+            obj_address + JSArrayBuffer::kByteLengthOffset + kPointerSize,
+            callback);
+        IterateAndMarkPointersToFromSpace(
+            record_slots, obj_address + JSArrayBuffer::kSize,
+            obj_address + size, callback);
+#if V8_DOUBLE_FIELDS_UNBOXING
+      } else if (FLAG_unbox_double_fields) {
+        LayoutDescriptorHelper helper(target->map());
+        DCHECK(!helper.all_fields_tagged());
+
+        for (int offset = 0; offset < size;) {
+          int end_of_region_offset;
+          if (helper.IsTagged(offset, size, &end_of_region_offset)) {
+            IterateAndMarkPointersToFromSpace(
+                record_slots, obj_address + offset,
+                obj_address + end_of_region_offset, callback);
+          }
+          offset = end_of_region_offset;
+        }
+#endif
+      }
+      break;
+    }
+    case HeapObjectContents::kRawValues: {
+      break;
+    }
+  }
+}
+
+
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
   IterateWeakRoots(v, mode);
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index 0f0cfc1..0afac31 100644
@@ -953,6 +953,9 @@ class Heap {
 
   // Iterate pointers to from semispace of new space found in memory interval
   // from start to end.
+  void IteratePointersToFromSpace(HeapObject* target, int size,
+                                  ObjectSlotCallback callback);
+
   void IterateAndMarkPointersToFromSpace(bool record_slots, Address start,
                                          Address end,
                                          ObjectSlotCallback callback);
diff --git a/deps/v8/test/cctest/test-api.cc b/deps/v8/test/cctest/test-api.cc
index 88d4aef..058241d 100644
@@ -14191,6 +14191,32 @@ THREADED_TEST(SkipArrayBufferBackingStoreDuringGC) {
 }
 
 
+THREADED_TEST(SkipArrayBufferDuringScavenge) {
+  LocalContext env;
+  v8::Isolate* isolate = env->GetIsolate();
+  v8::HandleScope handle_scope(isolate);
+
+  // Make sure the pointer looks like a heap object
+  Local<v8::Object> tmp = v8::Object::New(isolate);
+  uint8_t* store_ptr =
+      reinterpret_cast<uint8_t*>(*reinterpret_cast<uintptr_t*>(*tmp));
+
+  // Make `store_ptr` point to from space
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+
+  // Create ArrayBuffer with pointer-that-cannot-be-visited in the backing store
+  Local<v8::ArrayBuffer> ab = v8::ArrayBuffer::New(isolate, store_ptr, 8);
+
+  // Should not crash,
+  // i.e. backing store pointer should not be treated as a heap object pointer
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
+  CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
+
+  // Use `ab` to silence compiler warning
+  CHECK_EQ(ab->GetContents().Data(), store_ptr);
+}
+
+
 THREADED_TEST(SharedUint8Array) {
   i::FLAG_harmony_sharedarraybuffer = true;
   TypedArrayTestHelper<uint8_t, v8::Uint8Array, i::ExternalUint8Array,