Just visit young array buffers during scavenge. Additionally keep the views in new...
author     hpayer <hpayer@chromium.org>
           Wed, 11 Mar 2015 10:57:53 +0000 (03:57 -0700)
committer  Commit bot <commit-bot@chromium.org>
           Wed, 11 Mar 2015 10:58:03 +0000 (10:58 +0000)
BUG=

Review URL: https://codereview.chromium.org/990423004

Cr-Commit-Position: refs/heads/master@{#27128}

src/factory.cc
src/heap/heap.cc
src/heap/heap.h
src/heap/objects-visiting.cc
src/heap/objects-visiting.h
src/objects.cc
src/runtime/runtime-typedarray.cc
src/serialize.cc
test/cctest/test-weaktypedarrays.cc

index f36cf21eff54cebd43545925e140064f746c44df..aa213fd99f6dd10412fb5845af42a10bf6ba9272 100644 (file)
@@ -1790,8 +1790,14 @@ void SetupArrayBufferView(i::Isolate* isolate,
 
   obj->set_buffer(*buffer);
 
-  obj->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*obj);
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*obj)) {
+    obj->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*obj);
+  } else {
+    obj->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*obj);
+  }
 
   i::Handle<i::Object> byte_offset_object =
       isolate->factory()->NewNumberFromSize(byte_offset);
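
The pattern introduced in SetupArrayBufferView above recurs in objects.cc and runtime-typedarray.cc below: a freshly created view is threaded onto the heap-global list of new-space views while it is still young, and onto its buffer's own weak list otherwise. The following is a minimal, self-contained sketch of that registration step; the struct and function names are illustrative stand-ins, not V8's real classes.

#include <cassert>

// Illustrative stand-ins for the V8 objects involved; names are hypothetical.
struct View;

struct Buffer {
  View* weak_first_view = nullptr;  // per-buffer weak list of views
};

struct View {
  Buffer* buffer = nullptr;
  View* weak_next = nullptr;  // intrusive "weak_next" link
  bool in_new_space = true;   // stand-in for Heap::InNewSpace(view)
};

struct Heap {
  View* new_array_buffer_views_list = nullptr;  // global list of young views
};

// Mirrors the conditional above: young views go on the heap-global list,
// old views go directly onto the buffer's weak list.
void RegisterView(Heap* heap, Buffer* buffer, View* view) {
  view->buffer = buffer;
  if (view->in_new_space) {
    view->weak_next = heap->new_array_buffer_views_list;
    heap->new_array_buffer_views_list = view;
  } else {
    view->weak_next = buffer->weak_first_view;
    buffer->weak_first_view = view;
  }
}

int main() {
  Heap heap;
  Buffer buffer;
  View young, old;
  old.in_new_space = false;
  RegisterView(&heap, &buffer, &young);
  RegisterView(&heap, &buffer, &old);
  assert(heap.new_array_buffer_views_list == &young);
  assert(buffer.weak_first_view == &old);
}

Keeping the young views on a single global list means a scavenge never has to walk every buffer's view list just to find the few views that are actually in new space, which is the point of this change.
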
index 010b3726dfc01d949c0e47b9c0edd0f8dce4d3f0..e0dd8b63b2dd0ea3f073aff50f896110d7fcae9c 100644 (file)
@@ -143,7 +143,9 @@ Heap::Heap()
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
       deserialization_complete_(false),
-      concurrent_sweeping_enabled_(false) {
+      concurrent_sweeping_enabled_(false),
+      migration_failure_(false),
+      previous_migration_failure_(false) {
 // Allow build-time customization of the max semispace size. Building
 // V8 with snapshots and a non-default max semispace size is much
 // easier if you can define it as part of the build environment.
@@ -737,6 +739,13 @@ void Heap::GarbageCollectionEpilogue() {
   // Remember the last top pointer so that we can later find out
   // whether we allocated in new space since the last GC.
   new_space_top_after_last_gc_ = new_space()->top();
+
+  if (migration_failure_) {
+    set_previous_migration_failure(true);
+  } else {
+    set_previous_migration_failure(false);
+  }
+  set_migration_failure(false);
 }
 
 
@@ -1738,29 +1747,63 @@ void Heap::UpdateReferencesInExternalStringTable(
 
 
 void Heap::ProcessAllWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, false);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
   ProcessAllocationSites(retainer);
 }
 
 
 void Heap::ProcessYoungWeakReferences(WeakObjectRetainer* retainer) {
-  ProcessArrayBuffers(retainer);
+  ProcessArrayBuffers(retainer, true);
+  ProcessNewArrayBufferViews(retainer);
   ProcessNativeContexts(retainer);
 }
 
 
 void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
-  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
+  Object* head =
+      VisitWeakList<Context>(this, native_contexts_list(), retainer, false);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }
 
 
-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
-  Object* array_buffer_obj =
-      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
+                               bool stop_after_young) {
+  Object* array_buffer_obj = VisitWeakList<JSArrayBuffer>(
+      this, array_buffers_list(), retainer, stop_after_young);
   set_array_buffers_list(array_buffer_obj);
+
+#ifdef DEBUG
+  // Verify the invariant that young array buffers come before old array
+  // buffers in the array buffers list if there was no migration failure.
+  Object* undefined = undefined_value();
+  Object* next = array_buffers_list();
+  bool old_objects_recorded = false;
+  if (migration_failure()) return;
+  while (next != undefined) {
+    if (!old_objects_recorded) {
+      old_objects_recorded = !InNewSpace(next);
+    }
+    DCHECK((InNewSpace(next) && !old_objects_recorded) || !InNewSpace(next));
+    next = JSArrayBuffer::cast(next)->weak_next();
+  }
+#endif
+}
+
+
+void Heap::ProcessNewArrayBufferViews(WeakObjectRetainer* retainer) {
+  // Retain the list of new space views.
+  Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
+      this, new_array_buffer_views_list_, retainer, false);
+  set_new_array_buffer_views_list(typed_array_obj);
+
+  // Some objects in the list may be in old space now. Find them
+  // and move them to the corresponding array buffer.
+  Object* view = VisitNewArrayBufferViewsWeakList(
+      this, new_array_buffer_views_list_, retainer);
+  set_new_array_buffer_views_list(view);
 }
 
 
@@ -1776,8 +1819,8 @@ void Heap::TearDownArrayBuffers() {
 
 
 void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
-  Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
+  Object* allocation_site_obj = VisitWeakList<AllocationSite>(
+      this, allocation_sites_list(), retainer, false);
   set_allocation_sites_list(allocation_site_obj);
 }
 
@@ -2189,6 +2232,7 @@ class ScavengingVisitor : public StaticVisitorBase {
       if (SemiSpaceCopyObject<alignment>(map, slot, object, object_size)) {
         return;
       }
+      heap->set_migration_failure(true);
     }
 
     if (PromoteObject<object_contents, alignment>(map, slot, object,
@@ -5542,6 +5586,7 @@ bool Heap::CreateHeapObjects() {
 
   set_native_contexts_list(undefined_value());
   set_array_buffers_list(undefined_value());
+  set_new_array_buffer_views_list(undefined_value());
   set_allocation_sites_list(undefined_value());
   return true;
 }
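
Two details of the heap.cc changes are worth spelling out. GarbageCollectionEpilogue carries the current cycle's migration failure over into previous_migration_failure_ and clears the flag for the next cycle; the if/else above is equivalent to a plain assignment, as in this sketch (hypothetical free function and struct, not V8 code):

#include <cassert>

// Sketch of the flag hand-off in the epilogue: the current cycle's result
// becomes "previous" and the current flag is reset for the next cycle.
struct ScavengeFlags {
  bool migration_failure = false;
  bool previous_migration_failure = false;
};

void RotateMigrationFlags(ScavengeFlags* flags) {
  flags->previous_migration_failure = flags->migration_failure;
  flags->migration_failure = false;
}

int main() {
  ScavengeFlags flags;
  flags.migration_failure = true;
  RotateMigrationFlags(&flags);
  assert(flags.previous_migration_failure && !flags.migration_failure);
}

Both flags must be false before VisitWeakList may rely on the young-before-old ordering of the array buffers list and stop early (see the stop_after_young check in objects-visiting.cc below). ProcessNewArrayBufferViews then runs in two passes: it first retains the new-space views and afterwards re-links the promoted ones onto their buffers.
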
index 3720c82fff9aa7d1a617e56a1e8989b18ea3fb6b..b6d792cc05b49bb9a586db87615dc07c6d3db156 100644 (file)
@@ -870,6 +870,13 @@ class Heap {
   void set_array_buffers_list(Object* object) { array_buffers_list_ = object; }
   Object* array_buffers_list() const { return array_buffers_list_; }
 
+  void set_new_array_buffer_views_list(Object* object) {
+    new_array_buffer_views_list_ = object;
+  }
+  Object* new_array_buffer_views_list() const {
+    return new_array_buffer_views_list_;
+  }
+
   void set_allocation_sites_list(Object* object) {
     allocation_sites_list_ = object;
   }
@@ -1469,6 +1476,18 @@ class Heap {
 
   bool deserialization_complete() const { return deserialization_complete_; }
 
+  bool migration_failure() const { return migration_failure_; }
+  void set_migration_failure(bool migration_failure) {
+    migration_failure_ = migration_failure;
+  }
+
+  bool previous_migration_failure() const {
+    return previous_migration_failure_;
+  }
+  void set_previous_migration_failure(bool previous_migration_failure) {
+    previous_migration_failure_ = previous_migration_failure;
+  }
+
  protected:
   // Methods made available to tests.
 
@@ -1636,11 +1655,16 @@ class Heap {
   bool inline_allocation_disabled_;
 
   // Weak list heads, threaded through the objects.
-  // List heads are initilized lazily and contain the undefined_value at start.
+  // List heads are initialized lazily and contain the undefined_value at start.
   Object* native_contexts_list_;
   Object* array_buffers_list_;
   Object* allocation_sites_list_;
 
+  // This is a global list of array buffer views in new space. When the views
+  // get promoted, they are removed from the list and added to the corresponding
+  // array buffer.
+  Object* new_array_buffer_views_list_;
+
   // List of encountered weak collections (JSWeakMap and JSWeakSet) during
   // marking. It is initialized during marking, destroyed after marking and
   // contains Smi(0) while marking is not active.
@@ -1973,7 +1997,8 @@ class Heap {
   void MarkCompactEpilogue();
 
   void ProcessNativeContexts(WeakObjectRetainer* retainer);
-  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
+  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool stop_after_young);
+  void ProcessNewArrayBufferViews(WeakObjectRetainer* retainer);
   void ProcessAllocationSites(WeakObjectRetainer* retainer);
 
   // Deopts all code that contains allocation instruction which are tenured or
@@ -2135,6 +2160,13 @@ class Heap {
 
   bool concurrent_sweeping_enabled_;
 
+  // A migration failure indicates that a semi-space copy of an object during
+  // a scavenge failed and the object got promoted instead.
+  bool migration_failure_;
+
+  // True if a migration failure happened in the previous scavenge.
+  bool previous_migration_failure_;
+
   friend class AlwaysAllocateScope;
   friend class Deserializer;
   friend class Factory;
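
The two flags guard an ordering invariant on the array buffers weak list: as long as neither the current nor the previous scavenge had a migration failure, entries allocated in new space precede entries in old space, so a young-generation pass may stop at the first old-space entry. A tiny standalone check of that invariant, mirroring the DCHECK loop in Heap::ProcessArrayBuffers (the function name below is made up):

#include <cassert>
#include <vector>

// Each element stands for one entry of the weak list; true == in new space.
// Once an old-space entry has been seen, no new-space entry may follow.
bool YoungEntriesComeFirst(const std::vector<bool>& in_new_space) {
  bool old_seen = false;
  for (bool young : in_new_space) {
    if (!young) old_seen = true;
    if (young && old_seen) return false;
  }
  return true;
}

int main() {
  assert(YoungEntriesComeFirst({true, true, false, false}));
  assert(!YoungEntriesComeFirst({true, false, true}));  // broken by a migration failure
}
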
index 7b2e2d9a388939440e0aac0bb5c3263fedc4676f..9d6d99ccae22bdaca59d02d04838498c6c4c482c 100644 (file)
@@ -191,15 +191,19 @@ struct WeakListVisitor;
 
 
 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
   MarkCompactCollector* collector = heap->mark_compact_collector();
   bool record_slots = MustRecordSlots(heap);
+
   while (list != undefined) {
     // Check whether to keep the candidate in the list.
     T* candidate = reinterpret_cast<T*>(list);
+    T* original_candidate = candidate;
+
     Object* retained = retainer->RetainAs(list);
     if (retained != NULL) {
       if (head == undefined) {
@@ -220,9 +224,21 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
       candidate = reinterpret_cast<T*>(retained);
       tail = candidate;
 
-
       // tail is a live object, visit it.
       WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
+
+      // The list of weak objects is usually ordered: it starts with objects
+      // recently allocated in the young generation, followed by objects
+      // allocated in the old generation. After a migration failure the list
+      // stays unordered until the next GC that completes without a migration
+      // failure.
+      // For young generation collections we therefore only have to visit the
+      // young generation objects at the head of the list.
+      if (stop_after_young && !heap->migration_failure() &&
+          !heap->previous_migration_failure() &&
+          !heap->InNewSpace(original_candidate)) {
+        return head;
+      }
     } else {
       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
     }
@@ -239,6 +255,56 @@ Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
 }
 
 
+Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
+                                         WeakObjectRetainer* retainer) {
+  Object* undefined = heap->undefined_value();
+  Object* previous = undefined;
+  Object* head = undefined;
+  Object* next;
+  MarkCompactCollector* collector = heap->mark_compact_collector();
+  bool record_slots = MustRecordSlots(heap);
+
+  for (Object* o = list; o != undefined;) {
+    JSArrayBufferView* view = JSArrayBufferView::cast(o);
+    next = view->weak_next();
+    if (!heap->InNewSpace(view)) {
+      if (previous != undefined) {
+        // We are in the middle of the list, skip the old space element.
+        JSArrayBufferView* previous_view = JSArrayBufferView::cast(previous);
+        previous_view->set_weak_next(next);
+        if (record_slots) {
+          Object** next_slot = HeapObject::RawField(
+              previous_view, JSArrayBufferView::kWeakNextOffset);
+          collector->RecordSlot(next_slot, next_slot, next);
+        }
+      }
+      JSArrayBuffer* buffer = JSArrayBuffer::cast(view->buffer());
+      view->set_weak_next(buffer->weak_first_view());
+      if (record_slots) {
+        Object** next_slot =
+            HeapObject::RawField(view, JSArrayBufferView::kWeakNextOffset);
+        collector->RecordSlot(next_slot, next_slot, buffer->weak_first_view());
+      }
+      buffer->set_weak_first_view(view);
+      if (record_slots) {
+        Object** slot =
+            HeapObject::RawField(buffer, JSArrayBuffer::kWeakFirstViewOffset);
+        collector->RecordSlot(slot, slot, view);
+      }
+    } else {
+      // We found a valid new space view, remember it.
+      previous = view;
+      if (head == undefined) {
+        // We are at the list head.
+        head = view;
+      }
+    }
+    o = next;
+  }
+  return head;
+}
+
+
 template <class T>
 static void ClearWeakList(Heap* heap, Object* list) {
   Object* undefined = heap->undefined_value();
@@ -316,7 +382,8 @@ struct WeakListVisitor<Context> {
   static void DoWeakList(Heap* heap, Context* context,
                          WeakObjectRetainer* retainer, int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
+    Object* list_head =
+        VisitWeakList<T>(heap, context->get(index), retainer, false);
 
     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
@@ -368,7 +435,7 @@ struct WeakListVisitor<JSArrayBuffer> {
   static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                               WeakObjectRetainer* retainer) {
     Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
-        heap, array_buffer->weak_first_view(), retainer);
+        heap, array_buffer->weak_first_view(), retainer, false);
     array_buffer->set_weak_first_view(typed_array_obj);
     if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
       Object** slot = HeapObject::RawField(array_buffer,
@@ -399,23 +466,21 @@ struct WeakListVisitor<AllocationSite> {
 };
 
 
-template Object* VisitWeakList<Code>(Heap* heap, Object* list,
-                                     WeakObjectRetainer* retainer);
-
-
-template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
-                                           WeakObjectRetainer* retainer);
-
-
 template Object* VisitWeakList<Context>(Heap* heap, Object* list,
-                                        WeakObjectRetainer* retainer);
+                                        WeakObjectRetainer* retainer,
+                                        bool stop_after_young);
 
 
 template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
-                                              WeakObjectRetainer* retainer);
+                                              WeakObjectRetainer* retainer,
+                                              bool stop_after_young);
 
+template Object* VisitWeakList<JSArrayBufferView>(Heap* heap, Object* list,
+                                                  WeakObjectRetainer* retainer,
+                                                  bool stop_after_young);
 
 template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
-                                               WeakObjectRetainer* retainer);
+                                               WeakObjectRetainer* retainer,
+                                               bool stop_after_young);
 }
 }  // namespace v8::internal
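
The new VisitNewArrayBufferViewsWeakList pass is essentially a filtered unlink-and-repush over an intrusive singly-linked list: views that survived the scavenge but were promoted to old space are spliced out of the heap-global list and pushed onto their buffer's weak_first_view list, while views still in new space stay on the global list. A simplified, self-contained model of that splice (retention and slot recording are omitted; the types are illustrative, not V8's):

#include <cassert>

// Illustrative stand-ins for the V8 objects; names are hypothetical.
struct View;
struct Buffer { View* weak_first_view = nullptr; };
struct View {
  Buffer* buffer = nullptr;
  View* weak_next = nullptr;
  bool in_new_space = true;
};

// Walk the global new-space list, keep views that are still young, and move
// views that were promoted onto their buffer's own weak list. Returns the
// new head of the global list.
View* RelinkPromotedViews(View* list) {
  View* head = nullptr;
  View* previous = nullptr;
  for (View* view = list; view != nullptr;) {
    View* next = view->weak_next;
    if (!view->in_new_space) {
      // Promoted: unlink from the global list and push onto the buffer.
      if (previous != nullptr) previous->weak_next = next;
      view->weak_next = view->buffer->weak_first_view;
      view->buffer->weak_first_view = view;
    } else {
      previous = view;
      if (head == nullptr) head = view;
    }
    view = next;
  }
  return head;
}

int main() {
  Buffer buffer;
  View promoted, young;
  promoted.buffer = &buffer;
  promoted.in_new_space = false;
  young.buffer = &buffer;
  young.weak_next = &promoted;  // global list: young -> promoted
  View* head = RelinkPromotedViews(&young);
  assert(head == &young);
  assert(young.weak_next == nullptr);
  assert(buffer.weak_first_view == &promoted);
}

The stop_after_young flag on VisitWeakList is independent of this splice: it only lets a young-generation collection stop walking the array buffers list once the ordered young prefix has been processed.
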
index a442867569606c1d4471a3ee2720a1fc89cf5b13..30005ec622ace5c0fff41e1d80d67bd0a165c8d4 100644 (file)
@@ -489,7 +489,10 @@ class WeakObjectRetainer;
 // pointers. The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer,
+                      bool stop_after_young);
+Object* VisitNewArrayBufferViewsWeakList(Heap* heap, Object* list,
+                                         WeakObjectRetainer* retainer);
 }
 }  // namespace v8::internal
 
index 8b4662d1899fc7634c22376eccccbcad0419fc90..ae8409e13f2bac649e97a83596bf854735ce324c 100644 (file)
@@ -17008,8 +17008,15 @@ Handle<JSArrayBuffer> JSTypedArray::MaterializeArrayBuffer(
           fixed_typed_array->length(), typed_array->type(),
           static_cast<uint8_t*>(buffer->backing_store()));
 
-  buffer->set_weak_first_view(*typed_array);
-  DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*typed_array)) {
+    DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+    typed_array->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*typed_array);
+  } else {
+    buffer->set_weak_first_view(*typed_array);
+    DCHECK(typed_array->weak_next() == isolate->heap()->undefined_value());
+  }
   typed_array->set_buffer(*buffer);
   JSObject::SetMapAndElements(typed_array, new_map, new_elements);
 
index 82224bc9b3e8021d6b780e7af2343f75b9bb65c0..59c417f5b7c92ef548e2370703f94109dc670a66 100644 (file)
@@ -88,6 +88,8 @@ bool Runtime::SetupArrayBufferAllocatingData(Isolate* isolate,
 
 void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
   Isolate* isolate = array_buffer->GetIsolate();
+  // First, iterate over the views that are referenced directly by the array
+  // buffer.
   for (Handle<Object> view_obj(array_buffer->weak_first_view(), isolate);
        !view_obj->IsUndefined();) {
     Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
@@ -100,6 +102,24 @@ void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
     }
     view_obj = handle(view->weak_next(), isolate);
   }
+
+  // Second, iterate over the global list of new space views to find views
+  // that belong to the array buffer being neutered.
+  Heap* heap = isolate->heap();
+  for (Handle<Object> view_obj(heap->new_array_buffer_views_list(), isolate);
+       !view_obj->IsUndefined();) {
+    Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
+    if (view->buffer() == *array_buffer) {
+      if (view->IsJSTypedArray()) {
+        JSTypedArray::cast(*view)->Neuter();
+      } else if (view->IsJSDataView()) {
+        JSDataView::cast(*view)->Neuter();
+      } else {
+        UNREACHABLE();
+      }
+    }
+    view_obj = handle(view->weak_next(), isolate);
+  }
   array_buffer->Neuter();
 }
 
@@ -265,11 +285,18 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitialize) {
   holder->set_byte_offset(*byte_offset_object);
   holder->set_byte_length(*byte_length_object);
 
+  Heap* heap = isolate->heap();
   if (!maybe_buffer->IsNull()) {
     Handle<JSArrayBuffer> buffer = Handle<JSArrayBuffer>::cast(maybe_buffer);
     holder->set_buffer(*buffer);
-    holder->set_weak_next(buffer->weak_first_view());
-    buffer->set_weak_first_view(*holder);
+
+    if (heap->InNewSpace(*holder)) {
+      holder->set_weak_next(heap->new_array_buffer_views_list());
+      heap->set_new_array_buffer_views_list(*holder);
+    } else {
+      holder->set_weak_next(buffer->weak_first_view());
+      buffer->set_weak_first_view(*holder);
+    }
 
     Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
         static_cast<int>(length), array_type,
@@ -367,8 +394,15 @@ RUNTIME_FUNCTION(Runtime_TypedArrayInitializeFromArrayLike) {
       isolate->factory()->NewNumberFromSize(byte_length));
   holder->set_byte_length(*byte_length_obj);
   holder->set_length(*length_obj);
-  holder->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*holder);
+
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*holder)) {
+    holder->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*holder);
+  } else {
+    holder->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*holder);
+  }
 
   Handle<ExternalArray> elements = isolate->factory()->NewExternalArray(
       static_cast<int>(length), array_type,
@@ -542,8 +576,14 @@ RUNTIME_FUNCTION(Runtime_DataViewInitialize) {
   holder->set_byte_offset(*byte_offset);
   holder->set_byte_length(*byte_length);
 
-  holder->set_weak_next(buffer->weak_first_view());
-  buffer->set_weak_first_view(*holder);
+  Heap* heap = isolate->heap();
+  if (heap->InNewSpace(*holder)) {
+    holder->set_weak_next(heap->new_array_buffer_views_list());
+    heap->set_new_array_buffer_views_list(*holder);
+  } else {
+    holder->set_weak_next(buffer->weak_first_view());
+    buffer->set_weak_first_view(*holder);
+  }
 
   return isolate->heap()->undefined_value();
 }
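
Because a buffer's views can now live on either of two lists, Runtime::NeuterArrayBuffer has to visit both: the buffer's own weak list, and the heap-global new-space list filtered by the buffer each view points back to. A compact sketch of that shape (illustrative types, no handles or typed-array/data-view dispatch):

#include <cassert>

// Illustrative stand-ins; names are hypothetical.
struct View;
struct Buffer { View* weak_first_view = nullptr; };
struct View {
  Buffer* buffer = nullptr;
  View* weak_next = nullptr;
  bool neutered = false;
};

// First neuter the views on the buffer's own list, then scan the global
// new-space list and neuter the views that belong to this buffer.
void NeuterBufferViews(Buffer* buffer, View* new_space_views_list) {
  for (View* view = buffer->weak_first_view; view != nullptr;
       view = view->weak_next) {
    view->neutered = true;
  }
  for (View* view = new_space_views_list; view != nullptr;
       view = view->weak_next) {
    if (view->buffer == buffer) view->neutered = true;
  }
}

int main() {
  Buffer buffer;
  View old_view, young_view, other;
  old_view.buffer = &buffer;
  buffer.weak_first_view = &old_view;  // on the buffer's list
  young_view.buffer = &buffer;         // on the global new-space list
  young_view.weak_next = &other;       // 'other' belongs to some other buffer
  NeuterBufferViews(&buffer, &young_view);
  assert(old_view.neutered && young_view.neutered && !other.neutered);
}
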
index 9146ca7e204b804ccd5be5af30589ba2204a2109..bab0b2598af47e30c3f5e1b76d25ce87965a7ba4 100644 (file)
@@ -572,6 +572,8 @@ void Deserializer::Deserialize(Isolate* isolate) {
       isolate_->heap()->undefined_value());
   isolate_->heap()->set_array_buffers_list(
       isolate_->heap()->undefined_value());
+  isolate_->heap()->set_new_array_buffer_views_list(
+      isolate_->heap()->undefined_value());
 
   // The allocation site list is build during root iteration, but if no sites
   // were encountered then it needs to be initialized to undefined.
index d40b7e95a91be1ee3ad328fe51f841102ed1d819..c1f59de45aa3c669da0c8e1e1ff2578234c77312 100644 (file)
@@ -62,7 +62,20 @@ static bool HasArrayBufferInWeakList(Heap* heap, JSArrayBuffer* ab) {
 }
 
 
-static int CountViews(JSArrayBuffer* array_buffer) {
+static int CountViewsInNewSpaceList(Heap* heap, JSArrayBuffer* array_buffer) {
+  int count = 0;
+  for (Object* o = heap->new_array_buffer_views_list(); !o->IsUndefined();) {
+    JSArrayBufferView* view = JSArrayBufferView::cast(o);
+    if (array_buffer == view->buffer()) {
+      count++;
+    }
+    o = view->weak_next();
+  }
+  return count;
+}
+
+
+static int CountViews(Heap* heap, JSArrayBuffer* array_buffer) {
   int count = 0;
   for (Object* o = array_buffer->weak_first_view();
        !o->IsUndefined();
@@ -70,17 +83,27 @@ static int CountViews(JSArrayBuffer* array_buffer) {
     count++;
   }
 
-  return count;
+  return count + CountViewsInNewSpaceList(heap, array_buffer);
 }
 
-static bool HasViewInWeakList(JSArrayBuffer* array_buffer,
+
+static bool HasViewInNewSpaceList(Heap* heap, JSArrayBufferView* ta) {
+  for (Object* o = heap->new_array_buffer_views_list(); !o->IsUndefined();
+       o = JSArrayBufferView::cast(o)->weak_next()) {
+    if (ta == o) return true;
+  }
+  return false;
+}
+
+
+static bool HasViewInWeakList(Heap* heap, JSArrayBuffer* array_buffer,
                               JSArrayBufferView* ta) {
   for (Object* o = array_buffer->weak_first_view();
        !o->IsUndefined();
        o = JSArrayBufferView::cast(o)->weak_next()) {
     if (ta == o) return true;
   }
-  return false;
+  return HasViewInNewSpaceList(heap, ta);
 }
 
 
@@ -200,18 +223,18 @@ void TestViewFromApi() {
 
       Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
       Handle<JSArrayBufferView> ita2 = v8::Utils::OpenHandle(*ta2);
-      CHECK_EQ(2, CountViews(*iab));
-      CHECK(HasViewInWeakList(*iab, *ita1));
-      CHECK(HasViewInWeakList(*iab, *ita2));
+      CHECK_EQ(2, CountViews(isolate->heap(), *iab));
+      CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
+      CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita2));
     }
     isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-    CHECK_EQ(1, CountViews(*iab));
+    CHECK_EQ(1, CountViews(isolate->heap(), *iab));
     Handle<JSArrayBufferView> ita1 = v8::Utils::OpenHandle(*ta1);
-    CHECK(HasViewInWeakList(*iab, *ita1));
+    CHECK(HasViewInWeakList(isolate->heap(), *iab, *ita1));
   }
   isolate->heap()->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
 
-  CHECK_EQ(0, CountViews(*iab));
+  CHECK_EQ(0, CountViews(isolate->heap(), *iab));
 }
 
 
@@ -299,10 +322,13 @@ static void TestTypedArrayFromScript(const char* constructor) {
           v8::Handle<TypedArray>::Cast(CompileRun("ta3"));
       CHECK_EQ(1, CountArrayBuffersInWeakList(isolate->heap()) - start);
       Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-      CHECK_EQ(3, CountViews(*iab));
-      CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta1)));
-      CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta2)));
-      CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta3)));
+      CHECK_EQ(3, CountViews(isolate->heap(), *iab));
+      CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                              *v8::Utils::OpenHandle(*ta1)));
+      CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                              *v8::Utils::OpenHandle(*ta2)));
+      CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                              *v8::Utils::OpenHandle(*ta3)));
     }
 
     i::SNPrintF(source, "ta%d = null;", i);
@@ -316,13 +342,14 @@ static void TestTypedArrayFromScript(const char* constructor) {
       v8::Handle<v8::ArrayBuffer> ab =
           v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
       Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-      CHECK_EQ(2, CountViews(*iab));
+      CHECK_EQ(2, CountViews(isolate->heap(), *iab));
       for (int j = 1; j <= 3; j++) {
         if (j == i) continue;
         i::SNPrintF(source, "ta%d", j);
         v8::Handle<TypedArray> ta =
             v8::Handle<TypedArray>::Cast(CompileRun(source.start()));
-        CHECK(HasViewInWeakList(*iab, *v8::Utils::OpenHandle(*ta)));
+        CHECK(HasViewInWeakList(isolate->heap(), *iab,
+                                *v8::Utils::OpenHandle(*ta)));
       }
     }
 
@@ -336,7 +363,7 @@ static void TestTypedArrayFromScript(const char* constructor) {
       v8::Handle<v8::ArrayBuffer> ab =
           v8::Handle<v8::ArrayBuffer>::Cast(CompileRun("ab"));
       Handle<JSArrayBuffer> iab = v8::Utils::OpenHandle(*ab);
-      CHECK_EQ(0, CountViews(*iab));
+      CHECK_EQ(0, CountViews(isolate->heap(), *iab));
     }
   }
 }