Introduce per-isolate assert scopes and API to guard JS execution.
author: yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 19 Mar 2014 11:31:43 +0000 (11:31 +0000)
committer: yangguo@chromium.org <yangguo@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Wed, 19 Mar 2014 11:31:43 +0000 (11:31 +0000)
R=jochen@chromium.org

Review URL: https://codereview.chromium.org/198253004

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20062 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

17 files changed:
include/v8.h
src/api.cc
src/assert-scope.cc [new file with mode: 0644]
src/assert-scope.h
src/builtins.cc
src/execution.cc
src/heap-inl.h
src/heap.cc
src/heap.h
src/isolate.cc
src/isolate.h
src/mark-compact.cc
test/cctest/cctest.status
test/cctest/test-api.cc
test/cctest/test-heap.cc
test/cctest/test-strings.cc
tools/gyp/v8.gyp

index 214bf9f..942ef4d 100644 (file)
@@ -4084,6 +4084,37 @@ class V8_EXPORT Isolate {
     Scope& operator=(const Scope&);
   };
 
+
+  /**
+   * Assert that no Javascript code is invoked.
+   */
+  class DisallowJavascriptExecutionScope {
+   public:
+    explicit DisallowJavascriptExecutionScope(Isolate* isolate);
+    ~DisallowJavascriptExecutionScope();
+
+   private:
+    void* internal_;
+
+    // Prevent copying of Scope objects.
+    DisallowJavascriptExecutionScope(const DisallowJavascriptExecutionScope&);
+    DisallowJavascriptExecutionScope& operator=(
+        const DisallowJavascriptExecutionScope&);
+  };
+
+
+  /**
+   * Introduce exception to DisallowJavascriptExecutionScope.
+   */
+  class AllowJavascriptExecutionScope {
+   public:
+    explicit AllowJavascriptExecutionScope(Isolate* isolate);
+    ~AllowJavascriptExecutionScope();
+
+   private:
+    void* internal_;
+  };
+
   /**
    * Types of garbage collections that can be requested via
    * RequestGarbageCollectionForTesting.
index 5dd392f..381ca00 100644 (file)
@@ -6514,6 +6514,32 @@ void Isolate::Exit() {
 }
 
 
+Isolate::DisallowJavascriptExecutionScope::DisallowJavascriptExecutionScope(
+    Isolate* isolate) {
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  internal_ = reinterpret_cast<void*>(
+      new i::DisallowJavascriptExecution(i_isolate));
+}
+
+
+Isolate::DisallowJavascriptExecutionScope::~DisallowJavascriptExecutionScope() {
+  delete reinterpret_cast<i::DisallowJavascriptExecution*>(internal_);
+}
+
+
+Isolate::AllowJavascriptExecutionScope::AllowJavascriptExecutionScope(
+    Isolate* isolate) {
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  internal_ = reinterpret_cast<void*>(
+      new i::AllowJavascriptExecution(i_isolate));
+}
+
+
+Isolate::AllowJavascriptExecutionScope::~AllowJavascriptExecutionScope() {
+  delete reinterpret_cast<i::AllowJavascriptExecution*>(internal_);
+}
+
+
 void Isolate::GetHeapStatistics(HeapStatistics* heap_statistics) {
   i::Isolate* isolate = reinterpret_cast<i::Isolate*>(this);
   if (!isolate->IsInitialized()) {
diff --git a/src/assert-scope.cc b/src/assert-scope.cc
new file mode 100644 (file)
index 0000000..960567c
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2014 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+
+#include "assert-scope.h"
+#include "v8.h"
+
+namespace v8 {
+namespace internal {
+
+uint32_t PerIsolateAssertBase::GetData(Isolate* isolate) {
+  return isolate->per_isolate_assert_data();
+}
+
+
+void PerIsolateAssertBase::SetData(Isolate* isolate, uint32_t data) {
+  isolate->set_per_isolate_assert_data(data);
+}
+
+} }  // namespace v8::internal
index 269b280..4357056 100644 (file)
@@ -30,6 +30,7 @@
 
 #include "allocation.h"
 #include "platform.h"
+#include "utils.h"
 
 namespace v8 {
 namespace internal {
@@ -46,7 +47,12 @@ enum PerThreadAssertType {
 };
 
 
-#ifdef DEBUG
+enum PerIsolateAssertType {
+  JAVASCRIPT_EXECUTION_ASSERT,
+  ALLOCATION_FAILURE_ASSERT
+};
+
+
 class PerThreadAssertData {
  public:
   PerThreadAssertData() : nesting_level_(0) {
@@ -72,12 +78,9 @@ class PerThreadAssertData {
 
   DISALLOW_COPY_AND_ASSIGN(PerThreadAssertData);
 };
-#endif  // DEBUG
 
 
 class PerThreadAssertScopeBase {
-#ifdef DEBUG
-
  protected:
   PerThreadAssertScopeBase() {
     data_ = GetAssertData();
@@ -110,18 +113,12 @@ class PerThreadAssertScopeBase {
   static void SetThreadLocalData(PerThreadAssertData* data) {
     Thread::SetThreadLocal(thread_local_key, data);
   }
-#endif  // DEBUG
 };
 
 
-
 template <PerThreadAssertType type, bool allow>
 class PerThreadAssertScope : public PerThreadAssertScopeBase {
  public:
-#ifndef DEBUG
-  PerThreadAssertScope() { }
-  static void SetIsAllowed(bool is_allowed) { }
-#else
   PerThreadAssertScope() {
     old_state_ = data_->get(type);
     data_->set(type, allow);
@@ -136,49 +133,132 @@ class PerThreadAssertScope : public PerThreadAssertScopeBase {
 
  private:
   bool old_state_;
+
+  DISALLOW_COPY_AND_ASSIGN(PerThreadAssertScope);
+};
+
+
+class PerIsolateAssertBase {
+ protected:
+  static uint32_t GetData(Isolate* isolate);
+  static void SetData(Isolate* isolate, uint32_t data);
+};
+
+
+template <PerIsolateAssertType type, bool allow>
+class PerIsolateAssertScope : public PerIsolateAssertBase {
+ public:
+  explicit PerIsolateAssertScope(Isolate* isolate) : isolate_(isolate) {
+    STATIC_ASSERT(type < 32);
+    old_data_ = GetData(isolate_);
+    SetData(isolate_, DataBit::update(old_data_, allow));
+  }
+
+  ~PerIsolateAssertScope() {
+    SetData(isolate_, old_data_);
+  }
+
+  static bool IsAllowed(Isolate* isolate) {
+    return DataBit::decode(GetData(isolate));
+  }
+
+ private:
+  typedef BitField<bool, type, 1> DataBit;
+
+  uint32_t old_data_;
+  Isolate* isolate_;
+
+  DISALLOW_COPY_AND_ASSIGN(PerIsolateAssertScope);
+};
+
+
+template <PerThreadAssertType type, bool allow>
+#ifdef DEBUG
+class PerThreadAssertScopeDebugOnly : public
+    PerThreadAssertScope<type, allow> {
+#else
+class PerThreadAssertScopeDebugOnly {
+ public:
+  PerThreadAssertScopeDebugOnly() { }
+#endif
+};
+
+
+template <PerIsolateAssertType type, bool allow>
+#ifdef DEBUG
+class PerIsolateAssertScopeDebugOnly : public
+    PerIsolateAssertScope<type, allow> {
+ public:
+  explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate)
+      : PerIsolateAssertScope<type, allow>(isolate) { }
+#else
+class PerIsolateAssertScopeDebugOnly {
+ public:
+  explicit PerIsolateAssertScopeDebugOnly(Isolate* isolate) { }
 #endif
 };
 
+// Per-thread assert scopes.
+
 // Scope to document where we do not expect handles to be created.
-typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false>
+typedef PerThreadAssertScopeDebugOnly<HANDLE_ALLOCATION_ASSERT, false>
     DisallowHandleAllocation;
 
 // Scope to introduce an exception to DisallowHandleAllocation.
-typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true>
+typedef PerThreadAssertScopeDebugOnly<HANDLE_ALLOCATION_ASSERT, true>
     AllowHandleAllocation;
 
 // Scope to document where we do not expect any allocation and GC.
-typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, false>
+typedef PerThreadAssertScopeDebugOnly<HEAP_ALLOCATION_ASSERT, false>
     DisallowHeapAllocation;
 
 // Scope to introduce an exception to DisallowHeapAllocation.
-typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, true>
+typedef PerThreadAssertScopeDebugOnly<HEAP_ALLOCATION_ASSERT, true>
     AllowHeapAllocation;
 
 // Scope to document where we do not expect any handle dereferences.
-typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false>
+typedef PerThreadAssertScopeDebugOnly<HANDLE_DEREFERENCE_ASSERT, false>
     DisallowHandleDereference;
 
 // Scope to introduce an exception to DisallowHandleDereference.
-typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, true>
+typedef PerThreadAssertScopeDebugOnly<HANDLE_DEREFERENCE_ASSERT, true>
     AllowHandleDereference;
 
 // Scope to document where we do not expect deferred handles to be dereferenced.
-typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false>
+typedef PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false>
     DisallowDeferredHandleDereference;
 
 // Scope to introduce an exception to DisallowDeferredHandleDereference.
-typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true>
+typedef PerThreadAssertScopeDebugOnly<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true>
     AllowDeferredHandleDereference;
 
 // Scope to document where we do not expect deferred handles to be dereferenced.
-typedef PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, false>
+typedef PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, false>
     DisallowCodeDependencyChange;
 
 // Scope to introduce an exception to DisallowDeferredHandleDereference.
-typedef PerThreadAssertScope<CODE_DEPENDENCY_CHANGE_ASSERT, true>
+typedef PerThreadAssertScopeDebugOnly<CODE_DEPENDENCY_CHANGE_ASSERT, true>
     AllowCodeDependencyChange;
 
+
+// Per-isolate assert scopes.
+
+// Scope to document where we do not expect javascript execution.
+typedef PerIsolateAssertScope<JAVASCRIPT_EXECUTION_ASSERT, false>
+    DisallowJavascriptExecution;
+
+// Scope to introduce an exception to DisallowJavascriptExecution.
+typedef PerIsolateAssertScope<JAVASCRIPT_EXECUTION_ASSERT, true>
+    AllowJavascriptExecution;
+
+// Scope to document where we do not expect an allocation failure.
+typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, false>
+    DisallowAllocationFailure;
+
+// Scope to introduce an exception to DisallowAllocationFailure.
+typedef PerIsolateAssertScopeDebugOnly<ALLOCATION_FAILURE_ASSERT, true>
+    AllowAllocationFailure;
+
 } }  // namespace v8::internal
 
 #endif  // V8_ASSERT_SCOPE_H_
index 38e090e..e90e7aa 100644 (file)
@@ -1665,7 +1665,7 @@ void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
       {
         // During startup it's OK to always allocate and defer GC to later.
         // This simplifies things because we don't need to retry.
-        AlwaysAllocateScope __scope__;
+        AlwaysAllocateScope __scope__(isolate);
         { MaybeObject* maybe_code =
               heap->CreateCode(desc, flags, masm.CodeObject());
           if (!maybe_code->ToObject(&code)) {
index ac848e1..bef01a2 100644 (file)
@@ -77,6 +77,7 @@ static Handle<Object> Invoke(bool is_construct,
 
   // Entering JavaScript.
   VMState<JS> state(isolate);
+  CHECK(AllowJavascriptExecution::IsAllowed(isolate));
 
   // Placeholder for return value.
   MaybeObject* value = reinterpret_cast<Object*>(kZapValue);
index 29a2fbf..c36a6fd 100644 (file)
@@ -223,7 +223,7 @@ MaybeObject* Heap::AllocateRaw(int size_in_bytes,
   HeapProfiler* profiler = isolate_->heap_profiler();
 #ifdef DEBUG
   if (FLAG_gc_interval >= 0 &&
-      !disallow_allocation_failure_ &&
+      AllowAllocationFailure::IsAllowed(isolate_) &&
       Heap::allocation_timeout_-- <= 0) {
     return Failure::RetryAfterGC(space);
   }
@@ -663,7 +663,7 @@ Isolate* Heap::isolate() {
     (ISOLATE)->counters()->gc_last_resort_from_handles()->Increment();         \
     (ISOLATE)->heap()->CollectAllAvailableGarbage("last resort gc");           \
     {                                                                          \
-      AlwaysAllocateScope __scope__;                                           \
+      AlwaysAllocateScope __scope__(ISOLATE);                                  \
       __maybe_object__ = FUNCTION_CALL;                                        \
     }                                                                          \
     if (__maybe_object__->ToObject(&__object__)) RETURN_VALUE;                 \
@@ -778,21 +778,20 @@ void Heap::CompletelyClearInstanceofCache() {
 }
 
 
-AlwaysAllocateScope::AlwaysAllocateScope() {
+AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
+    : heap_(isolate->heap()), daf_(isolate) {
   // We shouldn't hit any nested scopes, because that requires
   // non-handle code to call handle code. The code still works but
   // performance will degrade, so we want to catch this situation
   // in debug mode.
-  Isolate* isolate = Isolate::Current();
-  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
-  isolate->heap()->always_allocate_scope_depth_++;
+  ASSERT(heap_->always_allocate_scope_depth_ == 0);
+  heap_->always_allocate_scope_depth_++;
 }
 
 
 AlwaysAllocateScope::~AlwaysAllocateScope() {
-  Isolate* isolate = Isolate::Current();
-  isolate->heap()->always_allocate_scope_depth_--;
-  ASSERT(isolate->heap()->always_allocate_scope_depth_ == 0);
+  heap_->always_allocate_scope_depth_--;
+  ASSERT(heap_->always_allocate_scope_depth_ == 0);
 }
 
 
@@ -848,23 +847,6 @@ double GCTracer::SizeOfHeapObjects() {
 }
 
 
-DisallowAllocationFailure::DisallowAllocationFailure() {
-#ifdef DEBUG
-  Isolate* isolate = Isolate::Current();
-  old_state_ = isolate->heap()->disallow_allocation_failure_;
-  isolate->heap()->disallow_allocation_failure_ = true;
-#endif
-}
-
-
-DisallowAllocationFailure::~DisallowAllocationFailure() {
-#ifdef DEBUG
-  Isolate* isolate = Isolate::Current();
-  isolate->heap()->disallow_allocation_failure_ = old_state_;
-#endif
-}
-
-
 } }  // namespace v8::internal
 
 #endif  // V8_HEAP_INL_H_
index 37a3804..6790fe9 100644 (file)
@@ -105,7 +105,6 @@ Heap::Heap()
       unflattened_strings_length_(0),
 #ifdef DEBUG
       allocation_timeout_(0),
-      disallow_allocation_failure_(false),
 #endif  // DEBUG
       new_space_high_promotion_mode_active_(false),
       old_generation_allocation_limit_(kMinimumOldGenerationAllocationLimit),
@@ -7618,7 +7617,7 @@ void DescriptorLookupCache::Clear() {
 void Heap::GarbageCollectionGreedyCheck() {
   ASSERT(FLAG_gc_greedy);
   if (isolate_->bootstrapper()->IsActive()) return;
-  if (disallow_allocation_failure()) return;
+  if (!AllowAllocationFailure::IsAllowed(isolate_)) return;
   CollectGarbage(NEW_SPACE);
 }
 #endif
index 81bf850..4c280aa 100644 (file)
@@ -1496,10 +1496,6 @@ class Heap {
     allocation_timeout_ = timeout;
   }
 
-  bool disallow_allocation_failure() {
-    return disallow_allocation_failure_;
-  }
-
   void TracePathToObjectFrom(Object* target, Object* root);
   void TracePathToObject(Object* target);
   void TracePathToGlobal();
@@ -2009,10 +2005,6 @@ class Heap {
   // variable holds the value indicating the number of allocations
   // remain until the next failure and garbage collection.
   int allocation_timeout_;
-
-  // Do we expect to be able to handle allocation failure at this
-  // time?
-  bool disallow_allocation_failure_;
 #endif  // DEBUG
 
   // Indicates that the new space should be kept small due to high promotion
@@ -2521,15 +2513,11 @@ class Heap {
   MemoryChunk* chunks_queued_for_free_;
 
   Mutex* relocation_mutex_;
-#ifdef DEBUG
-  bool relocation_mutex_locked_by_optimizer_thread_;
-#endif  // DEBUG;
 
   int gc_callbacks_depth_;
 
   friend class Factory;
   friend class GCTracer;
-  friend class DisallowAllocationFailure;
   friend class AlwaysAllocateScope;
   friend class Page;
   friend class Isolate;
@@ -2580,26 +2568,15 @@ class HeapStats {
 };
 
 
-class DisallowAllocationFailure {
- public:
-  inline DisallowAllocationFailure();
-  inline ~DisallowAllocationFailure();
-
-#ifdef DEBUG
- private:
-  bool old_state_;
-#endif
-};
-
-
 class AlwaysAllocateScope {
  public:
-  inline AlwaysAllocateScope();
+  explicit inline AlwaysAllocateScope(Isolate* isolate);
   inline ~AlwaysAllocateScope();
 
  private:
   // Implicitly disable artificial allocation failures.
-  DisallowAllocationFailure disallow_allocation_failure_;
+  Heap* heap_;
+  DisallowAllocationFailure daf_;
 };
 
 
index c48e2d3..50b402b 100644 (file)
@@ -1918,7 +1918,7 @@ bool Isolate::Init(Deserializer* des) {
   }
 
   // The initialization process does not handle memory exhaustion.
-  DisallowAllocationFailure disallow_allocation_failure;
+  DisallowAllocationFailure disallow_allocation_failure(this);
 
   InitializeLoggingAndCounters();
 
index dfab99d..d1f7003 100644 (file)
@@ -380,6 +380,7 @@ typedef List<HeapObject*> DebugObjectCache;
   V(CodeTracer*, code_tracer, NULL)                                            \
   V(bool, fp_stubs_generated, false)                                           \
   V(int, max_available_threads, 0)                                             \
+  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                            \
   ISOLATE_INIT_SIMULATOR_LIST(V)                                               \
   ISOLATE_DEBUGGER_INIT_LIST(V)
 
index 2413930..eea9eb4 100644 (file)
@@ -3045,7 +3045,7 @@ void MarkCompactCollector::EvacuateNewSpace() {
   // There are soft limits in the allocation code, designed trigger a mark
   // sweep collection by failing allocations.  But since we are already in
   // a mark-sweep allocation, there is no sense in trying to trigger one.
-  AlwaysAllocateScope scope;
+  AlwaysAllocateScope scope(isolate());
   heap()->CheckNewSpaceExpansionCriteria();
 
   NewSpace* new_space = heap()->new_space();
@@ -3077,7 +3077,7 @@ void MarkCompactCollector::EvacuateNewSpace() {
 
 
 void MarkCompactCollector::EvacuateLiveObjectsFromPage(Page* p) {
-  AlwaysAllocateScope always_allocate;
+  AlwaysAllocateScope always_allocate(isolate());
   PagedSpace* space = static_cast<PagedSpace*>(p->owner());
   ASSERT(p->IsEvacuationCandidate() && !p->WasSwept());
   p->MarkSweptPrecisely();
index 0fcdfc1..753421c 100644 (file)
   # This test always fails.  It tests that LiveEdit causes abort when turned off.
   'test-debug/LiveEditDisabled': [FAIL],
 
+  # This test always fails.  It tests that DisallowJavascriptExecutionScope
+  # works as intended.
+  'test-api/DisallowJavascriptExecutionScope': [FAIL],
+
   # TODO(gc): Temporarily disabled in the GC branch.
   'test-log/EquivalenceOfLoggingAndTraversal': [PASS, FAIL],
 
index de73da5..74afab2 100644 (file)
@@ -14218,7 +14218,8 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
   // have remnants of state from other code.
   v8::Isolate* isolate = v8::Isolate::New();
   isolate->Enter();
-  i::Heap* heap = reinterpret_cast<i::Isolate*>(isolate)->heap();
+  i::Isolate* i_isolate = reinterpret_cast<i::Isolate*>(isolate);
+  i::Heap* heap = i_isolate->heap();
 
   {
     v8::HandleScope scope(isolate);
@@ -14238,7 +14239,7 @@ UNINITIALIZED_TEST(SetJitCodeEventHandler) {
     const int kIterations = 10;
     for (int i = 0; i < kIterations; ++i) {
       LocalContext env(isolate);
-      i::AlwaysAllocateScope always_allocate;
+      i::AlwaysAllocateScope always_allocate(i_isolate);
       SimulateFullSpace(heap->code_space());
       CompileRun(script);
 
@@ -17649,7 +17650,7 @@ TEST(DynamicWithSourceURLInStackTraceString) {
 static void CreateGarbageInOldSpace() {
   i::Factory* factory = CcTest::i_isolate()->factory();
   v8::HandleScope scope(CcTest::isolate());
-  i::AlwaysAllocateScope always_allocate;
+  i::AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   for (int i = 0; i < 1000; i++) {
     factory->NewFixedArray(1000, i::TENURED);
   }
@@ -22392,3 +22393,23 @@ TEST(Promises) {
   CHECK_EQ(3, global->Get(v8_str("x1"))->Int32Value());
   CHECK_EQ(4, global->Get(v8_str("x2"))->Int32Value());
 }
+
+
+TEST(DisallowJavascriptExecutionScope) {
+  LocalContext context;
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::HandleScope scope(isolate);
+  v8::Isolate::DisallowJavascriptExecutionScope no_js(isolate);
+  CompileRun("2+2");
+}
+
+
+TEST(AllowJavascriptExecutionScope) {
+  LocalContext context;
+  v8::Isolate* isolate = context->GetIsolate();
+  v8::HandleScope scope(isolate);
+  v8::Isolate::DisallowJavascriptExecutionScope no_js(isolate);
+  { v8::Isolate::AllowJavascriptExecutionScope yes_js(isolate);
+    CompileRun("1+1");
+  }
+}
index 376c735..96af44b 100644 (file)
@@ -1025,7 +1025,7 @@ TEST(Regression39128) {
   // Step 4: clone jsobject, but force always allocate first to create a clone
   // in old pointer space.
   Address old_pointer_space_top = heap->old_pointer_space()->top();
-  AlwaysAllocateScope aa_scope;
+  AlwaysAllocateScope aa_scope(isolate);
   Object* clone_obj = heap->CopyJSObject(jsobject)->ToObjectChecked();
   JSObject* clone = JSObject::cast(clone_obj);
   if (clone->address() != old_pointer_space_top) {
@@ -1599,7 +1599,7 @@ TEST(TestSizeOfObjects) {
   {
     // Allocate objects on several different old-space pages so that
     // lazy sweeping kicks in for subsequent GC runs.
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
     int filler_size = static_cast<int>(FixedArray::SizeFor(8192));
     for (int i = 1; i <= 100; i++) {
       CcTest::heap()->AllocateFixedArray(8192, TENURED)->ToObjectChecked();
@@ -1666,7 +1666,7 @@ static void FillUpNewSpace(NewSpace* new_space) {
   Isolate* isolate = heap->isolate();
   Factory* factory = isolate->factory();
   HandleScope scope(isolate);
-  AlwaysAllocateScope always_allocate;
+  AlwaysAllocateScope always_allocate(isolate);
   intptr_t available = new_space->EffectiveCapacity() - new_space->Size();
   intptr_t number_of_fillers = (available / FixedArray::SizeFor(32)) - 1;
   for (intptr_t i = 0; i < number_of_fillers; i++) {
@@ -2045,7 +2045,7 @@ TEST(PrototypeTransitionClearing) {
   Handle<JSObject> prototype;
   PagedSpace* space = CcTest::heap()->old_pointer_space();
   {
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(isolate);
     SimulateFullSpace(space);
     prototype = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
   }
@@ -2173,7 +2173,7 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
   v8::HandleScope scope(CcTest::isolate());
 
   SimulateFullSpace(CcTest::heap()->new_space());
-  AlwaysAllocateScope always_allocate;
+  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
       "function c(x) {"
       "  this.x = x;"
@@ -2555,7 +2555,7 @@ TEST(OptimizedPretenuringCallNew) {
   v8::HandleScope scope(CcTest::isolate());
   CcTest::heap()->SetNewSpaceHighPromotionModeActive(true);
 
-  AlwaysAllocateScope always_allocate;
+  AlwaysAllocateScope always_allocate(CcTest::i_isolate());
   v8::Local<v8::Value> res = CompileRun(
       "function g() { this.a = 0; }"
       "function f() {"
@@ -2587,7 +2587,7 @@ TEST(Regress1465) {
   static const int transitions_count = 256;
 
   {
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(CcTest::i_isolate());
     for (int i = 0; i < transitions_count; i++) {
       EmbeddedVector<char, 64> buffer;
       OS::SNPrintF(buffer, "var o = new Object; o.prop%d = %d;", i, i);
@@ -2717,7 +2717,7 @@ TEST(ReleaseOverReservedPages) {
   PagedSpace* old_pointer_space = heap->old_pointer_space();
   CHECK_EQ(1, old_pointer_space->CountTotalPages());
   for (int i = 0; i < number_of_test_pages; i++) {
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(isolate);
     SimulateFullSpace(old_pointer_space);
     factory->NewFixedArray(1, TENURED);
   }
@@ -2766,7 +2766,7 @@ TEST(Regress2237) {
     // Generate a sliced string that is based on the above parent and
     // lives in old-space.
     SimulateFullSpace(CcTest::heap()->new_space());
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(isolate);
     Handle<String> t = factory->NewProperSubString(s, 5, 35);
     CHECK(t->IsSlicedString());
     CHECK(!CcTest::heap()->InNewSpace(*t));
@@ -3359,7 +3359,7 @@ TEST(Regress169928) {
 
   // This should crash with a protection violation if we are running a build
   // with the bug.
-  AlwaysAllocateScope aa_scope;
+  AlwaysAllocateScope aa_scope(isolate);
   v8::Script::Compile(mote_code_string)->Run();
 }
 
index 4b31e61..129e6cf 100644 (file)
@@ -661,7 +661,7 @@ void TestStringCharacterStream(BuildString build, int test_cases) {
   for (int i = 0; i < test_cases; i++) {
     printf("%d\n", i);
     HandleScope inner_scope(isolate);
-    AlwaysAllocateScope always_allocate;
+    AlwaysAllocateScope always_allocate(isolate);
     // Build flat version of cons string.
     Handle<String> flat_string = build(i, &data);
     ConsStringStats flat_string_stats;
index 6039ce1..5967bfe 100644 (file)
         '../../src/assembler.cc',
         '../../src/assembler.h',
         '../../src/assert-scope.h',
+        '../../src/assert-scope.cc',
         '../../src/ast.cc',
         '../../src/ast.h',
         '../../src/atomicops.h',